##// END OF EJS Templates
code: fixed deprecated octal calls for py3 compat.
marcink -
r3268:6e0a80a7 default
parent child Browse files
Show More
@@ -1,96 +1,96 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 from pyramid.events import ApplicationCreated
23 from pyramid.events import ApplicationCreated
24 from pyramid.settings import asbool
24 from pyramid.settings import asbool
25
25
26 from rhodecode.apps._base import ADMIN_PREFIX
26 from rhodecode.apps._base import ADMIN_PREFIX
27 from rhodecode.lib.ext_json import json
27 from rhodecode.lib.ext_json import json
28
28
29
29
30 def url_gen(request):
30 def url_gen(request):
31 registry = request.registry
31 registry = request.registry
32 longpoll_url = registry.settings.get('channelstream.longpoll_url', '')
32 longpoll_url = registry.settings.get('channelstream.longpoll_url', '')
33 ws_url = registry.settings.get('channelstream.ws_url', '')
33 ws_url = registry.settings.get('channelstream.ws_url', '')
34 proxy_url = request.route_url('channelstream_proxy')
34 proxy_url = request.route_url('channelstream_proxy')
35 urls = {
35 urls = {
36 'connect': request.route_path('channelstream_connect'),
36 'connect': request.route_path('channelstream_connect'),
37 'subscribe': request.route_path('channelstream_subscribe'),
37 'subscribe': request.route_path('channelstream_subscribe'),
38 'longpoll': longpoll_url or proxy_url,
38 'longpoll': longpoll_url or proxy_url,
39 'ws': ws_url or proxy_url.replace('http', 'ws')
39 'ws': ws_url or proxy_url.replace('http', 'ws')
40 }
40 }
41 return json.dumps(urls)
41 return json.dumps(urls)
42
42
43
43
44 PLUGIN_DEFINITION = {
44 PLUGIN_DEFINITION = {
45 'name': 'channelstream',
45 'name': 'channelstream',
46 'config': {
46 'config': {
47 'javascript': [],
47 'javascript': [],
48 'css': [],
48 'css': [],
49 'template_hooks': {
49 'template_hooks': {
50 'plugin_init_template': 'rhodecode:templates/channelstream/plugin_init.mako'
50 'plugin_init_template': 'rhodecode:templates/channelstream/plugin_init.mako'
51 },
51 },
52 'url_gen': url_gen,
52 'url_gen': url_gen,
53 'static': None,
53 'static': None,
54 'enabled': False,
54 'enabled': False,
55 'server': '',
55 'server': '',
56 'secret': ''
56 'secret': ''
57 }
57 }
58 }
58 }
59
59
60
60
61 def maybe_create_history_store(event):
61 def maybe_create_history_store(event):
62 # create plugin history location
62 # create plugin history location
63 settings = event.app.registry.settings
63 settings = event.app.registry.settings
64 history_dir = settings.get('channelstream.history.location', '')
64 history_dir = settings.get('channelstream.history.location', '')
65 if history_dir and not os.path.exists(history_dir):
65 if history_dir and not os.path.exists(history_dir):
66 os.makedirs(history_dir, 0750)
66 os.makedirs(history_dir, 0o750)
67
67
68
68
69 def includeme(config):
69 def includeme(config):
70 settings = config.registry.settings
70 settings = config.registry.settings
71 PLUGIN_DEFINITION['config']['enabled'] = asbool(
71 PLUGIN_DEFINITION['config']['enabled'] = asbool(
72 settings.get('channelstream.enabled'))
72 settings.get('channelstream.enabled'))
73 PLUGIN_DEFINITION['config']['server'] = settings.get(
73 PLUGIN_DEFINITION['config']['server'] = settings.get(
74 'channelstream.server', '')
74 'channelstream.server', '')
75 PLUGIN_DEFINITION['config']['secret'] = settings.get(
75 PLUGIN_DEFINITION['config']['secret'] = settings.get(
76 'channelstream.secret', '')
76 'channelstream.secret', '')
77 PLUGIN_DEFINITION['config']['history.location'] = settings.get(
77 PLUGIN_DEFINITION['config']['history.location'] = settings.get(
78 'channelstream.history.location', '')
78 'channelstream.history.location', '')
79 config.register_rhodecode_plugin(
79 config.register_rhodecode_plugin(
80 PLUGIN_DEFINITION['name'],
80 PLUGIN_DEFINITION['name'],
81 PLUGIN_DEFINITION['config']
81 PLUGIN_DEFINITION['config']
82 )
82 )
83 config.add_subscriber(maybe_create_history_store, ApplicationCreated)
83 config.add_subscriber(maybe_create_history_store, ApplicationCreated)
84
84
85 config.add_route(
85 config.add_route(
86 name='channelstream_connect',
86 name='channelstream_connect',
87 pattern=ADMIN_PREFIX + '/channelstream/connect')
87 pattern=ADMIN_PREFIX + '/channelstream/connect')
88 config.add_route(
88 config.add_route(
89 name='channelstream_subscribe',
89 name='channelstream_subscribe',
90 pattern=ADMIN_PREFIX + '/channelstream/subscribe')
90 pattern=ADMIN_PREFIX + '/channelstream/subscribe')
91 config.add_route(
91 config.add_route(
92 name='channelstream_proxy',
92 name='channelstream_proxy',
93 pattern=settings.get('channelstream.proxy_path') or '/_channelstream')
93 pattern=settings.get('channelstream.proxy_path') or '/_channelstream')
94
94
95 # Scan module for configuration decorators.
95 # Scan module for configuration decorators.
96 config.scan('.views', ignore='.tests')
96 config.scan('.views', ignore='.tests')
@@ -1,614 +1,614 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import tempfile
25 import tempfile
26 import time
26 import time
27
27
28 from paste.gzipper import make_gzip_middleware
28 from paste.gzipper import make_gzip_middleware
29 import pyramid.events
29 import pyramid.events
30 from pyramid.wsgi import wsgiapp
30 from pyramid.wsgi import wsgiapp
31 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid.authorization import ACLAuthorizationPolicy
32 from pyramid.config import Configurator
32 from pyramid.config import Configurator
33 from pyramid.settings import asbool, aslist
33 from pyramid.settings import asbool, aslist
34 from pyramid.httpexceptions import (
34 from pyramid.httpexceptions import (
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
36 from pyramid.renderers import render_to_response
36 from pyramid.renderers import render_to_response
37
37
38 from rhodecode.model import meta
38 from rhodecode.model import meta
39 from rhodecode.config import patches
39 from rhodecode.config import patches
40 from rhodecode.config import utils as config_utils
40 from rhodecode.config import utils as config_utils
41 from rhodecode.config.environment import load_pyramid_environment
41 from rhodecode.config.environment import load_pyramid_environment
42
42
43 import rhodecode.events
43 import rhodecode.events
44 from rhodecode.lib.middleware.vcs import VCSMiddleware
44 from rhodecode.lib.middleware.vcs import VCSMiddleware
45 from rhodecode.lib.request import Request
45 from rhodecode.lib.request import Request
46 from rhodecode.lib.vcs import VCSCommunicationError
46 from rhodecode.lib.vcs import VCSCommunicationError
47 from rhodecode.lib.exceptions import VCSServerUnavailable
47 from rhodecode.lib.exceptions import VCSServerUnavailable
48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
49 from rhodecode.lib.middleware.https_fixup import HttpsFixup
49 from rhodecode.lib.middleware.https_fixup import HttpsFixup
50 from rhodecode.lib.celerylib.loader import configure_celery
50 from rhodecode.lib.celerylib.loader import configure_celery
51 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
51 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
52 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
52 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
53 from rhodecode.lib.exc_tracking import store_exception
53 from rhodecode.lib.exc_tracking import store_exception
54 from rhodecode.subscribers import (
54 from rhodecode.subscribers import (
55 scan_repositories_if_enabled, write_js_routes_if_enabled,
55 scan_repositories_if_enabled, write_js_routes_if_enabled,
56 write_metadata_if_needed, inject_app_settings)
56 write_metadata_if_needed, inject_app_settings)
57
57
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
62 def is_http_error(response):
62 def is_http_error(response):
63 # error which should have traceback
63 # error which should have traceback
64 return response.status_code > 499
64 return response.status_code > 499
65
65
66
66
67 def should_load_all():
67 def should_load_all():
68 """
68 """
69 Returns if all application components should be loaded. In some cases it's
69 Returns if all application components should be loaded. In some cases it's
70 desired to skip apps loading for faster shell script execution
70 desired to skip apps loading for faster shell script execution
71 """
71 """
72 return True
72 return True
73
73
74
74
75 def make_pyramid_app(global_config, **settings):
75 def make_pyramid_app(global_config, **settings):
76 """
76 """
77 Constructs the WSGI application based on Pyramid.
77 Constructs the WSGI application based on Pyramid.
78
78
79 Specials:
79 Specials:
80
80
81 * The application can also be integrated like a plugin via the call to
81 * The application can also be integrated like a plugin via the call to
82 `includeme`. This is accompanied with the other utility functions which
82 `includeme`. This is accompanied with the other utility functions which
83 are called. Changing this should be done with great care to not break
83 are called. Changing this should be done with great care to not break
84 cases when these fragments are assembled from another place.
84 cases when these fragments are assembled from another place.
85
85
86 """
86 """
87
87
88 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
88 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
89 # will be replaced by the value of the environment variable "NAME" in this case.
89 # will be replaced by the value of the environment variable "NAME" in this case.
90 start_time = time.time()
90 start_time = time.time()
91
91
92 environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}
92 environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}
93
93
94 global_config = _substitute_values(global_config, environ)
94 global_config = _substitute_values(global_config, environ)
95 settings = _substitute_values(settings, environ)
95 settings = _substitute_values(settings, environ)
96
96
97 sanitize_settings_and_apply_defaults(settings)
97 sanitize_settings_and_apply_defaults(settings)
98
98
99 config = Configurator(settings=settings)
99 config = Configurator(settings=settings)
100
100
101 # Apply compatibility patches
101 # Apply compatibility patches
102 patches.inspect_getargspec()
102 patches.inspect_getargspec()
103
103
104 load_pyramid_environment(global_config, settings)
104 load_pyramid_environment(global_config, settings)
105
105
106 # Static file view comes first
106 # Static file view comes first
107 includeme_first(config)
107 includeme_first(config)
108
108
109 includeme(config)
109 includeme(config)
110
110
111 pyramid_app = config.make_wsgi_app()
111 pyramid_app = config.make_wsgi_app()
112 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
112 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
113 pyramid_app.config = config
113 pyramid_app.config = config
114
114
115 config.configure_celery(global_config['__file__'])
115 config.configure_celery(global_config['__file__'])
116 # creating the app uses a connection - return it after we are done
116 # creating the app uses a connection - return it after we are done
117 meta.Session.remove()
117 meta.Session.remove()
118 total_time = time.time() - start_time
118 total_time = time.time() - start_time
119 log.info('Pyramid app `%s` created and configured in %.2fs',
119 log.info('Pyramid app `%s` created and configured in %.2fs',
120 pyramid_app.func_name, total_time)
120 pyramid_app.func_name, total_time)
121 return pyramid_app
121 return pyramid_app
122
122
123
123
124 def not_found_view(request):
124 def not_found_view(request):
125 """
125 """
126 This creates the view which should be registered as not-found-view to
126 This creates the view which should be registered as not-found-view to
127 pyramid.
127 pyramid.
128 """
128 """
129
129
130 if not getattr(request, 'vcs_call', None):
130 if not getattr(request, 'vcs_call', None):
131 # handle like regular case with our error_handler
131 # handle like regular case with our error_handler
132 return error_handler(HTTPNotFound(), request)
132 return error_handler(HTTPNotFound(), request)
133
133
134 # handle not found view as a vcs call
134 # handle not found view as a vcs call
135 settings = request.registry.settings
135 settings = request.registry.settings
136 ae_client = getattr(request, 'ae_client', None)
136 ae_client = getattr(request, 'ae_client', None)
137 vcs_app = VCSMiddleware(
137 vcs_app = VCSMiddleware(
138 HTTPNotFound(), request.registry, settings,
138 HTTPNotFound(), request.registry, settings,
139 appenlight_client=ae_client)
139 appenlight_client=ae_client)
140
140
141 return wsgiapp(vcs_app)(None, request)
141 return wsgiapp(vcs_app)(None, request)
142
142
143
143
144 def error_handler(exception, request):
144 def error_handler(exception, request):
145 import rhodecode
145 import rhodecode
146 from rhodecode.lib import helpers
146 from rhodecode.lib import helpers
147
147
148 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
148 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
149
149
150 base_response = HTTPInternalServerError()
150 base_response = HTTPInternalServerError()
151 # prefer original exception for the response since it may have headers set
151 # prefer original exception for the response since it may have headers set
152 if isinstance(exception, HTTPException):
152 if isinstance(exception, HTTPException):
153 base_response = exception
153 base_response = exception
154 elif isinstance(exception, VCSCommunicationError):
154 elif isinstance(exception, VCSCommunicationError):
155 base_response = VCSServerUnavailable()
155 base_response = VCSServerUnavailable()
156
156
157 if is_http_error(base_response):
157 if is_http_error(base_response):
158 log.exception(
158 log.exception(
159 'error occurred handling this request for path: %s', request.path)
159 'error occurred handling this request for path: %s', request.path)
160
160
161 error_explanation = base_response.explanation or str(base_response)
161 error_explanation = base_response.explanation or str(base_response)
162 if base_response.status_code == 404:
162 if base_response.status_code == 404:
163 error_explanation += " Or you don't have permission to access it."
163 error_explanation += " Or you don't have permission to access it."
164 c = AttributeDict()
164 c = AttributeDict()
165 c.error_message = base_response.status
165 c.error_message = base_response.status
166 c.error_explanation = error_explanation
166 c.error_explanation = error_explanation
167 c.visual = AttributeDict()
167 c.visual = AttributeDict()
168
168
169 c.visual.rhodecode_support_url = (
169 c.visual.rhodecode_support_url = (
170 request.registry.settings.get('rhodecode_support_url') or
170 request.registry.settings.get('rhodecode_support_url') or
171 request.route_url('rhodecode_support')
171 request.route_url('rhodecode_support')
172 )
172 )
173 c.redirect_time = 0
173 c.redirect_time = 0
174 c.rhodecode_name = rhodecode_title
174 c.rhodecode_name = rhodecode_title
175 if not c.rhodecode_name:
175 if not c.rhodecode_name:
176 c.rhodecode_name = 'Rhodecode'
176 c.rhodecode_name = 'Rhodecode'
177
177
178 c.causes = []
178 c.causes = []
179 if is_http_error(base_response):
179 if is_http_error(base_response):
180 c.causes.append('Server is overloaded.')
180 c.causes.append('Server is overloaded.')
181 c.causes.append('Server database connection is lost.')
181 c.causes.append('Server database connection is lost.')
182 c.causes.append('Server expected unhandled error.')
182 c.causes.append('Server expected unhandled error.')
183
183
184 if hasattr(base_response, 'causes'):
184 if hasattr(base_response, 'causes'):
185 c.causes = base_response.causes
185 c.causes = base_response.causes
186
186
187 c.messages = helpers.flash.pop_messages(request=request)
187 c.messages = helpers.flash.pop_messages(request=request)
188
188
189 exc_info = sys.exc_info()
189 exc_info = sys.exc_info()
190 c.exception_id = id(exc_info)
190 c.exception_id = id(exc_info)
191 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
191 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
192 or base_response.status_code > 499
192 or base_response.status_code > 499
193 c.exception_id_url = request.route_url(
193 c.exception_id_url = request.route_url(
194 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
194 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
195
195
196 if c.show_exception_id:
196 if c.show_exception_id:
197 store_exception(c.exception_id, exc_info)
197 store_exception(c.exception_id, exc_info)
198
198
199 response = render_to_response(
199 response = render_to_response(
200 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
200 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
201 response=base_response)
201 response=base_response)
202
202
203 return response
203 return response
204
204
205
205
206 def includeme_first(config):
206 def includeme_first(config):
207 # redirect automatic browser favicon.ico requests to correct place
207 # redirect automatic browser favicon.ico requests to correct place
208 def favicon_redirect(context, request):
208 def favicon_redirect(context, request):
209 return HTTPFound(
209 return HTTPFound(
210 request.static_path('rhodecode:public/images/favicon.ico'))
210 request.static_path('rhodecode:public/images/favicon.ico'))
211
211
212 config.add_view(favicon_redirect, route_name='favicon')
212 config.add_view(favicon_redirect, route_name='favicon')
213 config.add_route('favicon', '/favicon.ico')
213 config.add_route('favicon', '/favicon.ico')
214
214
215 def robots_redirect(context, request):
215 def robots_redirect(context, request):
216 return HTTPFound(
216 return HTTPFound(
217 request.static_path('rhodecode:public/robots.txt'))
217 request.static_path('rhodecode:public/robots.txt'))
218
218
219 config.add_view(robots_redirect, route_name='robots')
219 config.add_view(robots_redirect, route_name='robots')
220 config.add_route('robots', '/robots.txt')
220 config.add_route('robots', '/robots.txt')
221
221
222 config.add_static_view(
222 config.add_static_view(
223 '_static/deform', 'deform:static')
223 '_static/deform', 'deform:static')
224 config.add_static_view(
224 config.add_static_view(
225 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
225 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
226
226
227
227
228 def includeme(config):
228 def includeme(config):
229 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
229 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
230 settings = config.registry.settings
230 settings = config.registry.settings
231 config.set_request_factory(Request)
231 config.set_request_factory(Request)
232
232
233 # plugin information
233 # plugin information
234 config.registry.rhodecode_plugins = collections.OrderedDict()
234 config.registry.rhodecode_plugins = collections.OrderedDict()
235
235
236 config.add_directive(
236 config.add_directive(
237 'register_rhodecode_plugin', register_rhodecode_plugin)
237 'register_rhodecode_plugin', register_rhodecode_plugin)
238
238
239 config.add_directive('configure_celery', configure_celery)
239 config.add_directive('configure_celery', configure_celery)
240
240
241 if asbool(settings.get('appenlight', 'false')):
241 if asbool(settings.get('appenlight', 'false')):
242 config.include('appenlight_client.ext.pyramid_tween')
242 config.include('appenlight_client.ext.pyramid_tween')
243
243
244 load_all = should_load_all()
244 load_all = should_load_all()
245
245
246 # Includes which are required. The application would fail without them.
246 # Includes which are required. The application would fail without them.
247 config.include('pyramid_mako')
247 config.include('pyramid_mako')
248 config.include('pyramid_beaker')
248 config.include('pyramid_beaker')
249 config.include('rhodecode.lib.rc_cache')
249 config.include('rhodecode.lib.rc_cache')
250
250
251 config.include('rhodecode.apps._base.navigation')
251 config.include('rhodecode.apps._base.navigation')
252 config.include('rhodecode.apps._base.subscribers')
252 config.include('rhodecode.apps._base.subscribers')
253 config.include('rhodecode.tweens')
253 config.include('rhodecode.tweens')
254
254
255 config.include('rhodecode.integrations')
255 config.include('rhodecode.integrations')
256 config.include('rhodecode.authentication')
256 config.include('rhodecode.authentication')
257
257
258 if load_all:
258 if load_all:
259 from rhodecode.authentication import discover_legacy_plugins
259 from rhodecode.authentication import discover_legacy_plugins
260 # load CE authentication plugins
260 # load CE authentication plugins
261 config.include('rhodecode.authentication.plugins.auth_crowd')
261 config.include('rhodecode.authentication.plugins.auth_crowd')
262 config.include('rhodecode.authentication.plugins.auth_headers')
262 config.include('rhodecode.authentication.plugins.auth_headers')
263 config.include('rhodecode.authentication.plugins.auth_jasig_cas')
263 config.include('rhodecode.authentication.plugins.auth_jasig_cas')
264 config.include('rhodecode.authentication.plugins.auth_ldap')
264 config.include('rhodecode.authentication.plugins.auth_ldap')
265 config.include('rhodecode.authentication.plugins.auth_pam')
265 config.include('rhodecode.authentication.plugins.auth_pam')
266 config.include('rhodecode.authentication.plugins.auth_rhodecode')
266 config.include('rhodecode.authentication.plugins.auth_rhodecode')
267 config.include('rhodecode.authentication.plugins.auth_token')
267 config.include('rhodecode.authentication.plugins.auth_token')
268
268
269 # Auto discover authentication plugins and include their configuration.
269 # Auto discover authentication plugins and include their configuration.
270 discover_legacy_plugins(config)
270 discover_legacy_plugins(config)
271
271
272 # apps
272 # apps
273 config.include('rhodecode.apps._base')
273 config.include('rhodecode.apps._base')
274
274
275 if load_all:
275 if load_all:
276 config.include('rhodecode.apps.ops')
276 config.include('rhodecode.apps.ops')
277 config.include('rhodecode.apps.admin')
277 config.include('rhodecode.apps.admin')
278 config.include('rhodecode.apps.channelstream')
278 config.include('rhodecode.apps.channelstream')
279 config.include('rhodecode.apps.login')
279 config.include('rhodecode.apps.login')
280 config.include('rhodecode.apps.home')
280 config.include('rhodecode.apps.home')
281 config.include('rhodecode.apps.journal')
281 config.include('rhodecode.apps.journal')
282 config.include('rhodecode.apps.repository')
282 config.include('rhodecode.apps.repository')
283 config.include('rhodecode.apps.repo_group')
283 config.include('rhodecode.apps.repo_group')
284 config.include('rhodecode.apps.user_group')
284 config.include('rhodecode.apps.user_group')
285 config.include('rhodecode.apps.search')
285 config.include('rhodecode.apps.search')
286 config.include('rhodecode.apps.user_profile')
286 config.include('rhodecode.apps.user_profile')
287 config.include('rhodecode.apps.user_group_profile')
287 config.include('rhodecode.apps.user_group_profile')
288 config.include('rhodecode.apps.my_account')
288 config.include('rhodecode.apps.my_account')
289 config.include('rhodecode.apps.svn_support')
289 config.include('rhodecode.apps.svn_support')
290 config.include('rhodecode.apps.ssh_support')
290 config.include('rhodecode.apps.ssh_support')
291 config.include('rhodecode.apps.gist')
291 config.include('rhodecode.apps.gist')
292 config.include('rhodecode.apps.debug_style')
292 config.include('rhodecode.apps.debug_style')
293 config.include('rhodecode.api')
293 config.include('rhodecode.api')
294
294
295 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
295 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
296 config.add_translation_dirs('rhodecode:i18n/')
296 config.add_translation_dirs('rhodecode:i18n/')
297 settings['default_locale_name'] = settings.get('lang', 'en')
297 settings['default_locale_name'] = settings.get('lang', 'en')
298
298
299 # Add subscribers.
299 # Add subscribers.
300 config.add_subscriber(inject_app_settings,
300 config.add_subscriber(inject_app_settings,
301 pyramid.events.ApplicationCreated)
301 pyramid.events.ApplicationCreated)
302 config.add_subscriber(scan_repositories_if_enabled,
302 config.add_subscriber(scan_repositories_if_enabled,
303 pyramid.events.ApplicationCreated)
303 pyramid.events.ApplicationCreated)
304 config.add_subscriber(write_metadata_if_needed,
304 config.add_subscriber(write_metadata_if_needed,
305 pyramid.events.ApplicationCreated)
305 pyramid.events.ApplicationCreated)
306 config.add_subscriber(write_js_routes_if_enabled,
306 config.add_subscriber(write_js_routes_if_enabled,
307 pyramid.events.ApplicationCreated)
307 pyramid.events.ApplicationCreated)
308
308
309 # request custom methods
309 # request custom methods
310 config.add_request_method(
310 config.add_request_method(
311 'rhodecode.lib.partial_renderer.get_partial_renderer',
311 'rhodecode.lib.partial_renderer.get_partial_renderer',
312 'get_partial_renderer')
312 'get_partial_renderer')
313
313
314 # Set the authorization policy.
314 # Set the authorization policy.
315 authz_policy = ACLAuthorizationPolicy()
315 authz_policy = ACLAuthorizationPolicy()
316 config.set_authorization_policy(authz_policy)
316 config.set_authorization_policy(authz_policy)
317
317
318 # Set the default renderer for HTML templates to mako.
318 # Set the default renderer for HTML templates to mako.
319 config.add_mako_renderer('.html')
319 config.add_mako_renderer('.html')
320
320
321 config.add_renderer(
321 config.add_renderer(
322 name='json_ext',
322 name='json_ext',
323 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
323 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
324
324
325 # include RhodeCode plugins
325 # include RhodeCode plugins
326 includes = aslist(settings.get('rhodecode.includes', []))
326 includes = aslist(settings.get('rhodecode.includes', []))
327 for inc in includes:
327 for inc in includes:
328 config.include(inc)
328 config.include(inc)
329
329
330 # custom not found view, if our pyramid app doesn't know how to handle
330 # custom not found view, if our pyramid app doesn't know how to handle
331 # the request pass it to potential VCS handling ap
331 # the request pass it to potential VCS handling ap
332 config.add_notfound_view(not_found_view)
332 config.add_notfound_view(not_found_view)
333 if not settings.get('debugtoolbar.enabled', False):
333 if not settings.get('debugtoolbar.enabled', False):
334 # disabled debugtoolbar handle all exceptions via the error_handlers
334 # disabled debugtoolbar handle all exceptions via the error_handlers
335 config.add_view(error_handler, context=Exception)
335 config.add_view(error_handler, context=Exception)
336
336
337 # all errors including 403/404/50X
337 # all errors including 403/404/50X
338 config.add_view(error_handler, context=HTTPError)
338 config.add_view(error_handler, context=HTTPError)
339
339
340
340
def wrap_app_in_wsgi_middlewares(pyramid_app, config):
    """
    Apply outer WSGI middlewares around the application.

    Wrapping order matters: ``HttpsFixup`` sits closest to the app, then
    appenlight error tracking, then optional gzip compression, and the
    DB-session cleanup wrapper is the outermost layer so it runs around
    everything else (including middlewares that touch the database).

    :param pyramid_app: the WSGI application produced by pyramid.
    :param config: pyramid ``Configurator``; its registry settings decide
        which middlewares get enabled.
    :return: the fully wrapped WSGI callable.
    """
    registry = config.registry
    settings = registry.settings

    # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
    pyramid_app = HttpsFixup(pyramid_app, settings)

    # error-tracking client is kept on the registry so other components can
    # report through the same client (may be None when appenlight is off)
    pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
        pyramid_app, settings)
    registry.ae_client = _ae_client

    if settings['gzip_responses']:
        pyramid_app = make_gzip_middleware(
            pyramid_app, settings, compress_level=1)

    # this should be the outer most middleware in the wsgi stack since
    # middleware like Routes make database calls
    def pyramid_app_with_cleanup(environ, start_response):
        try:
            return pyramid_app(environ, start_response)
        finally:
            # Dispose current database session and rollback uncommitted
            # transactions.
            meta.Session.remove()

            # In a single threaded mode server, on non sqlite db we should have
            # '0 Current Checked out connections' at the end of a request,
            # if not, then something, somewhere is leaving a connection open
            pool = meta.Base.metadata.bind.engine.pool
            log.debug('sa pool status: %s', pool.status())
            log.debug('Request processing finalized')

    return pyramid_app_with_cleanup
377
377
378
378
def sanitize_settings_and_apply_defaults(settings):
    """
    Applies settings defaults and does all type conversion.

    We would move all settings parsing and preparation into this place, so that
    we have only one place left which deals with this part. The remaining parts
    of the application would start to rely fully on well prepared settings.

    This piece would later be split up per topic to avoid a big fat monster
    function.

    :param settings: mutable dict of raw (mostly string) settings from the
        ini file; sanitized/converted values are written back into it.
    :return: the same ``settings`` dict, mutated in place.
    """

    settings.setdefault('rhodecode.edition', 'Community Edition')

    if 'mako.default_filters' not in settings:
        # set custom default filters if we don't have it defined
        settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
        settings['mako.default_filters'] = 'h_filter'

    if 'mako.directories' not in settings:
        mako_directories = settings.setdefault('mako.directories', [
            # Base templates of the original application
            'rhodecode:templates',
        ])
        log.debug(
            "Using the following Mako template directories: %s",
            mako_directories)

    # Default includes, possible to change as a user
    pyramid_includes = settings.setdefault('pyramid.includes', [
        'rhodecode.lib.middleware.request_wrapper',
    ])
    log.debug(
        "Using the following pyramid.includes: %s",
        pyramid_includes)

    # TODO: johbo: Re-think this, usually the call to config.include
    # should allow to pass in a prefix.
    settings.setdefault('rhodecode.api.url', '/_admin/api')

    # Sanitize generic settings.
    _list_setting(settings, 'default_encoding', 'UTF-8')
    _bool_setting(settings, 'is_test', 'false')
    _bool_setting(settings, 'gzip_responses', 'false')

    # Call split out functions that sanitize settings for each topic.
    _sanitize_appenlight_settings(settings)
    _sanitize_vcs_settings(settings)
    _sanitize_cache_settings(settings)

    # configure instance id
    config_utils.set_instance_id(settings)

    return settings
433
433
434
434
def _sanitize_appenlight_settings(settings):
    # Appenlight error tracking is opt-in; coerce its flag to a real bool.
    _bool_setting(settings, 'appenlight', 'false')
437
437
438
438
def _sanitize_vcs_settings(settings):
    """
    Applies settings defaults and does type conversion for all VCS related
    settings.
    """
    # Plain string settings (lower-cased by _string_setting's default).
    string_defaults = [
        ('vcs.svn.compatible_version', ''),
        ('git_rev_filter', '--all'),
        ('vcs.hooks.protocol', 'http'),
        ('vcs.hooks.host', '127.0.0.1'),
        ('vcs.scm_app_implementation', 'http'),
        ('vcs.server', ''),
        ('vcs.server.log_level', 'debug'),
        ('vcs.server.protocol', 'http'),
    ]
    for key, default in string_defaults:
        _string_setting(settings, key, default)

    # Boolean flags.
    bool_defaults = [
        ('startup.import_repos', 'false'),
        ('vcs.hooks.direct_calls', 'false'),
        ('vcs.server.enable', 'true'),
        ('vcs.start_server', 'false'),
    ]
    for key, default in bool_defaults:
        _bool_setting(settings, key, default)

    _list_setting(settings, 'vcs.backends', 'hg, git, svn')
    _int_setting(settings, 'vcs.connection_timeout', 3600)

    # Support legacy values of vcs.scm_app_implementation. Legacy
    # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
    # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
    legacy_impls = (
        'rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app')
    if settings['vcs.scm_app_implementation'] in legacy_impls:
        settings['vcs.scm_app_implementation'] = 'http'
465
465
466
466
def _sanitize_cache_settings(settings):
    """
    Applies defaults and type conversion for all cache related settings.

    Ensures the shared cache directory exists, then configures each dogpile
    cache region: backend, expiration time, and either a storage file (for
    file-namespace backends) or a max size (for in-memory LRU backends).
    """
    temp_store = tempfile.gettempdir()
    default_cache_dir = os.path.join(temp_store, 'rc_cache')

    # save default, cache dir, and use it for all backends later.
    default_cache_dir = _string_setting(
        settings,
        'cache_dir',
        default_cache_dir, lower=False, default_when_empty=True)

    # ensure we have our dir created
    if not os.path.isdir(default_cache_dir):
        # NOTE: 0o prefix required — a bare `0755` octal literal is a
        # SyntaxError on Python 3; 0o works on 2.6+ and 3.x alike.
        os.makedirs(default_cache_dir, mode=0o755)

    # exception store cache
    _string_setting(
        settings,
        'exception_tracker.store_path',
        temp_store, lower=False, default_when_empty=True)

    def _file_namespace_region(region, filename, expiration_time):
        # file-backed dogpile region: backend + expiration + storage file
        _string_setting(
            settings,
            'rc_cache.%s.backend' % region,
            'dogpile.cache.rc.file_namespace', lower=False)
        _int_setting(
            settings,
            'rc_cache.%s.expiration_time' % region,
            expiration_time)
        _string_setting(
            settings,
            'rc_cache.%s.arguments.filename' % region,
            os.path.join(default_cache_dir, filename), lower=False)

    def _memory_lru_region(region, expiration_time, max_size):
        # in-memory LRU dogpile region: backend + expiration + max entries
        _string_setting(
            settings,
            'rc_cache.%s.backend' % region,
            'dogpile.cache.rc.memory_lru', lower=False)
        _int_setting(
            settings,
            'rc_cache.%s.expiration_time' % region,
            expiration_time)
        _int_setting(
            settings,
            'rc_cache.%s.max_size' % region,
            max_size)

    _file_namespace_region('cache_perms', 'rc_cache_1', 60)
    _file_namespace_region('cache_repo', 'rc_cache_2', 60)
    _file_namespace_region('cache_license', 'rc_cache_3', 5 * 60)

    # cache_repo_longterm memory, 96H
    _memory_lru_region('cache_repo_longterm', 345600, 10000)
    # sql_cache_short
    _memory_lru_region('sql_cache_short', 30, 10000)
556
556
557
557
558 def _int_setting(settings, name, default):
558 def _int_setting(settings, name, default):
559 settings[name] = int(settings.get(name, default))
559 settings[name] = int(settings.get(name, default))
560 return settings[name]
560 return settings[name]
561
561
562
562
def _bool_setting(settings, name, default):
    """Coerce ``settings[name]`` (or ``default``) to ``bool`` via ``asbool``."""
    raw = settings.get(name, default)
    if isinstance(raw, unicode):
        # asbool expects byte strings on python 2; normalize unicode input
        raw = raw.encode('utf8')
    converted = asbool(raw)
    settings[name] = converted
    return converted
569
569
570
570
def _list_setting(settings, name, default):
    """Parse ``settings[name]`` (or ``default``) into a list, store and return it."""
    raw_value = settings.get(name, default)

    if ',' in raw_value:
        # Comma separated values are split with our own helper.
        parsed = rhodecode_aslist(raw_value, sep=',')
    else:
        # Otherwise assume pyramid's space/newline separated format.
        parsed = aslist(raw_value)

    settings[name] = parsed
    return parsed
582
582
583
583
584 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
584 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
585 value = settings.get(name, default)
585 value = settings.get(name, default)
586
586
587 if default_when_empty and not value:
587 if default_when_empty and not value:
588 # use default value when value is empty
588 # use default value when value is empty
589 value = default
589 value = default
590
590
591 if lower:
591 if lower:
592 value = value.lower()
592 value = value.lower()
593 settings[name] = value
593 settings[name] = value
594 return settings[name]
594 return settings[name]
595
595
596
596
597 def _substitute_values(mapping, substitutions):
597 def _substitute_values(mapping, substitutions):
598
598
599 try:
599 try:
600 result = {
600 result = {
601 # Note: Cannot use regular replacements, since they would clash
601 # Note: Cannot use regular replacements, since they would clash
602 # with the implementation of ConfigParser. Using "format" instead.
602 # with the implementation of ConfigParser. Using "format" instead.
603 key: value.format(**substitutions)
603 key: value.format(**substitutions)
604 for key, value in mapping.items()
604 for key, value in mapping.items()
605 }
605 }
606 except KeyError as e:
606 except KeyError as e:
607 raise ValueError(
607 raise ValueError(
608 'Failed to substitute env variable: {}. '
608 'Failed to substitute env variable: {}. '
609 'Make sure you have specified this env variable without ENV_ prefix'.format(e))
609 'Make sure you have specified this env variable without ENV_ prefix'.format(e))
610 except ValueError as e:
610 except ValueError as e:
611 log.warning('Failed to substitute ENV variable: %s', e)
611 log.warning('Failed to substitute ENV variable: %s', e)
612 result = mapping
612 result = mapping
613
613
614 return result
614 return result
@@ -1,1755 +1,1755 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34 import shutil
34 import shutil
35
35
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
37
37
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
log = logging.getLogger(__name__)


# File mode bits for regular and executable blobs. The 0o prefix is
# required: py2-style bare octal literals (0100644) are a SyntaxError
# on Python 3, while 0o works on Python 2.6+ and 3.x alike.
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755

# (type, name, commit_id) triple identifying a branch/tag/bookmark ref.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
# Outcome of a server-side merge attempt; failure_reason maps to
# MergeFailureReason values.
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
60
61
61
class MergeFailureReason(object):
    """
    Enumeration of the reasons a server side merge can fail.

    The numeric values may be persisted in the database, so they must never
    be renumbered or reused. Renaming a member to deprecate an old reason is
    fine and encouraged.
    """

    # merge finished successfully
    NONE = 0

    # unexpected exception was raised; details are in the logs
    UNKNOWN = 1

    # merge produced conflicts
    MERGE_FAILED = 2

    # merge succeeded but pushing it to the target repository failed
    PUSH_FAILED = 3

    # the chosen target commit is not a head of the target repository
    TARGET_IS_NOT_HEAD = 4

    # the source has more branches than the target; pushing the merge
    # would create additional branches in the target
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # the target reference resolves to multiple heads, making the target
    # location ambiguous (can only happen with mercurial branches)
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # the target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # an involved commit could not be found
    _DEPRECATED_MISSING_COMMIT = 8

    # the target repository reference is missing
    MISSING_TARGET_REF = 9

    # the source repository reference is missing
    MISSING_SOURCE_REF = 10

    # merge failed due to conflicts related to sub repositories
    SUBREPO_MERGE_FAILED = 11
113
113
114
114
class UpdateFailureReason(object):
    """
    Enumeration of the reasons a pull request update can fail.

    The numeric values may be persisted in the database, so they must never
    be renumbered or reused. Renaming a member to deprecate an old reason is
    fine and encouraged.
    """

    # update finished successfully
    NONE = 0

    # unexpected exception was raised; details are in the logs
    UNKNOWN = 1

    # the pull request is already up to date
    NO_CHANGE = 2

    # the pull request reference type does not support updates
    WRONG_REF_TYPE = 3

    # the target reference is missing
    MISSING_TARGET_REF = 4

    # the source reference is missing
    MISSING_SOURCE_REF = 5
143
143
144
144
145 class BaseRepository(object):
145 class BaseRepository(object):
146 """
146 """
147 Base Repository for final backends
147 Base Repository for final backends
148
148
149 .. attribute:: DEFAULT_BRANCH_NAME
149 .. attribute:: DEFAULT_BRANCH_NAME
150
150
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
152
152
153 .. attribute:: commit_ids
153 .. attribute:: commit_ids
154
154
155 list of all available commit ids, in ascending order
155 list of all available commit ids, in ascending order
156
156
157 .. attribute:: path
157 .. attribute:: path
158
158
159 absolute path to the repository
159 absolute path to the repository
160
160
161 .. attribute:: bookmarks
161 .. attribute:: bookmarks
162
162
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
164 there are no bookmarks or the backend implementation does not support
165 bookmarks.
165 bookmarks.
166
166
167 .. attribute:: tags
167 .. attribute:: tags
168
168
169 Mapping from name to :term:`Commit ID` of the tag.
169 Mapping from name to :term:`Commit ID` of the tag.
170
170
171 """
171 """
172
172
    DEFAULT_BRANCH_NAME = None         # backends override, e.g. 'master'/'default'/'trunk'
    DEFAULT_CONTACT = u"Unknown"       # fallback repository contact
    DEFAULT_DESCRIPTION = u"unknown"   # fallback repository description
    EMPTY_COMMIT_ID = '0' * 40         # 40-zero hash marking the null commit

    # absolute filesystem path of the repository; set by backend __init__
    path = None
179
179
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be find at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        Abstract - concrete backends must implement this constructor.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
            would be cloned; requires ``create`` parameter to be set to True -
            raises RepositoryError if src_url is set and create evaluates to
            False
        """
        raise NotImplementedError
195
195
196 def __repr__(self):
196 def __repr__(self):
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
198
198
    def __len__(self):
        # number of commits in the repository; delegates to the backend's count()
        return self.count()
201
201
202 def __eq__(self, other):
202 def __eq__(self, other):
203 same_instance = isinstance(other, self.__class__)
203 same_instance = isinstance(other, self.__class__)
204 return same_instance and other.path == self.path
204 return same_instance and other.path == self.path
205
205
    def __ne__(self, other):
        # python 2 does not derive __ne__ from __eq__, so define it explicitly
        return not self.__eq__(other)
208
208
209 def get_create_shadow_cache_pr_path(self, db_repo):
209 def get_create_shadow_cache_pr_path(self, db_repo):
210 path = db_repo.cached_diffs_dir
210 path = db_repo.cached_diffs_dir
211 if not os.path.exists(path):
211 if not os.path.exists(path):
212 os.makedirs(path, 0755)
212 os.makedirs(path, 0o755)
213 return path
213 return path
214
214
215 @classmethod
215 @classmethod
216 def get_default_config(cls, default=None):
216 def get_default_config(cls, default=None):
217 config = Config()
217 config = Config()
218 if default and isinstance(default, list):
218 if default and isinstance(default, list):
219 for section, key, val in default:
219 for section, key, val in default:
220 config.set(section, key, val)
220 config.set(section, key, val)
221 return config
221 return config
222
222
    @LazyProperty
    def _remote(self):
        # handle to the remote vcsserver endpoint; backends must provide it
        raise NotImplementedError
226
226
    @LazyProperty
    def EMPTY_COMMIT(self):
        # sentinel commit object representing the "null" (all-zero id) revision
        return EmptyCommit(self.EMPTY_COMMIT_ID)
230
230
231 @LazyProperty
231 @LazyProperty
232 def alias(self):
232 def alias(self):
233 for k, v in settings.BACKENDS.items():
233 for k, v in settings.BACKENDS.items():
234 if v.split('.')[-1] == str(self.__class__.__name__):
234 if v.split('.')[-1] == str(self.__class__.__name__):
235 return k
235 return k
236
236
    @LazyProperty
    def name(self):
        # repository name derived from the last path segment, as unicode
        return safe_unicode(os.path.basename(self.path))
240
240
241 @LazyProperty
241 @LazyProperty
242 def description(self):
242 def description(self):
243 raise NotImplementedError
243 raise NotImplementedError
244
244
245 def refs(self):
245 def refs(self):
246 """
246 """
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
248 for this repository
248 for this repository
249 """
249 """
250 return dict(
250 return dict(
251 branches=self.branches,
251 branches=self.branches,
252 branches_closed=self.branches_closed,
252 branches_closed=self.branches_closed,
253 tags=self.tags,
253 tags=self.tags,
254 bookmarks=self.bookmarks
254 bookmarks=self.bookmarks
255 )
255 )
256
256
257 @LazyProperty
257 @LazyProperty
258 def branches(self):
258 def branches(self):
259 """
259 """
260 A `dict` which maps branch names to commit ids.
260 A `dict` which maps branch names to commit ids.
261 """
261 """
262 raise NotImplementedError
262 raise NotImplementedError
263
263
264 @LazyProperty
264 @LazyProperty
265 def branches_closed(self):
265 def branches_closed(self):
266 """
266 """
267 A `dict` which maps tags names to commit ids.
267 A `dict` which maps tags names to commit ids.
268 """
268 """
269 raise NotImplementedError
269 raise NotImplementedError
270
270
271 @LazyProperty
271 @LazyProperty
272 def bookmarks(self):
272 def bookmarks(self):
273 """
273 """
274 A `dict` which maps tags names to commit ids.
274 A `dict` which maps tags names to commit ids.
275 """
275 """
276 raise NotImplementedError
276 raise NotImplementedError
277
277
278 @LazyProperty
278 @LazyProperty
279 def tags(self):
279 def tags(self):
280 """
280 """
281 A `dict` which maps tags names to commit ids.
281 A `dict` which maps tags names to commit ids.
282 """
282 """
283 raise NotImplementedError
283 raise NotImplementedError
284
284
285 @LazyProperty
285 @LazyProperty
286 def size(self):
286 def size(self):
287 """
287 """
288 Returns combined size in bytes for all repository files
288 Returns combined size in bytes for all repository files
289 """
289 """
290 tip = self.get_commit()
290 tip = self.get_commit()
291 return tip.size
291 return tip.size
292
292
293 def size_at_commit(self, commit_id):
293 def size_at_commit(self, commit_id):
294 commit = self.get_commit(commit_id)
294 commit = self.get_commit(commit_id)
295 return commit.size
295 return commit.size
296
296
297 def is_empty(self):
297 def is_empty(self):
298 return not bool(self.commit_ids)
298 return not bool(self.commit_ids)
299
299
300 @staticmethod
300 @staticmethod
301 def check_url(url, config):
301 def check_url(url, config):
302 """
302 """
303 Function will check given url and try to verify if it's a valid
303 Function will check given url and try to verify if it's a valid
304 link.
304 link.
305 """
305 """
306 raise NotImplementedError
306 raise NotImplementedError
307
307
308 @staticmethod
308 @staticmethod
309 def is_valid_repository(path):
309 def is_valid_repository(path):
310 """
310 """
311 Check if given `path` contains a valid repository of this backend
311 Check if given `path` contains a valid repository of this backend
312 """
312 """
313 raise NotImplementedError
313 raise NotImplementedError
314
314
315 # ==========================================================================
315 # ==========================================================================
316 # COMMITS
316 # COMMITS
317 # ==========================================================================
317 # ==========================================================================
318
318
319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
320 """
320 """
321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
322 are both None, most recent commit is returned.
322 are both None, most recent commit is returned.
323
323
324 :param pre_load: Optional. List of commit attributes to load.
324 :param pre_load: Optional. List of commit attributes to load.
325
325
326 :raises ``EmptyRepositoryError``: if there are no commits
326 :raises ``EmptyRepositoryError``: if there are no commits
327 """
327 """
328 raise NotImplementedError
328 raise NotImplementedError
329
329
330 def __iter__(self):
330 def __iter__(self):
331 for commit_id in self.commit_ids:
331 for commit_id in self.commit_ids:
332 yield self.get_commit(commit_id=commit_id)
332 yield self.get_commit(commit_id=commit_id)
333
333
334 def get_commits(
334 def get_commits(
335 self, start_id=None, end_id=None, start_date=None, end_date=None,
335 self, start_id=None, end_id=None, start_date=None, end_date=None,
336 branch_name=None, show_hidden=False, pre_load=None):
336 branch_name=None, show_hidden=False, pre_load=None):
337 """
337 """
338 Returns iterator of `BaseCommit` objects from start to end
338 Returns iterator of `BaseCommit` objects from start to end
339 not inclusive. This should behave just like a list, ie. end is not
339 not inclusive. This should behave just like a list, ie. end is not
340 inclusive.
340 inclusive.
341
341
342 :param start_id: None or str, must be a valid commit id
342 :param start_id: None or str, must be a valid commit id
343 :param end_id: None or str, must be a valid commit id
343 :param end_id: None or str, must be a valid commit id
344 :param start_date:
344 :param start_date:
345 :param end_date:
345 :param end_date:
346 :param branch_name:
346 :param branch_name:
347 :param show_hidden:
347 :param show_hidden:
348 :param pre_load:
348 :param pre_load:
349 """
349 """
350 raise NotImplementedError
350 raise NotImplementedError
351
351
352 def __getitem__(self, key):
352 def __getitem__(self, key):
353 """
353 """
354 Allows index based access to the commit objects of this repository.
354 Allows index based access to the commit objects of this repository.
355 """
355 """
356 pre_load = ["author", "branch", "date", "message", "parents"]
356 pre_load = ["author", "branch", "date", "message", "parents"]
357 if isinstance(key, slice):
357 if isinstance(key, slice):
358 return self._get_range(key, pre_load)
358 return self._get_range(key, pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
360
360
361 def _get_range(self, slice_obj, pre_load):
361 def _get_range(self, slice_obj, pre_load):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
364
364
365 def count(self):
365 def count(self):
366 return len(self.commit_ids)
366 return len(self.commit_ids)
367
367
368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
369 """
369 """
370 Creates and returns a tag for the given ``commit_id``.
370 Creates and returns a tag for the given ``commit_id``.
371
371
372 :param name: name for new tag
372 :param name: name for new tag
373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
374 :param commit_id: commit id for which new tag would be created
374 :param commit_id: commit id for which new tag would be created
375 :param message: message of the tag's commit
375 :param message: message of the tag's commit
376 :param date: date of tag's commit
376 :param date: date of tag's commit
377
377
378 :raises TagAlreadyExistError: if tag with same name already exists
378 :raises TagAlreadyExistError: if tag with same name already exists
379 """
379 """
380 raise NotImplementedError
380 raise NotImplementedError
381
381
382 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
383 """
383 """
384 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
385
385
386 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
390
390
391 :raises TagDoesNotExistError: if tag with given name does not exists
391 :raises TagDoesNotExistError: if tag with given name does not exists
392 """
392 """
393 raise NotImplementedError
393 raise NotImplementedError
394
394
395 def get_diff(
395 def get_diff(
396 self, commit1, commit2, path=None, ignore_whitespace=False,
396 self, commit1, commit2, path=None, ignore_whitespace=False,
397 context=3, path1=None):
397 context=3, path1=None):
398 """
398 """
399 Returns (git like) *diff*, as plain text. Shows changes introduced by
399 Returns (git like) *diff*, as plain text. Shows changes introduced by
400 `commit2` since `commit1`.
400 `commit2` since `commit1`.
401
401
402 :param commit1: Entry point from which diff is shown. Can be
402 :param commit1: Entry point from which diff is shown. Can be
403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
404 the changes since empty state of the repository until `commit2`
404 the changes since empty state of the repository until `commit2`
405 :param commit2: Until which commit changes should be shown.
405 :param commit2: Until which commit changes should be shown.
406 :param path: Can be set to a path of a file to create a diff of that
406 :param path: Can be set to a path of a file to create a diff of that
407 file. If `path1` is also set, this value is only associated to
407 file. If `path1` is also set, this value is only associated to
408 `commit2`.
408 `commit2`.
409 :param ignore_whitespace: If set to ``True``, would not show whitespace
409 :param ignore_whitespace: If set to ``True``, would not show whitespace
410 changes. Defaults to ``False``.
410 changes. Defaults to ``False``.
411 :param context: How many lines before/after changed lines should be
411 :param context: How many lines before/after changed lines should be
412 shown. Defaults to ``3``.
412 shown. Defaults to ``3``.
413 :param path1: Can be set to a path to associate with `commit1`. This
413 :param path1: Can be set to a path to associate with `commit1`. This
414 parameter works only for backends which support diff generation for
414 parameter works only for backends which support diff generation for
415 different paths. Other backends will raise a `ValueError` if `path1`
415 different paths. Other backends will raise a `ValueError` if `path1`
416 is set and has a different value than `path`.
416 is set and has a different value than `path`.
417 :param file_path: filter this diff by given path pattern
417 :param file_path: filter this diff by given path pattern
418 """
418 """
419 raise NotImplementedError
419 raise NotImplementedError
420
420
421 def strip(self, commit_id, branch=None):
421 def strip(self, commit_id, branch=None):
422 """
422 """
423 Strip given commit_id from the repository
423 Strip given commit_id from the repository
424 """
424 """
425 raise NotImplementedError
425 raise NotImplementedError
426
426
427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
428 """
428 """
429 Return a latest common ancestor commit if one exists for this repo
429 Return a latest common ancestor commit if one exists for this repo
430 `commit_id1` vs `commit_id2` from `repo2`.
430 `commit_id1` vs `commit_id2` from `repo2`.
431
431
432 :param commit_id1: Commit it from this repository to use as a
432 :param commit_id1: Commit it from this repository to use as a
433 target for the comparison.
433 target for the comparison.
434 :param commit_id2: Source commit id to use for comparison.
434 :param commit_id2: Source commit id to use for comparison.
435 :param repo2: Source repository to use for comparison.
435 :param repo2: Source repository to use for comparison.
436 """
436 """
437 raise NotImplementedError
437 raise NotImplementedError
438
438
439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
440 """
440 """
441 Compare this repository's revision `commit_id1` with `commit_id2`.
441 Compare this repository's revision `commit_id1` with `commit_id2`.
442
442
443 Returns a tuple(commits, ancestor) that would be merged from
443 Returns a tuple(commits, ancestor) that would be merged from
444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
445 will be returned as ancestor.
445 will be returned as ancestor.
446
446
447 :param commit_id1: Commit it from this repository to use as a
447 :param commit_id1: Commit it from this repository to use as a
448 target for the comparison.
448 target for the comparison.
449 :param commit_id2: Source commit id to use for comparison.
449 :param commit_id2: Source commit id to use for comparison.
450 :param repo2: Source repository to use for comparison.
450 :param repo2: Source repository to use for comparison.
451 :param merge: If set to ``True`` will do a merge compare which also
451 :param merge: If set to ``True`` will do a merge compare which also
452 returns the common ancestor.
452 returns the common ancestor.
453 :param pre_load: Optional. List of commit attributes to load.
453 :param pre_load: Optional. List of commit attributes to load.
454 """
454 """
455 raise NotImplementedError
455 raise NotImplementedError
456
456
457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
458 user_name='', user_email='', message='', dry_run=False,
458 user_name='', user_email='', message='', dry_run=False,
459 use_rebase=False, close_branch=False):
459 use_rebase=False, close_branch=False):
460 """
460 """
461 Merge the revisions specified in `source_ref` from `source_repo`
461 Merge the revisions specified in `source_ref` from `source_repo`
462 onto the `target_ref` of this repository.
462 onto the `target_ref` of this repository.
463
463
464 `source_ref` and `target_ref` are named tupls with the following
464 `source_ref` and `target_ref` are named tupls with the following
465 fields `type`, `name` and `commit_id`.
465 fields `type`, `name` and `commit_id`.
466
466
467 Returns a MergeResponse named tuple with the following fields
467 Returns a MergeResponse named tuple with the following fields
468 'possible', 'executed', 'source_commit', 'target_commit',
468 'possible', 'executed', 'source_commit', 'target_commit',
469 'merge_commit'.
469 'merge_commit'.
470
470
471 :param repo_id: `repo_id` target repo id.
471 :param repo_id: `repo_id` target repo id.
472 :param workspace_id: `workspace_id` unique identifier.
472 :param workspace_id: `workspace_id` unique identifier.
473 :param target_ref: `target_ref` points to the commit on top of which
473 :param target_ref: `target_ref` points to the commit on top of which
474 the `source_ref` should be merged.
474 the `source_ref` should be merged.
475 :param source_repo: The repository that contains the commits to be
475 :param source_repo: The repository that contains the commits to be
476 merged.
476 merged.
477 :param source_ref: `source_ref` points to the topmost commit from
477 :param source_ref: `source_ref` points to the topmost commit from
478 the `source_repo` which should be merged.
478 the `source_repo` which should be merged.
479 :param user_name: Merge commit `user_name`.
479 :param user_name: Merge commit `user_name`.
480 :param user_email: Merge commit `user_email`.
480 :param user_email: Merge commit `user_email`.
481 :param message: Merge commit `message`.
481 :param message: Merge commit `message`.
482 :param dry_run: If `True` the merge will not take place.
482 :param dry_run: If `True` the merge will not take place.
483 :param use_rebase: If `True` commits from the source will be rebased
483 :param use_rebase: If `True` commits from the source will be rebased
484 on top of the target instead of being merged.
484 on top of the target instead of being merged.
485 :param close_branch: If `True` branch will be close before merging it
485 :param close_branch: If `True` branch will be close before merging it
486 """
486 """
487 if dry_run:
487 if dry_run:
488 message = message or settings.MERGE_DRY_RUN_MESSAGE
488 message = message or settings.MERGE_DRY_RUN_MESSAGE
489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
490 user_name = user_name or settings.MERGE_DRY_RUN_USER
490 user_name = user_name or settings.MERGE_DRY_RUN_USER
491 else:
491 else:
492 if not user_name:
492 if not user_name:
493 raise ValueError('user_name cannot be empty')
493 raise ValueError('user_name cannot be empty')
494 if not user_email:
494 if not user_email:
495 raise ValueError('user_email cannot be empty')
495 raise ValueError('user_email cannot be empty')
496 if not message:
496 if not message:
497 raise ValueError('message cannot be empty')
497 raise ValueError('message cannot be empty')
498
498
499 try:
499 try:
500 return self._merge_repo(
500 return self._merge_repo(
501 repo_id, workspace_id, target_ref, source_repo,
501 repo_id, workspace_id, target_ref, source_repo,
502 source_ref, message, user_name, user_email, dry_run=dry_run,
502 source_ref, message, user_name, user_email, dry_run=dry_run,
503 use_rebase=use_rebase, close_branch=close_branch)
503 use_rebase=use_rebase, close_branch=close_branch)
504 except RepositoryError:
504 except RepositoryError:
505 log.exception(
505 log.exception(
506 'Unexpected failure when running merge, dry-run=%s',
506 'Unexpected failure when running merge, dry-run=%s',
507 dry_run)
507 dry_run)
508 return MergeResponse(
508 return MergeResponse(
509 False, False, None, MergeFailureReason.UNKNOWN)
509 False, False, None, MergeFailureReason.UNKNOWN)
510
510
511 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 def _merge_repo(self, repo_id, workspace_id, target_ref,
512 source_repo, source_ref, merge_message,
512 source_repo, source_ref, merge_message,
513 merger_name, merger_email, dry_run=False,
513 merger_name, merger_email, dry_run=False,
514 use_rebase=False, close_branch=False):
514 use_rebase=False, close_branch=False):
515 """Internal implementation of merge."""
515 """Internal implementation of merge."""
516 raise NotImplementedError
516 raise NotImplementedError
517
517
518 def _maybe_prepare_merge_workspace(
518 def _maybe_prepare_merge_workspace(
519 self, repo_id, workspace_id, target_ref, source_ref):
519 self, repo_id, workspace_id, target_ref, source_ref):
520 """
520 """
521 Create the merge workspace.
521 Create the merge workspace.
522
522
523 :param workspace_id: `workspace_id` unique identifier.
523 :param workspace_id: `workspace_id` unique identifier.
524 """
524 """
525 raise NotImplementedError
525 raise NotImplementedError
526
526
527 def _get_legacy_shadow_repository_path(self, workspace_id):
527 def _get_legacy_shadow_repository_path(self, workspace_id):
528 """
528 """
529 Legacy version that was used before. We still need it for
529 Legacy version that was used before. We still need it for
530 backward compat
530 backward compat
531 """
531 """
532 return os.path.join(
532 return os.path.join(
533 os.path.dirname(self.path),
533 os.path.dirname(self.path),
534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
535
535
536 def _get_shadow_repository_path(self, repo_id, workspace_id):
536 def _get_shadow_repository_path(self, repo_id, workspace_id):
537 # The name of the shadow repository must start with '.', so it is
537 # The name of the shadow repository must start with '.', so it is
538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
540 if os.path.exists(legacy_repository_path):
540 if os.path.exists(legacy_repository_path):
541 return legacy_repository_path
541 return legacy_repository_path
542 else:
542 else:
543 return os.path.join(
543 return os.path.join(
544 os.path.dirname(self.path),
544 os.path.dirname(self.path),
545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
546
546
547 def cleanup_merge_workspace(self, repo_id, workspace_id):
547 def cleanup_merge_workspace(self, repo_id, workspace_id):
548 """
548 """
549 Remove merge workspace.
549 Remove merge workspace.
550
550
551 This function MUST not fail in case there is no workspace associated to
551 This function MUST not fail in case there is no workspace associated to
552 the given `workspace_id`.
552 the given `workspace_id`.
553
553
554 :param workspace_id: `workspace_id` unique identifier.
554 :param workspace_id: `workspace_id` unique identifier.
555 """
555 """
556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
557 shadow_repository_path_del = '{}.{}.delete'.format(
557 shadow_repository_path_del = '{}.{}.delete'.format(
558 shadow_repository_path, time.time())
558 shadow_repository_path, time.time())
559
559
560 # move the shadow repo, so it never conflicts with the one used.
560 # move the shadow repo, so it never conflicts with the one used.
561 # we use this method because shutil.rmtree had some edge case problems
561 # we use this method because shutil.rmtree had some edge case problems
562 # removing symlinked repositories
562 # removing symlinked repositories
563 if not os.path.isdir(shadow_repository_path):
563 if not os.path.isdir(shadow_repository_path):
564 return
564 return
565
565
566 shutil.move(shadow_repository_path, shadow_repository_path_del)
566 shutil.move(shadow_repository_path, shadow_repository_path_del)
567 try:
567 try:
568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
569 except Exception:
569 except Exception:
570 log.exception('Failed to gracefully remove shadow repo under %s',
570 log.exception('Failed to gracefully remove shadow repo under %s',
571 shadow_repository_path_del)
571 shadow_repository_path_del)
572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
573
573
574 # ========== #
574 # ========== #
575 # COMMIT API #
575 # COMMIT API #
576 # ========== #
576 # ========== #
577
577
578 @LazyProperty
578 @LazyProperty
579 def in_memory_commit(self):
579 def in_memory_commit(self):
580 """
580 """
581 Returns :class:`InMemoryCommit` object for this repository.
581 Returns :class:`InMemoryCommit` object for this repository.
582 """
582 """
583 raise NotImplementedError
583 raise NotImplementedError
584
584
585 # ======================== #
585 # ======================== #
586 # UTILITIES FOR SUBCLASSES #
586 # UTILITIES FOR SUBCLASSES #
587 # ======================== #
587 # ======================== #
588
588
589 def _validate_diff_commits(self, commit1, commit2):
589 def _validate_diff_commits(self, commit1, commit2):
590 """
590 """
591 Validates that the given commits are related to this repository.
591 Validates that the given commits are related to this repository.
592
592
593 Intended as a utility for sub classes to have a consistent validation
593 Intended as a utility for sub classes to have a consistent validation
594 of input parameters in methods like :meth:`get_diff`.
594 of input parameters in methods like :meth:`get_diff`.
595 """
595 """
596 self._validate_commit(commit1)
596 self._validate_commit(commit1)
597 self._validate_commit(commit2)
597 self._validate_commit(commit2)
598 if (isinstance(commit1, EmptyCommit) and
598 if (isinstance(commit1, EmptyCommit) and
599 isinstance(commit2, EmptyCommit)):
599 isinstance(commit2, EmptyCommit)):
600 raise ValueError("Cannot compare two empty commits")
600 raise ValueError("Cannot compare two empty commits")
601
601
602 def _validate_commit(self, commit):
602 def _validate_commit(self, commit):
603 if not isinstance(commit, BaseCommit):
603 if not isinstance(commit, BaseCommit):
604 raise TypeError(
604 raise TypeError(
605 "%s is not of type BaseCommit" % repr(commit))
605 "%s is not of type BaseCommit" % repr(commit))
606 if commit.repository != self and not isinstance(commit, EmptyCommit):
606 if commit.repository != self and not isinstance(commit, EmptyCommit):
607 raise ValueError(
607 raise ValueError(
608 "Commit %s must be a valid commit from this repository %s, "
608 "Commit %s must be a valid commit from this repository %s, "
609 "related to this repository instead %s." %
609 "related to this repository instead %s." %
610 (commit, self, commit.repository))
610 (commit, self, commit.repository))
611
611
612 def _validate_commit_id(self, commit_id):
612 def _validate_commit_id(self, commit_id):
613 if not isinstance(commit_id, basestring):
613 if not isinstance(commit_id, basestring):
614 raise TypeError("commit_id must be a string value")
614 raise TypeError("commit_id must be a string value")
615
615
616 def _validate_commit_idx(self, commit_idx):
616 def _validate_commit_idx(self, commit_idx):
617 if not isinstance(commit_idx, (int, long)):
617 if not isinstance(commit_idx, (int, long)):
618 raise TypeError("commit_idx must be a numeric value")
618 raise TypeError("commit_idx must be a numeric value")
619
619
620 def _validate_branch_name(self, branch_name):
620 def _validate_branch_name(self, branch_name):
621 if branch_name and branch_name not in self.branches_all:
621 if branch_name and branch_name not in self.branches_all:
622 msg = ("Branch %s not found in %s" % (branch_name, self))
622 msg = ("Branch %s not found in %s" % (branch_name, self))
623 raise BranchDoesNotExistError(msg)
623 raise BranchDoesNotExistError(msg)
624
624
625 #
625 #
626 # Supporting deprecated API parts
626 # Supporting deprecated API parts
627 # TODO: johbo: consider to move this into a mixin
627 # TODO: johbo: consider to move this into a mixin
628 #
628 #
629
629
630 @property
630 @property
631 def EMPTY_CHANGESET(self):
631 def EMPTY_CHANGESET(self):
632 warnings.warn(
632 warnings.warn(
633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
634 return self.EMPTY_COMMIT_ID
634 return self.EMPTY_COMMIT_ID
635
635
636 @property
636 @property
637 def revisions(self):
637 def revisions(self):
638 warnings.warn("Use commits attribute instead", DeprecationWarning)
638 warnings.warn("Use commits attribute instead", DeprecationWarning)
639 return self.commit_ids
639 return self.commit_ids
640
640
641 @revisions.setter
641 @revisions.setter
642 def revisions(self, value):
642 def revisions(self, value):
643 warnings.warn("Use commits attribute instead", DeprecationWarning)
643 warnings.warn("Use commits attribute instead", DeprecationWarning)
644 self.commit_ids = value
644 self.commit_ids = value
645
645
646 def get_changeset(self, revision=None, pre_load=None):
646 def get_changeset(self, revision=None, pre_load=None):
647 warnings.warn("Use get_commit instead", DeprecationWarning)
647 warnings.warn("Use get_commit instead", DeprecationWarning)
648 commit_id = None
648 commit_id = None
649 commit_idx = None
649 commit_idx = None
650 if isinstance(revision, basestring):
650 if isinstance(revision, basestring):
651 commit_id = revision
651 commit_id = revision
652 else:
652 else:
653 commit_idx = revision
653 commit_idx = revision
654 return self.get_commit(
654 return self.get_commit(
655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
656
656
657 def get_changesets(
657 def get_changesets(
658 self, start=None, end=None, start_date=None, end_date=None,
658 self, start=None, end=None, start_date=None, end_date=None,
659 branch_name=None, pre_load=None):
659 branch_name=None, pre_load=None):
660 warnings.warn("Use get_commits instead", DeprecationWarning)
660 warnings.warn("Use get_commits instead", DeprecationWarning)
661 start_id = self._revision_to_commit(start)
661 start_id = self._revision_to_commit(start)
662 end_id = self._revision_to_commit(end)
662 end_id = self._revision_to_commit(end)
663 return self.get_commits(
663 return self.get_commits(
664 start_id=start_id, end_id=end_id, start_date=start_date,
664 start_id=start_id, end_id=end_id, start_date=start_date,
665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
666
666
667 def _revision_to_commit(self, revision):
667 def _revision_to_commit(self, revision):
668 """
668 """
669 Translates a revision to a commit_id
669 Translates a revision to a commit_id
670
670
671 Helps to support the old changeset based API which allows to use
671 Helps to support the old changeset based API which allows to use
672 commit ids and commit indices interchangeable.
672 commit ids and commit indices interchangeable.
673 """
673 """
674 if revision is None:
674 if revision is None:
675 return revision
675 return revision
676
676
677 if isinstance(revision, basestring):
677 if isinstance(revision, basestring):
678 commit_id = revision
678 commit_id = revision
679 else:
679 else:
680 commit_id = self.commit_ids[revision]
680 commit_id = self.commit_ids[revision]
681 return commit_id
681 return commit_id
682
682
683 @property
683 @property
684 def in_memory_changeset(self):
684 def in_memory_changeset(self):
685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
686 return self.in_memory_commit
686 return self.in_memory_commit
687
687
688 def get_path_permissions(self, username):
688 def get_path_permissions(self, username):
689 """
689 """
690 Returns a path permission checker or None if not supported
690 Returns a path permission checker or None if not supported
691
691
692 :param username: session user name
692 :param username: session user name
693 :return: an instance of BasePathPermissionChecker or None
693 :return: an instance of BasePathPermissionChecker or None
694 """
694 """
695 return None
695 return None
696
696
697 def install_hooks(self, force=False):
697 def install_hooks(self, force=False):
698 return self._remote.install_hooks(force)
698 return self._remote.install_hooks(force)
699
699
700
700
701 class BaseCommit(object):
701 class BaseCommit(object):
702 """
702 """
703 Each backend should implement it's commit representation.
703 Each backend should implement it's commit representation.
704
704
705 **Attributes**
705 **Attributes**
706
706
707 ``repository``
707 ``repository``
708 repository object within which commit exists
708 repository object within which commit exists
709
709
710 ``id``
710 ``id``
711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
712 just ``tip``.
712 just ``tip``.
713
713
714 ``raw_id``
714 ``raw_id``
715 raw commit representation (i.e. full 40 length sha for git
715 raw commit representation (i.e. full 40 length sha for git
716 backend)
716 backend)
717
717
718 ``short_id``
718 ``short_id``
719 shortened (if apply) version of ``raw_id``; it would be simple
719 shortened (if apply) version of ``raw_id``; it would be simple
720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
721 as ``raw_id`` for subversion
721 as ``raw_id`` for subversion
722
722
723 ``idx``
723 ``idx``
724 commit index
724 commit index
725
725
726 ``files``
726 ``files``
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
728
728
729 ``dirs``
729 ``dirs``
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
731
731
732 ``nodes``
732 ``nodes``
733 combined list of ``Node`` objects
733 combined list of ``Node`` objects
734
734
735 ``author``
735 ``author``
736 author of the commit, as unicode
736 author of the commit, as unicode
737
737
738 ``message``
738 ``message``
739 message of the commit, as unicode
739 message of the commit, as unicode
740
740
741 ``parents``
741 ``parents``
742 list of parent commits
742 list of parent commits
743
743
744 """
744 """
745
745
746 branch = None
746 branch = None
747 """
747 """
748 Depending on the backend this should be set to the branch name of the
748 Depending on the backend this should be set to the branch name of the
749 commit. Backends not supporting branches on commits should leave this
749 commit. Backends not supporting branches on commits should leave this
750 value as ``None``.
750 value as ``None``.
751 """
751 """
752
752
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
754 """
754 """
755 This template is used to generate a default prefix for repository archives
755 This template is used to generate a default prefix for repository archives
756 if no prefix has been specified.
756 if no prefix has been specified.
757 """
757 """
758
758
759 def __str__(self):
759 def __str__(self):
760 return '<%s at %s:%s>' % (
760 return '<%s at %s:%s>' % (
761 self.__class__.__name__, self.idx, self.short_id)
761 self.__class__.__name__, self.idx, self.short_id)
762
762
763 def __repr__(self):
763 def __repr__(self):
764 return self.__str__()
764 return self.__str__()
765
765
766 def __unicode__(self):
766 def __unicode__(self):
767 return u'%s:%s' % (self.idx, self.short_id)
767 return u'%s:%s' % (self.idx, self.short_id)
768
768
769 def __eq__(self, other):
769 def __eq__(self, other):
770 same_instance = isinstance(other, self.__class__)
770 same_instance = isinstance(other, self.__class__)
771 return same_instance and self.raw_id == other.raw_id
771 return same_instance and self.raw_id == other.raw_id
772
772
773 def __json__(self):
773 def __json__(self):
774 parents = []
774 parents = []
775 try:
775 try:
776 for parent in self.parents:
776 for parent in self.parents:
777 parents.append({'raw_id': parent.raw_id})
777 parents.append({'raw_id': parent.raw_id})
778 except NotImplementedError:
778 except NotImplementedError:
779 # empty commit doesn't have parents implemented
779 # empty commit doesn't have parents implemented
780 pass
780 pass
781
781
782 return {
782 return {
783 'short_id': self.short_id,
783 'short_id': self.short_id,
784 'raw_id': self.raw_id,
784 'raw_id': self.raw_id,
785 'revision': self.idx,
785 'revision': self.idx,
786 'message': self.message,
786 'message': self.message,
787 'date': self.date,
787 'date': self.date,
788 'author': self.author,
788 'author': self.author,
789 'parents': parents,
789 'parents': parents,
790 'branch': self.branch
790 'branch': self.branch
791 }
791 }
792
792
793 def __getstate__(self):
793 def __getstate__(self):
794 d = self.__dict__.copy()
794 d = self.__dict__.copy()
795 d.pop('_remote', None)
795 d.pop('_remote', None)
796 d.pop('repository', None)
796 d.pop('repository', None)
797 return d
797 return d
798
798
799 def _get_refs(self):
799 def _get_refs(self):
800 return {
800 return {
801 'branches': [self.branch] if self.branch else [],
801 'branches': [self.branch] if self.branch else [],
802 'bookmarks': getattr(self, 'bookmarks', []),
802 'bookmarks': getattr(self, 'bookmarks', []),
803 'tags': self.tags
803 'tags': self.tags
804 }
804 }
805
805
806 @LazyProperty
806 @LazyProperty
807 def last(self):
807 def last(self):
808 """
808 """
809 ``True`` if this is last commit in repository, ``False``
809 ``True`` if this is last commit in repository, ``False``
810 otherwise; trying to access this attribute while there is no
810 otherwise; trying to access this attribute while there is no
811 commits would raise `EmptyRepositoryError`
811 commits would raise `EmptyRepositoryError`
812 """
812 """
813 if self.repository is None:
813 if self.repository is None:
814 raise CommitError("Cannot check if it's most recent commit")
814 raise CommitError("Cannot check if it's most recent commit")
815 return self.raw_id == self.repository.commit_ids[-1]
815 return self.raw_id == self.repository.commit_ids[-1]
816
816
817 @LazyProperty
817 @LazyProperty
818 def parents(self):
818 def parents(self):
819 """
819 """
820 Returns list of parent commits.
820 Returns list of parent commits.
821 """
821 """
822 raise NotImplementedError
822 raise NotImplementedError
823
823
824 @LazyProperty
824 @LazyProperty
825 def first_parent(self):
825 def first_parent(self):
826 """
826 """
827 Returns list of parent commits.
827 Returns list of parent commits.
828 """
828 """
829 return self.parents[0] if self.parents else EmptyCommit()
829 return self.parents[0] if self.parents else EmptyCommit()
830
830
831 @property
831 @property
832 def merge(self):
832 def merge(self):
833 """
833 """
834 Returns boolean if commit is a merge.
834 Returns boolean if commit is a merge.
835 """
835 """
836 return len(self.parents) > 1
836 return len(self.parents) > 1
837
837
838 @LazyProperty
838 @LazyProperty
839 def children(self):
839 def children(self):
840 """
840 """
841 Returns list of child commits.
841 Returns list of child commits.
842 """
842 """
843 raise NotImplementedError
843 raise NotImplementedError
844
844
845 @LazyProperty
845 @LazyProperty
846 def id(self):
846 def id(self):
847 """
847 """
848 Returns string identifying this commit.
848 Returns string identifying this commit.
849 """
849 """
850 raise NotImplementedError
850 raise NotImplementedError
851
851
852 @LazyProperty
852 @LazyProperty
853 def raw_id(self):
853 def raw_id(self):
854 """
854 """
855 Returns raw string identifying this commit.
855 Returns raw string identifying this commit.
856 """
856 """
857 raise NotImplementedError
857 raise NotImplementedError
858
858
859 @LazyProperty
859 @LazyProperty
860 def short_id(self):
860 def short_id(self):
861 """
861 """
862 Returns shortened version of ``raw_id`` attribute, as string,
862 Returns shortened version of ``raw_id`` attribute, as string,
863 identifying this commit, useful for presentation to users.
863 identifying this commit, useful for presentation to users.
864 """
864 """
865 raise NotImplementedError
865 raise NotImplementedError
866
866
867 @LazyProperty
867 @LazyProperty
868 def idx(self):
868 def idx(self):
869 """
869 """
870 Returns integer identifying this commit.
870 Returns integer identifying this commit.
871 """
871 """
872 raise NotImplementedError
872 raise NotImplementedError
873
873
874 @LazyProperty
874 @LazyProperty
875 def committer(self):
875 def committer(self):
876 """
876 """
877 Returns committer for this commit
877 Returns committer for this commit
878 """
878 """
879 raise NotImplementedError
879 raise NotImplementedError
880
880
881 @LazyProperty
881 @LazyProperty
882 def committer_name(self):
882 def committer_name(self):
883 """
883 """
884 Returns committer name for this commit
884 Returns committer name for this commit
885 """
885 """
886
886
887 return author_name(self.committer)
887 return author_name(self.committer)
888
888
889 @LazyProperty
889 @LazyProperty
890 def committer_email(self):
890 def committer_email(self):
891 """
891 """
892 Returns committer email address for this commit
892 Returns committer email address for this commit
893 """
893 """
894
894
895 return author_email(self.committer)
895 return author_email(self.committer)
896
896
897 @LazyProperty
897 @LazyProperty
898 def author(self):
898 def author(self):
899 """
899 """
900 Returns author for this commit
900 Returns author for this commit
901 """
901 """
902
902
903 raise NotImplementedError
903 raise NotImplementedError
904
904
905 @LazyProperty
905 @LazyProperty
906 def author_name(self):
906 def author_name(self):
907 """
907 """
908 Returns author name for this commit
908 Returns author name for this commit
909 """
909 """
910
910
911 return author_name(self.author)
911 return author_name(self.author)
912
912
913 @LazyProperty
913 @LazyProperty
914 def author_email(self):
914 def author_email(self):
915 """
915 """
916 Returns author email address for this commit
916 Returns author email address for this commit
917 """
917 """
918
918
919 return author_email(self.author)
919 return author_email(self.author)
920
920
921 def get_file_mode(self, path):
921 def get_file_mode(self, path):
922 """
922 """
923 Returns stat mode of the file at `path`.
923 Returns stat mode of the file at `path`.
924 """
924 """
925 raise NotImplementedError
925 raise NotImplementedError
926
926
927 def is_link(self, path):
927 def is_link(self, path):
928 """
928 """
929 Returns ``True`` if given `path` is a symlink
929 Returns ``True`` if given `path` is a symlink
930 """
930 """
931 raise NotImplementedError
931 raise NotImplementedError
932
932
933 def get_file_content(self, path):
933 def get_file_content(self, path):
934 """
934 """
935 Returns content of the file at the given `path`.
935 Returns content of the file at the given `path`.
936 """
936 """
937 raise NotImplementedError
937 raise NotImplementedError
938
938
939 def get_file_size(self, path):
939 def get_file_size(self, path):
940 """
940 """
941 Returns size of the file at the given `path`.
941 Returns size of the file at the given `path`.
942 """
942 """
943 raise NotImplementedError
943 raise NotImplementedError
944
944
945 def get_file_commit(self, path, pre_load=None):
945 def get_file_commit(self, path, pre_load=None):
946 """
946 """
947 Returns last commit of the file at the given `path`.
947 Returns last commit of the file at the given `path`.
948
948
949 :param pre_load: Optional. List of commit attributes to load.
949 :param pre_load: Optional. List of commit attributes to load.
950 """
950 """
951 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
951 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
952 if not commits:
952 if not commits:
953 raise RepositoryError(
953 raise RepositoryError(
954 'Failed to fetch history for path {}. '
954 'Failed to fetch history for path {}. '
955 'Please check if such path exists in your repository'.format(
955 'Please check if such path exists in your repository'.format(
956 path))
956 path))
957 return commits[0]
957 return commits[0]
958
958
959 def get_file_history(self, path, limit=None, pre_load=None):
959 def get_file_history(self, path, limit=None, pre_load=None):
960 """
960 """
961 Returns history of file as reversed list of :class:`BaseCommit`
961 Returns history of file as reversed list of :class:`BaseCommit`
962 objects for which file at given `path` has been modified.
962 objects for which file at given `path` has been modified.
963
963
964 :param limit: Optional. Allows to limit the size of the returned
964 :param limit: Optional. Allows to limit the size of the returned
965 history. This is intended as a hint to the underlying backend, so
965 history. This is intended as a hint to the underlying backend, so
966 that it can apply optimizations depending on the limit.
966 that it can apply optimizations depending on the limit.
967 :param pre_load: Optional. List of commit attributes to load.
967 :param pre_load: Optional. List of commit attributes to load.
968 """
968 """
969 raise NotImplementedError
969 raise NotImplementedError
970
970
971 def get_file_annotate(self, path, pre_load=None):
971 def get_file_annotate(self, path, pre_load=None):
972 """
972 """
973 Returns a generator of four element tuples with
973 Returns a generator of four element tuples with
974 lineno, sha, commit lazy loader and line
974 lineno, sha, commit lazy loader and line
975
975
976 :param pre_load: Optional. List of commit attributes to load.
976 :param pre_load: Optional. List of commit attributes to load.
977 """
977 """
978 raise NotImplementedError
978 raise NotImplementedError
979
979
980 def get_nodes(self, path):
980 def get_nodes(self, path):
981 """
981 """
982 Returns combined ``DirNode`` and ``FileNode`` objects list representing
982 Returns combined ``DirNode`` and ``FileNode`` objects list representing
983 state of commit at the given ``path``.
983 state of commit at the given ``path``.
984
984
985 :raises ``CommitError``: if node at the given ``path`` is not
985 :raises ``CommitError``: if node at the given ``path`` is not
986 instance of ``DirNode``
986 instance of ``DirNode``
987 """
987 """
988 raise NotImplementedError
988 raise NotImplementedError
989
989
990 def get_node(self, path):
990 def get_node(self, path):
991 """
991 """
992 Returns ``Node`` object from the given ``path``.
992 Returns ``Node`` object from the given ``path``.
993
993
994 :raises ``NodeDoesNotExistError``: if there is no node at the given
994 :raises ``NodeDoesNotExistError``: if there is no node at the given
995 ``path``
995 ``path``
996 """
996 """
997 raise NotImplementedError
997 raise NotImplementedError
998
998
999 def get_largefile_node(self, path):
999 def get_largefile_node(self, path):
1000 """
1000 """
1001 Returns the path to largefile from Mercurial/Git-lfs storage.
1001 Returns the path to largefile from Mercurial/Git-lfs storage.
1002 or None if it's not a largefile node
1002 or None if it's not a largefile node
1003 """
1003 """
1004 return None
1004 return None
1005
1005
1006 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1006 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1007 prefix=None, write_metadata=False, mtime=None):
1007 prefix=None, write_metadata=False, mtime=None):
1008 """
1008 """
1009 Creates an archive containing the contents of the repository.
1009 Creates an archive containing the contents of the repository.
1010
1010
1011 :param file_path: path to the file which to create the archive.
1011 :param file_path: path to the file which to create the archive.
1012 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1012 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1013 :param prefix: name of root directory in archive.
1013 :param prefix: name of root directory in archive.
1014 Default is repository name and commit's short_id joined with dash:
1014 Default is repository name and commit's short_id joined with dash:
1015 ``"{repo_name}-{short_id}"``.
1015 ``"{repo_name}-{short_id}"``.
1016 :param write_metadata: write a metadata file into archive.
1016 :param write_metadata: write a metadata file into archive.
1017 :param mtime: custom modification time for archive creation, defaults
1017 :param mtime: custom modification time for archive creation, defaults
1018 to time.time() if not given.
1018 to time.time() if not given.
1019
1019
1020 :raise VCSError: If prefix has a problem.
1020 :raise VCSError: If prefix has a problem.
1021 """
1021 """
1022 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1022 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1023 if kind not in allowed_kinds:
1023 if kind not in allowed_kinds:
1024 raise ImproperArchiveTypeError(
1024 raise ImproperArchiveTypeError(
1025 'Archive kind (%s) not supported use one of %s' %
1025 'Archive kind (%s) not supported use one of %s' %
1026 (kind, allowed_kinds))
1026 (kind, allowed_kinds))
1027
1027
1028 prefix = self._validate_archive_prefix(prefix)
1028 prefix = self._validate_archive_prefix(prefix)
1029
1029
1030 mtime = mtime or time.mktime(self.date.timetuple())
1030 mtime = mtime or time.mktime(self.date.timetuple())
1031
1031
1032 file_info = []
1032 file_info = []
1033 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1033 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1034 for _r, _d, files in cur_rev.walk('/'):
1034 for _r, _d, files in cur_rev.walk('/'):
1035 for f in files:
1035 for f in files:
1036 f_path = os.path.join(prefix, f.path)
1036 f_path = os.path.join(prefix, f.path)
1037 file_info.append(
1037 file_info.append(
1038 (f_path, f.mode, f.is_link(), f.raw_bytes))
1038 (f_path, f.mode, f.is_link(), f.raw_bytes))
1039
1039
1040 if write_metadata:
1040 if write_metadata:
1041 metadata = [
1041 metadata = [
1042 ('repo_name', self.repository.name),
1042 ('repo_name', self.repository.name),
1043 ('rev', self.raw_id),
1043 ('rev', self.raw_id),
1044 ('create_time', mtime),
1044 ('create_time', mtime),
1045 ('branch', self.branch),
1045 ('branch', self.branch),
1046 ('tags', ','.join(self.tags)),
1046 ('tags', ','.join(self.tags)),
1047 ]
1047 ]
1048 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1048 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1049 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1049 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1050
1050
1051 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1051 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1052
1052
1053 def _validate_archive_prefix(self, prefix):
1053 def _validate_archive_prefix(self, prefix):
1054 if prefix is None:
1054 if prefix is None:
1055 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1055 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1056 repo_name=safe_str(self.repository.name),
1056 repo_name=safe_str(self.repository.name),
1057 short_id=self.short_id)
1057 short_id=self.short_id)
1058 elif not isinstance(prefix, str):
1058 elif not isinstance(prefix, str):
1059 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1059 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1060 elif prefix.startswith('/'):
1060 elif prefix.startswith('/'):
1061 raise VCSError("Prefix cannot start with leading slash")
1061 raise VCSError("Prefix cannot start with leading slash")
1062 elif prefix.strip() == '':
1062 elif prefix.strip() == '':
1063 raise VCSError("Prefix cannot be empty")
1063 raise VCSError("Prefix cannot be empty")
1064 return prefix
1064 return prefix
1065
1065
1066 @LazyProperty
1066 @LazyProperty
1067 def root(self):
1067 def root(self):
1068 """
1068 """
1069 Returns ``RootNode`` object for this commit.
1069 Returns ``RootNode`` object for this commit.
1070 """
1070 """
1071 return self.get_node('')
1071 return self.get_node('')
1072
1072
1073 def next(self, branch=None):
1073 def next(self, branch=None):
1074 """
1074 """
1075 Returns next commit from current, if branch is gives it will return
1075 Returns next commit from current, if branch is gives it will return
1076 next commit belonging to this branch
1076 next commit belonging to this branch
1077
1077
1078 :param branch: show commits within the given named branch
1078 :param branch: show commits within the given named branch
1079 """
1079 """
1080 indexes = xrange(self.idx + 1, self.repository.count())
1080 indexes = xrange(self.idx + 1, self.repository.count())
1081 return self._find_next(indexes, branch)
1081 return self._find_next(indexes, branch)
1082
1082
1083 def prev(self, branch=None):
1083 def prev(self, branch=None):
1084 """
1084 """
1085 Returns previous commit from current, if branch is gives it will
1085 Returns previous commit from current, if branch is gives it will
1086 return previous commit belonging to this branch
1086 return previous commit belonging to this branch
1087
1087
1088 :param branch: show commit within the given named branch
1088 :param branch: show commit within the given named branch
1089 """
1089 """
1090 indexes = xrange(self.idx - 1, -1, -1)
1090 indexes = xrange(self.idx - 1, -1, -1)
1091 return self._find_next(indexes, branch)
1091 return self._find_next(indexes, branch)
1092
1092
1093 def _find_next(self, indexes, branch=None):
1093 def _find_next(self, indexes, branch=None):
1094 if branch and self.branch != branch:
1094 if branch and self.branch != branch:
1095 raise VCSError('Branch option used on commit not belonging '
1095 raise VCSError('Branch option used on commit not belonging '
1096 'to that branch')
1096 'to that branch')
1097
1097
1098 for next_idx in indexes:
1098 for next_idx in indexes:
1099 commit = self.repository.get_commit(commit_idx=next_idx)
1099 commit = self.repository.get_commit(commit_idx=next_idx)
1100 if branch and branch != commit.branch:
1100 if branch and branch != commit.branch:
1101 continue
1101 continue
1102 return commit
1102 return commit
1103 raise CommitDoesNotExistError
1103 raise CommitDoesNotExistError
1104
1104
1105 def diff(self, ignore_whitespace=True, context=3):
1105 def diff(self, ignore_whitespace=True, context=3):
1106 """
1106 """
1107 Returns a `Diff` object representing the change made by this commit.
1107 Returns a `Diff` object representing the change made by this commit.
1108 """
1108 """
1109 parent = self.first_parent
1109 parent = self.first_parent
1110 diff = self.repository.get_diff(
1110 diff = self.repository.get_diff(
1111 parent, self,
1111 parent, self,
1112 ignore_whitespace=ignore_whitespace,
1112 ignore_whitespace=ignore_whitespace,
1113 context=context)
1113 context=context)
1114 return diff
1114 return diff
1115
1115
1116 @LazyProperty
1116 @LazyProperty
1117 def added(self):
1117 def added(self):
1118 """
1118 """
1119 Returns list of added ``FileNode`` objects.
1119 Returns list of added ``FileNode`` objects.
1120 """
1120 """
1121 raise NotImplementedError
1121 raise NotImplementedError
1122
1122
1123 @LazyProperty
1123 @LazyProperty
1124 def changed(self):
1124 def changed(self):
1125 """
1125 """
1126 Returns list of modified ``FileNode`` objects.
1126 Returns list of modified ``FileNode`` objects.
1127 """
1127 """
1128 raise NotImplementedError
1128 raise NotImplementedError
1129
1129
1130 @LazyProperty
1130 @LazyProperty
1131 def removed(self):
1131 def removed(self):
1132 """
1132 """
1133 Returns list of removed ``FileNode`` objects.
1133 Returns list of removed ``FileNode`` objects.
1134 """
1134 """
1135 raise NotImplementedError
1135 raise NotImplementedError
1136
1136
1137 @LazyProperty
1137 @LazyProperty
1138 def size(self):
1138 def size(self):
1139 """
1139 """
1140 Returns total number of bytes from contents of all filenodes.
1140 Returns total number of bytes from contents of all filenodes.
1141 """
1141 """
1142 return sum((node.size for node in self.get_filenodes_generator()))
1142 return sum((node.size for node in self.get_filenodes_generator()))
1143
1143
1144 def walk(self, topurl=''):
1144 def walk(self, topurl=''):
1145 """
1145 """
1146 Similar to os.walk method. Insted of filesystem it walks through
1146 Similar to os.walk method. Insted of filesystem it walks through
1147 commit starting at given ``topurl``. Returns generator of tuples
1147 commit starting at given ``topurl``. Returns generator of tuples
1148 (topnode, dirnodes, filenodes).
1148 (topnode, dirnodes, filenodes).
1149 """
1149 """
1150 topnode = self.get_node(topurl)
1150 topnode = self.get_node(topurl)
1151 if not topnode.is_dir():
1151 if not topnode.is_dir():
1152 return
1152 return
1153 yield (topnode, topnode.dirs, topnode.files)
1153 yield (topnode, topnode.dirs, topnode.files)
1154 for dirnode in topnode.dirs:
1154 for dirnode in topnode.dirs:
1155 for tup in self.walk(dirnode.path):
1155 for tup in self.walk(dirnode.path):
1156 yield tup
1156 yield tup
1157
1157
1158 def get_filenodes_generator(self):
1158 def get_filenodes_generator(self):
1159 """
1159 """
1160 Returns generator that yields *all* file nodes.
1160 Returns generator that yields *all* file nodes.
1161 """
1161 """
1162 for topnode, dirs, files in self.walk():
1162 for topnode, dirs, files in self.walk():
1163 for node in files:
1163 for node in files:
1164 yield node
1164 yield node
1165
1165
1166 #
1166 #
1167 # Utilities for sub classes to support consistent behavior
1167 # Utilities for sub classes to support consistent behavior
1168 #
1168 #
1169
1169
1170 def no_node_at_path(self, path):
1170 def no_node_at_path(self, path):
1171 return NodeDoesNotExistError(
1171 return NodeDoesNotExistError(
1172 u"There is no file nor directory at the given path: "
1172 u"There is no file nor directory at the given path: "
1173 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1173 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1174
1174
1175 def _fix_path(self, path):
1175 def _fix_path(self, path):
1176 """
1176 """
1177 Paths are stored without trailing slash so we need to get rid off it if
1177 Paths are stored without trailing slash so we need to get rid off it if
1178 needed.
1178 needed.
1179 """
1179 """
1180 return path.rstrip('/')
1180 return path.rstrip('/')
1181
1181
1182 #
1182 #
1183 # Deprecated API based on changesets
1183 # Deprecated API based on changesets
1184 #
1184 #
1185
1185
1186 @property
1186 @property
1187 def revision(self):
1187 def revision(self):
1188 warnings.warn("Use idx instead", DeprecationWarning)
1188 warnings.warn("Use idx instead", DeprecationWarning)
1189 return self.idx
1189 return self.idx
1190
1190
1191 @revision.setter
1191 @revision.setter
1192 def revision(self, value):
1192 def revision(self, value):
1193 warnings.warn("Use idx instead", DeprecationWarning)
1193 warnings.warn("Use idx instead", DeprecationWarning)
1194 self.idx = value
1194 self.idx = value
1195
1195
1196 def get_file_changeset(self, path):
1196 def get_file_changeset(self, path):
1197 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1197 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1198 return self.get_file_commit(path)
1198 return self.get_file_commit(path)
1199
1199
1200
1200
class BaseChangesetClass(type):
    """
    Metaclass that makes ``isinstance(x, BaseChangeset)`` accept any
    :class:`BaseCommit` instance (deprecated-name compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1205
1205
1206
1206
class BaseChangeset(BaseCommit):
    """
    Deprecated alias of :class:`BaseCommit`; instantiating it emits a
    DeprecationWarning and yields a plain :class:`BaseCommit` subclass.
    """

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1215
1215
1216
1216
1217 class BaseInMemoryCommit(object):
1217 class BaseInMemoryCommit(object):
1218 """
1218 """
1219 Represents differences between repository's state (most recent head) and
1219 Represents differences between repository's state (most recent head) and
1220 changes made *in place*.
1220 changes made *in place*.
1221
1221
1222 **Attributes**
1222 **Attributes**
1223
1223
1224 ``repository``
1224 ``repository``
1225 repository object for this in-memory-commit
1225 repository object for this in-memory-commit
1226
1226
1227 ``added``
1227 ``added``
1228 list of ``FileNode`` objects marked as *added*
1228 list of ``FileNode`` objects marked as *added*
1229
1229
1230 ``changed``
1230 ``changed``
1231 list of ``FileNode`` objects marked as *changed*
1231 list of ``FileNode`` objects marked as *changed*
1232
1232
1233 ``removed``
1233 ``removed``
1234 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1234 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1235 *removed*
1235 *removed*
1236
1236
1237 ``parents``
1237 ``parents``
1238 list of :class:`BaseCommit` instances representing parents of
1238 list of :class:`BaseCommit` instances representing parents of
1239 in-memory commit. Should always be 2-element sequence.
1239 in-memory commit. Should always be 2-element sequence.
1240
1240
1241 """
1241 """
1242
1242
1243 def __init__(self, repository):
1243 def __init__(self, repository):
1244 self.repository = repository
1244 self.repository = repository
1245 self.added = []
1245 self.added = []
1246 self.changed = []
1246 self.changed = []
1247 self.removed = []
1247 self.removed = []
1248 self.parents = []
1248 self.parents = []
1249
1249
1250 def add(self, *filenodes):
1250 def add(self, *filenodes):
1251 """
1251 """
1252 Marks given ``FileNode`` objects as *to be committed*.
1252 Marks given ``FileNode`` objects as *to be committed*.
1253
1253
1254 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1254 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1255 latest commit
1255 latest commit
1256 :raises ``NodeAlreadyAddedError``: if node with same path is already
1256 :raises ``NodeAlreadyAddedError``: if node with same path is already
1257 marked as *added*
1257 marked as *added*
1258 """
1258 """
1259 # Check if not already marked as *added* first
1259 # Check if not already marked as *added* first
1260 for node in filenodes:
1260 for node in filenodes:
1261 if node.path in (n.path for n in self.added):
1261 if node.path in (n.path for n in self.added):
1262 raise NodeAlreadyAddedError(
1262 raise NodeAlreadyAddedError(
1263 "Such FileNode %s is already marked for addition"
1263 "Such FileNode %s is already marked for addition"
1264 % node.path)
1264 % node.path)
1265 for node in filenodes:
1265 for node in filenodes:
1266 self.added.append(node)
1266 self.added.append(node)
1267
1267
def change(self, *filenodes):
    """
    Marks given ``FileNode`` objects to be *changed* in next commit.

    :raises ``EmptyRepositoryError``: if there are no commits yet
    :raises ``NodeAlreadyExistsError``: if node with same path is already
        marked to be *changed*
    :raises ``NodeAlreadyRemovedError``: if node with same path is already
        marked to be *removed*
    :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
        commit
    :raises ``NodeNotChangedError``: if node hasn't really be changed
    """
    # A node scheduled for removal cannot simultaneously be changed.
    removed_paths = set(n.path for n in self.removed)
    for node in filenodes:
        if node.path in removed_paths:
            raise NodeAlreadyRemovedError(
                "Node at %s is already marked as removed" % node.path)
    # Changing anything requires at least one existing commit.
    try:
        self.repository.get_commit()
    except EmptyRepositoryError:
        raise EmptyRepositoryError(
            "Nothing to change - try to *add* new nodes rather than "
            "changing them")
    # The generator over self.changed is re-evaluated per node, so a
    # duplicate inside `filenodes` is also detected.
    for node in filenodes:
        if node.path in (n.path for n in self.changed):
            raise NodeAlreadyChangedError(
                "Node at '%s' is already marked as changed" % node.path)
        self.changed.append(node)
1296
1296
def remove(self, *filenodes):
    """
    Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
    *removed* in next commit.

    :raises ``NodeAlreadyRemovedError``: if node has been already marked to
      be *removed*
    :raises ``NodeAlreadyChangedError``: if node has been already marked to
      be *changed*
    """
    for node in filenodes:
        if node.path in (n.path for n in self.removed):
            # fixed garbled error message ("marked to for removal")
            raise NodeAlreadyRemovedError(
                "Node is already marked for removal at %s" % node.path)
        if node.path in (n.path for n in self.changed):
            raise NodeAlreadyChangedError(
                "Node is already marked to be changed at %s" % node.path)
        # We only mark node as *removed* - real removal is done by
        # commit method
        self.removed.append(node)
1317
1317
def reset(self):
    """
    Resets this instance to initial state (cleans ``added``, ``changed``
    and ``removed`` lists).
    """
    # Rebind all bookkeeping lists to fresh empty ones.
    self.added, self.changed = [], []
    self.removed, self.parents = [], []
1327
1327
def get_ipaths(self):
    """
    Returns generator of paths from nodes marked as added, changed or
    removed.
    """
    marked_nodes = itertools.chain(self.added, self.changed, self.removed)
    return (node.path for node in marked_nodes)
1335
1335
def get_paths(self):
    """
    Returns list of paths from nodes marked as added, changed or removed.
    """
    # Materialize the lazy generator into a concrete list.
    return [path for path in self.get_ipaths()]
1341
1341
def check_integrity(self, parents=None):
    """
    Checks in-memory commit's integrity. Also, sets parents if not
    already set.

    :raises CommitError: if any error occurs (i.e.
        ``NodeDoesNotExistError``).
    """
    if not self.parents:
        parents = parents or []
        if len(parents) == 0:
            # Default to the repository tip as single parent; a brand-new
            # repository has no commits at all.
            try:
                parents = [self.repository.get_commit(), None]
            except EmptyRepositoryError:
                parents = [None, None]
        elif len(parents) == 1:
            parents += [None]
        self.parents = parents

    # Local parents, only if not None
    parents = [p for p in self.parents if p]

    # Check nodes marked as added: they must not exist in any parent.
    for p in parents:
        for node in self.added:
            try:
                p.get_node(node.path)
            except NodeDoesNotExistError:
                pass
            else:
                raise NodeAlreadyExistsError(
                    "Node `%s` already exists at %s" % (node.path, p))

    # Check nodes marked as changed
    missing = set(self.changed)
    not_changed = set(self.changed)
    if self.changed and not parents:
        raise NodeDoesNotExistError(str(self.changed[0].path))
    for p in parents:
        for node in self.changed:
            try:
                old = p.get_node(node.path)
                missing.remove(node)
                # if content actually changed, remove node from not_changed
                if old.content != node.content:
                    not_changed.remove(node)
            except NodeDoesNotExistError:
                pass
    if self.changed and missing:
        # BUG FIX: the message previously interpolated the stale loop
        # variable `node` (whatever was iterated last), not the nodes that
        # are actually missing; report the real missing paths instead.
        raise NodeDoesNotExistError(
            "Nodes `%s` marked as modified but missing in parents: %s"
            % ([n.path for n in missing], parents))

    if self.changed and not_changed:
        raise NodeNotChangedError(
            "Node `%s` wasn't actually changed (parents: %s)"
            % (not_changed.pop().path, parents))

    # Check nodes marked as removed: removal needs at least one parent
    # that actually contains the node.
    if self.removed and not parents:
        raise NodeDoesNotExistError(
            "Cannot remove node at %s as there "
            "were no parents specified" % self.removed[0].path)
    really_removed = set()
    for p in parents:
        for node in self.removed:
            try:
                p.get_node(node.path)
                really_removed.add(node)
            except CommitError:
                pass
    not_removed = set(self.removed) - really_removed
    if not_removed:
        # TODO: johbo: This code branch does not seem to be covered
        raise NodeDoesNotExistError(
            "Cannot remove node at %s from "
            "following parents: %s" % (not_removed, parents))
1419
1419
def commit(
        self, message, author, parents=None, branch=None, date=None,
        **kwargs):
    """
    Performs in-memory commit (doesn't check workdir in any way) and
    returns newly created :class:`BaseCommit`. Updates repository's
    attribute `commits`.

    .. note::

        While overriding this method each backend's should call
        ``self.check_integrity(parents)`` in the first place.

    :param message: message of the commit
    :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
    :param parents: single parent or sequence of parents from which commit
        would be derived
    :param date: ``datetime.datetime`` instance. Defaults to
        ``datetime.datetime.now()``.
    :param branch: branch name, as string. If none given, default backend's
        branch would be used.

    :raises ``CommitError``: if any error occurs while committing
    """
    # Abstract: every concrete backend must supply its own implementation.
    raise NotImplementedError
1445
1445
1446
1446
class BaseInMemoryChangesetClass(type):
    """Metaclass letting ``isinstance(x, BaseInMemoryChangeset)`` accept
    any :class:`BaseInMemoryCommit` instance (deprecation shim)."""

    def __instancecheck__(cls, instance):
        # Delegate the check to the modern class.
        return isinstance(instance, BaseInMemoryCommit)
1451
1451
1452
1452
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """Deprecated alias of :class:`BaseInMemoryCommit`; warns on use."""

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Emit a deprecation notice each time the legacy name is instantiated.
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1461
1461
1462
1462
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass hash when creating
    an EmptyCommit.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # Fall back to the Unix epoch when no explicit date is given.
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the backend's default branch name if an alias is known;
        # returns None otherwise.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # Conventional 12-character abbreviation of the full id.
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_file_commit(self, path):
        # The empty commit is its own "last commit" for every path.
        return self

    def get_file_content(self, path):
        # No files exist, so every file's content is empty.
        return u''

    def get_file_size(self, path):
        return 0
1513
1513
1514
1514
class EmptyChangesetClass(type):
    """Metaclass letting ``isinstance(x, EmptyChangeset)`` accept any
    :class:`EmptyCommit` instance (deprecation shim)."""

    def __instancecheck__(cls, instance):
        # Delegate the check to the modern class.
        return isinstance(instance, EmptyCommit)
1519
1519
1520
1520
class EmptyChangeset(EmptyCommit):
    """Deprecated alias of :class:`EmptyCommit`; warns on use and keeps the
    legacy ``revision`` accessor around."""

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE(review): `super(EmptyCommit, cls)` skips EmptyCommit in the
        # MRO, unlike sibling shims which use super(<own class>, cls);
        # presumably harmless since object.__new__ is reached either way —
        # confirm before changing.
        return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        # `requested_revision` is accepted only for backward compatibility.
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # Legacy name for `idx`.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1549
1549
1550
1550
class EmptyRepository(BaseRepository):
    """Null-object repository: ignores construction arguments and produces
    empty diffs."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # Deliberately a no-op: there is nothing to initialize.
        pass

    def get_diff(self, *args, **kwargs):
        # Import locally to avoid a module-level dependency cycle.
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1558
1558
1559
1559
class CollectionGenerator(object):
    """Lazy, sliceable sequence of commits materialized on demand from a
    list of commit ids."""

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        # TODO: johbo: Mercurial passes in commit indices or commit ids
        return (self._commit_factory(commit_id)
                for commit_id in self.commit_ids)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository (Python 2 slice protocol).
        """
        return self.__class__(
            self.repo, self.commit_ids[i:j], pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1596
1596
1597
1597
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # section name -> {option name -> value}
        self._values = {}

    def copy(self):
        """Return a copy with per-section dicts duplicated (values shared)."""
        clone = Config()
        for section, values in self._values.items():
            clone._values[section] = values.copy()
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """
        Return iterable of ``(option, value)`` pairs for ``section``;
        empty when the section is unknown.
        """
        # py3 compat fix: dict.iteritems() does not exist on Python 3.
        # .items() works on both (list on py2, view on py3) and stays
        # iterable for all existing callers.
        return self._values.get(section, {}).items()

    def get(self, section, option):
        """Return the value for ``option`` in ``section`` or None."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Set ``option`` to ``value``, creating ``section`` if needed."""
        section_values = self._values.setdefault(section, {})
        section_values[option] = value

    def clear_section(self, section):
        """Drop all options of ``section`` (section itself stays present)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        items = []
        for section in self._values:
            for option, value in self._values[section].items():
                items.append(
                    (safe_str(section), safe_str(option), safe_str(value)))
        return items
1643
1643
1644
1644
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        # Raw textual diff as produced by the backend.
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """
        diff_parts = ('\n' + self.raw).split('\ndiff --git')
        # NOTE: the former `match = self._meta_re.match(header)` here was
        # dead code — the result was never used — so it has been removed.

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        # Lazily wrap each chunk; flag the last one so DiffChunk can apply
        # its special trailing-newline rule.
        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1677
1677
1678
1678
class DiffChunk(object):
    """A single per-file chunk of a :class:`Diff`, parsed via the diff's
    backend-specific ``_header_re``."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        # Everything after the header is the chunk's diff body.
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1693
1693
1694
1694
class BasePathPermissionChecker(object):
    """Decides whether individual repository paths are accessible.
    Subclasses implement :attr:`has_full_access` and :meth:`has_access`."""

    @staticmethod
    def create_from_patterns(includes, excludes):
        """Build the cheapest checker for the given include/exclude globs."""
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUG FIX: was `raise NotImplemented()` — NotImplemented is a
        # constant, not an exception type, so calling it produced a
        # confusing TypeError instead of the intended NotImplementedError.
        raise NotImplementedError()

    def has_access(self, path):
        # Same fix as above: raise the proper abstract-method exception.
        raise NotImplementedError()
1712
1712
1713
1713
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path unconditionally."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        # Every path is accessible, regardless of its value.
        return True
1722
1722
1723
1723
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path unconditionally."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        # No path is accessible, regardless of its value.
        return False
1732
1732
1733
1733
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Path checker driven by fnmatch-style include/exclude glob patterns;
    excludes win over includes, unmatched paths are denied."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile the glob patterns once; None/empty lists yield [].
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # An exclude match denies immediately; otherwise access requires
        # at least one include match.
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,999 +1,999 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
# Matches short (12) or full (40) hexadecimal commit ids.
# NOTE(review): the outer [ ] are literal character-class brackets, not a
# group, so with .match() any 12+ hex-char prefix passes -- callers also
# check membership in commit_ids, which compensates. Confirm before changing.
SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')

log = logging.getLogger(__name__)
50
50
51
51
class GitRepository(BaseRepository):
    """
    Git repository backend.
    """
    # git's conventional default branch name
    DEFAULT_BRANCH_NAME = 'master'

    # fallback contact info taken from the base backend
    contact = BaseRepository.DEFAULT_CONTACT
59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        :param repo_path: local filesystem path of the repository
        :param config: optional config object; falls back to the default one
        :param create: create the repository at ``repo_path`` when True
        :param src_url: optional url to clone/fetch from on creation
        :param do_workspace_checkout: check out a working copy after pull
        :param with_wire: extra wire-protocol options for the remote
        :param bare: create/treat the repository as bare (no working tree)
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire

        # may create/clone on disk depending on the flags above
        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}
71
71
    @LazyProperty
    def _remote(self):
        # lazily created wire connection to the vcsserver for this repo
        return connection.Git(self.path, self.config, with_wire=self.with_wire)

    @LazyProperty
    def bare(self):
        # True when the repository has no working tree (asked from remote)
        return self._remote.bare()

    @LazyProperty
    def head(self):
        # current HEAD commit id as reported by the remote
        return self._remote.head()
83
83
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index map in sync with the freshly computed list
        self._rebuild_cache(commit_ids)
        return commit_ids
93
93
94 def _rebuild_cache(self, commit_ids):
94 def _rebuild_cache(self, commit_ids):
95 self._commit_ids = dict((commit_id, index)
95 self._commit_ids = dict((commit_id, index)
96 for index, commit_id in enumerate(commit_ids))
96 for index, commit_id in enumerate(commit_ids))
97
97
98 def run_git_command(self, cmd, **opts):
98 def run_git_command(self, cmd, **opts):
99 """
99 """
100 Runs given ``cmd`` as git command and returns tuple
100 Runs given ``cmd`` as git command and returns tuple
101 (stdout, stderr).
101 (stdout, stderr).
102
102
103 :param cmd: git command to be executed
103 :param cmd: git command to be executed
104 :param opts: env options to pass into Subprocess command
104 :param opts: env options to pass into Subprocess command
105 """
105 """
106 if not isinstance(cmd, list):
106 if not isinstance(cmd, list):
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108
108
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 out, err = self._remote.run_git_command(cmd, **opts)
110 out, err = self._remote.run_git_command(cmd, **opts)
111 if err and not skip_stderr_log:
111 if err and not skip_stderr_log:
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 return out, err
113 return out, err
114
114
115 @staticmethod
115 @staticmethod
116 def check_url(url, config):
116 def check_url(url, config):
117 """
117 """
118 Function will check given url and try to verify if it's a valid
118 Function will check given url and try to verify if it's a valid
119 link. Sometimes it may happened that git will issue basic
119 link. Sometimes it may happened that git will issue basic
120 auth request that can cause whole API to hang when used from python
120 auth request that can cause whole API to hang when used from python
121 or other external calls.
121 or other external calls.
122
122
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 when the return code is non 200
124 when the return code is non 200
125 """
125 """
126 # check first if it's not an url
126 # check first if it's not an url
127 if os.path.isdir(url) or url.startswith('file:'):
127 if os.path.isdir(url) or url.startswith('file:'):
128 return True
128 return True
129
129
130 if '+' in url.split('://', 1)[0]:
130 if '+' in url.split('://', 1)[0]:
131 url = url.split('+', 1)[1]
131 url = url.split('+', 1)[1]
132
132
133 # Request the _remote to verify the url
133 # Request the _remote to verify the url
134 return connection.Git.check_url(url, config.serialize())
134 return connection.Git.check_url(url, config.serialize())
135
135
136 @staticmethod
136 @staticmethod
137 def is_valid_repository(path):
137 def is_valid_repository(path):
138 if os.path.isdir(os.path.join(path, '.git')):
138 if os.path.isdir(os.path.join(path, '.git')):
139 return True
139 return True
140 # check case of bare repository
140 # check case of bare repository
141 try:
141 try:
142 GitRepository(path)
142 GitRepository(path)
143 return True
143 return True
144 except VCSError:
144 except VCSError:
145 pass
145 pass
146 return False
146 return False
147
147
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create and/or populate the on-disk repository.

        :param create: create a fresh repository at ``self.path``
        :param src_url: optional url to clone; validated before any fs change
        :param do_workspace_checkout: update the working copy after pull
        :param bare: initialize as a bare repository (no working tree)
        :raises RepositoryError: when the target already exists on create,
            on the bare+checkout combination, when the path is not a git
            repository, or on underlying OS errors
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
187
187
188 def _get_all_commit_ids(self, filters=None):
188 def _get_all_commit_ids(self, filters=None):
189 # we must check if this repo is not empty, since later command
189 # we must check if this repo is not empty, since later command
190 # fails if it is. And it's cheaper to ask than throw the subprocess
190 # fails if it is. And it's cheaper to ask than throw the subprocess
191 # errors
191 # errors
192
192
193 head = self._remote.head(show_exc=False)
193 head = self._remote.head(show_exc=False)
194 if not head:
194 if not head:
195 return []
195 return []
196
196
197 rev_filter = ['--branches', '--tags']
197 rev_filter = ['--branches', '--tags']
198 extra_filter = []
198 extra_filter = []
199
199
200 if filters:
200 if filters:
201 if filters.get('since'):
201 if filters.get('since'):
202 extra_filter.append('--since=%s' % (filters['since']))
202 extra_filter.append('--since=%s' % (filters['since']))
203 if filters.get('until'):
203 if filters.get('until'):
204 extra_filter.append('--until=%s' % (filters['until']))
204 extra_filter.append('--until=%s' % (filters['until']))
205 if filters.get('branch_name'):
205 if filters.get('branch_name'):
206 rev_filter = ['--tags']
206 rev_filter = ['--tags']
207 extra_filter.append(filters['branch_name'])
207 extra_filter.append(filters['branch_name'])
208 rev_filter.extend(extra_filter)
208 rev_filter.extend(extra_filter)
209
209
210 # if filters.get('start') or filters.get('end'):
210 # if filters.get('start') or filters.get('end'):
211 # # skip is offset, max-count is limit
211 # # skip is offset, max-count is limit
212 # if filters.get('start'):
212 # if filters.get('start'):
213 # extra_filter += ' --skip=%s' % filters['start']
213 # extra_filter += ' --skip=%s' % filters['start']
214 # if filters.get('end'):
214 # if filters.get('end'):
215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216
216
217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 try:
218 try:
219 output, __ = self.run_git_command(cmd)
219 output, __ = self.run_git_command(cmd)
220 except RepositoryError:
220 except RepositoryError:
221 # Can be raised for empty repositories
221 # Can be raised for empty repositories
222 return []
222 return []
223 return output.splitlines()
223 return output.splitlines()
224
224
    def _get_commit_id(self, commit_id_or_idx):
        """
        Resolve ``commit_id_or_idx`` (numeric index, branch, tag, full ref
        or sha) into a commit id present in this repository.

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when nothing matches
        """
        def is_null(value):
            # True for a string made entirely of '0' chars (the null sha);
            # compares the length against the count of zeros in the outer arg
            return len(value) == commit_id_or_idx.count('0')

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        # symbolic aliases for the most recent commit
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        # numeric index (int, or a short all-digit string), or the null sha
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                msg = "Commit %s does not exist for %s" % (
                    commit_id_or_idx, self)
                raise CommitDoesNotExistError(msg)

        elif is_bstr:
            # check full path ref, eg. refs/heads/master
            ref_id = self._refs.get(commit_id_or_idx)
            if ref_id:
                return ref_id

            # check branch name
            # NOTE(review): branch_ids below is computed but never used
            branch_ids = self.branches.values()
            ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            # check tag name
            ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            if (not SHA_PATTERN.match(commit_id_or_idx) or
                    commit_id_or_idx not in self.commit_ids):
                msg = "Commit %s does not exist for %s" % (
                    commit_id_or_idx, self)
                raise CommitDoesNotExistError(msg)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
273
273
274 def get_hook_location(self):
274 def get_hook_location(self):
275 """
275 """
276 returns absolute path to location where hooks are stored
276 returns absolute path to location where hooks are stored
277 """
277 """
278 loc = os.path.join(self.path, 'hooks')
278 loc = os.path.join(self.path, 'hooks')
279 if not self.bare:
279 if not self.bare:
280 loc = os.path.join(self.path, '.git', 'hooks')
280 loc = os.path.join(self.path, '.git', 'hooks')
281 return loc
281 return loc
282
282
283 @LazyProperty
283 @LazyProperty
284 def last_change(self):
284 def last_change(self):
285 """
285 """
286 Returns last change made on this repository as
286 Returns last change made on this repository as
287 `datetime.datetime` object.
287 `datetime.datetime` object.
288 """
288 """
289 try:
289 try:
290 return self.get_commit().date
290 return self.get_commit().date
291 except RepositoryError:
291 except RepositoryError:
292 tzoffset = makedate()[1]
292 tzoffset = makedate()[1]
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294
294
295 def _get_fs_mtime(self):
295 def _get_fs_mtime(self):
296 idx_loc = '' if self.bare else '.git'
296 idx_loc = '' if self.bare else '.git'
297 # fallback to filesystem
297 # fallback to filesystem
298 in_path = os.path.join(self.path, idx_loc, "index")
298 in_path = os.path.join(self.path, idx_loc, "index")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 if os.path.exists(in_path):
300 if os.path.exists(in_path):
301 return os.stat(in_path).st_mtime
301 return os.stat(in_path).st_mtime
302 else:
302 else:
303 return os.stat(he_path).st_mtime
303 return os.stat(he_path).st_mtime
304
304
    @LazyProperty
    def description(self):
        # remote-provided description, falling back to the backend default
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309
309
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 if self.is_empty():
311 if self.is_empty():
312 return OrderedDict()
312 return OrderedDict()
313
313
314 result = []
314 result = []
315 for ref, sha in self._refs.iteritems():
315 for ref, sha in self._refs.iteritems():
316 if ref.startswith(prefix):
316 if ref.startswith(prefix):
317 ref_name = ref
317 ref_name = ref
318 if strip_prefix:
318 if strip_prefix:
319 ref_name = ref[len(prefix):]
319 ref_name = ref[len(prefix):]
320 result.append((safe_unicode(ref_name), sha))
320 result.append((safe_unicode(ref_name), sha))
321
321
322 def get_name(entry):
322 def get_name(entry):
323 return entry[0]
323 return entry[0]
324
324
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326
326
    def _get_branches(self):
        # branch name -> sha, built from refs/heads/*
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)

    @LazyProperty
    def branches(self):
        # cached branch name -> sha mapping
        return self._get_branches()

    @LazyProperty
    def branches_closed(self):
        # git has no closed-branch concept (mercurial API parity only)
        return {}

    @LazyProperty
    def bookmarks(self):
        # git has no bookmarks (mercurial API parity only)
        return {}

    @LazyProperty
    def branches_all(self):
        # open and closed branches merged into a single mapping
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        # cached tag name -> sha mapping
        return self._get_tags()

    def _get_tags(self):
        # tag name -> sha, reverse-sorted by name, from refs/tags/*
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
356
356
357 def tag(self, name, user, commit_id=None, message=None, date=None,
357 def tag(self, name, user, commit_id=None, message=None, date=None,
358 **kwargs):
358 **kwargs):
359 # TODO: fix this method to apply annotated tags correct with message
359 # TODO: fix this method to apply annotated tags correct with message
360 """
360 """
361 Creates and returns a tag for the given ``commit_id``.
361 Creates and returns a tag for the given ``commit_id``.
362
362
363 :param name: name for new tag
363 :param name: name for new tag
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 :param commit_id: commit id for which new tag would be created
365 :param commit_id: commit id for which new tag would be created
366 :param message: message of the tag's commit
366 :param message: message of the tag's commit
367 :param date: date of tag's commit
367 :param date: date of tag's commit
368
368
369 :raises TagAlreadyExistError: if tag with same name already exists
369 :raises TagAlreadyExistError: if tag with same name already exists
370 """
370 """
371 if name in self.tags:
371 if name in self.tags:
372 raise TagAlreadyExistError("Tag %s already exists" % name)
372 raise TagAlreadyExistError("Tag %s already exists" % name)
373 commit = self.get_commit(commit_id=commit_id)
373 commit = self.get_commit(commit_id=commit_id)
374 message = message or "Added tag %s for commit %s" % (
374 message = message or "Added tag %s for commit %s" % (
375 name, commit.raw_id)
375 name, commit.raw_id)
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377
377
378 self._refs = self._get_refs()
378 self._refs = self._get_refs()
379 self.tags = self._get_tags()
379 self.tags = self._get_tags()
380 return commit
380 return commit
381
381
382 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
383 """
383 """
384 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
385
385
386 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
390
390
391 :raises TagDoesNotExistError: if tag with given name does not exists
391 :raises TagDoesNotExistError: if tag with given name does not exists
392 """
392 """
393 if name not in self.tags:
393 if name not in self.tags:
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 tagpath = vcspath.join(
395 tagpath = vcspath.join(
396 self._remote.get_refs_path(), 'refs', 'tags', name)
396 self._remote.get_refs_path(), 'refs', 'tags', name)
397 try:
397 try:
398 os.remove(tagpath)
398 os.remove(tagpath)
399 self._refs = self._get_refs()
399 self._refs = self._get_refs()
400 self.tags = self._get_tags()
400 self.tags = self._get_tags()
401 except OSError as e:
401 except OSError as e:
402 raise RepositoryError(e.strerror)
402 raise RepositoryError(e.strerror)
403
403
    def _get_refs(self):
        # full ref name -> sha mapping fetched from the remote
        return self._remote.get_refs()

    @LazyProperty
    def _refs(self):
        # cached copy of the remote refs; refreshed explicitly by mutators
        return self._get_refs()
410
410
411 @property
411 @property
412 def _ref_tree(self):
412 def _ref_tree(self):
413 node = tree = {}
413 node = tree = {}
414 for ref, sha in self._refs.iteritems():
414 for ref, sha in self._refs.iteritems():
415 path = ref.split('/')
415 path = ref.split('/')
416 for bit in path[:-1]:
416 for bit in path[:-1]:
417 node = node.setdefault(bit, {})
417 node = node.setdefault(bit, {})
418 node[path[-1]] = sha
418 node[path[-1]] = sha
419 node = tree
419 node = tree
420 return tree
420 return tree
421
421
422 def get_remote_ref(self, ref_name):
422 def get_remote_ref(self, ref_name):
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 try:
424 try:
425 return self._refs[ref_key]
425 return self._refs[ref_key]
426 except Exception:
426 except Exception:
427 return
427 return
428
428
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: commit sha / branch / tag / ref to resolve
        :param commit_idx: numeric index into ``commit_ids`` (alternative
            to ``commit_id``)
        :param pre_load: optional list of attributes to eagerly load
        :raises RepositoryError: when the resolved id cannot be found
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
448
448
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate ids into positions within self.commit_ids
        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the later slice end-inclusive
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        if filter_:
            # delegate date/branch filtering to `git rev-list`
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)
        else:
            commit_ids = self.commit_ids

        # start/end slicing happens here, not inside rev-list
        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
531 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
532
532
533 def get_diff(
533 def get_diff(
534 self, commit1, commit2, path='', ignore_whitespace=False,
534 self, commit1, commit2, path='', ignore_whitespace=False,
535 context=3, path1=None):
535 context=3, path1=None):
536 """
536 """
537 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 Returns (git like) *diff*, as plain text. Shows changes introduced by
538 ``commit2`` since ``commit1``.
538 ``commit2`` since ``commit1``.
539
539
540 :param commit1: Entry point from which diff is shown. Can be
540 :param commit1: Entry point from which diff is shown. Can be
541 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 ``self.EMPTY_COMMIT`` - in this case, patch showing all
542 the changes since empty state of the repository until ``commit2``
542 the changes since empty state of the repository until ``commit2``
543 :param commit2: Until which commits changes should be shown.
543 :param commit2: Until which commits changes should be shown.
544 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 :param ignore_whitespace: If set to ``True``, would not show whitespace
545 changes. Defaults to ``False``.
545 changes. Defaults to ``False``.
546 :param context: How many lines before/after changed lines should be
546 :param context: How many lines before/after changed lines should be
547 shown. Defaults to ``3``.
547 shown. Defaults to ``3``.
548 """
548 """
549 self._validate_diff_commits(commit1, commit2)
549 self._validate_diff_commits(commit1, commit2)
550 if path1 is not None and path1 != path:
550 if path1 is not None and path1 != path:
551 raise ValueError("Diff of two different paths not supported.")
551 raise ValueError("Diff of two different paths not supported.")
552
552
553 flags = [
553 flags = [
554 '-U%s' % context, '--full-index', '--binary', '-p',
554 '-U%s' % context, '--full-index', '--binary', '-p',
555 '-M', '--abbrev=40']
555 '-M', '--abbrev=40']
556 if ignore_whitespace:
556 if ignore_whitespace:
557 flags.append('-w')
557 flags.append('-w')
558
558
559 if commit1 == self.EMPTY_COMMIT:
559 if commit1 == self.EMPTY_COMMIT:
560 cmd = ['show'] + flags + [commit2.raw_id]
560 cmd = ['show'] + flags + [commit2.raw_id]
561 else:
561 else:
562 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
562 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
563
563
564 if path:
564 if path:
565 cmd.extend(['--', path])
565 cmd.extend(['--', path])
566
566
567 stdout, __ = self.run_git_command(cmd)
567 stdout, __ = self.run_git_command(cmd)
568 # If we used 'show' command, strip first few lines (until actual diff
568 # If we used 'show' command, strip first few lines (until actual diff
569 # starts)
569 # starts)
570 if commit1 == self.EMPTY_COMMIT:
570 if commit1 == self.EMPTY_COMMIT:
571 lines = stdout.splitlines()
571 lines = stdout.splitlines()
572 x = 0
572 x = 0
573 for line in lines:
573 for line in lines:
574 if line.startswith('diff'):
574 if line.startswith('diff'):
575 break
575 break
576 x += 1
576 x += 1
577 # Append new line just like 'diff' command do
577 # Append new line just like 'diff' command do
578 stdout = '\n'.join(lines[x:]) + '\n'
578 stdout = '\n'.join(lines[x:]) + '\n'
579 return GitDiff(stdout)
579 return GitDiff(stdout)
580
580
581 def strip(self, commit_id, branch_name):
581 def strip(self, commit_id, branch_name):
582 commit = self.get_commit(commit_id=commit_id)
582 commit = self.get_commit(commit_id=commit_id)
583 if commit.merge:
583 if commit.merge:
584 raise Exception('Cannot reset to merge commit')
584 raise Exception('Cannot reset to merge commit')
585
585
586 # parent is going to be the new head now
586 # parent is going to be the new head now
587 commit = commit.parents[0]
587 commit = commit.parents[0]
588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
589
589
590 self.commit_ids = self._get_all_commit_ids()
590 self.commit_ids = self._get_all_commit_ids()
591 self._rebuild_cache(self.commit_ids)
591 self._rebuild_cache(self.commit_ids)
592
592
593 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
593 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
594 if commit_id1 == commit_id2:
594 if commit_id1 == commit_id2:
595 return commit_id1
595 return commit_id1
596
596
597 if self != repo2:
597 if self != repo2:
598 commits = self._remote.get_missing_revs(
598 commits = self._remote.get_missing_revs(
599 commit_id1, commit_id2, repo2.path)
599 commit_id1, commit_id2, repo2.path)
600 if commits:
600 if commits:
601 commit = repo2.get_commit(commits[-1])
601 commit = repo2.get_commit(commits[-1])
602 if commit.parents:
602 if commit.parents:
603 ancestor_id = commit.parents[0].raw_id
603 ancestor_id = commit.parents[0].raw_id
604 else:
604 else:
605 ancestor_id = None
605 ancestor_id = None
606 else:
606 else:
607 # no commits from other repo, ancestor_id is the commit_id2
607 # no commits from other repo, ancestor_id is the commit_id2
608 ancestor_id = commit_id2
608 ancestor_id = commit_id2
609 else:
609 else:
610 output, __ = self.run_git_command(
610 output, __ = self.run_git_command(
611 ['merge-base', commit_id1, commit_id2])
611 ['merge-base', commit_id1, commit_id2])
612 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
612 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
613
613
614 return ancestor_id
614 return ancestor_id
615
615
616 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
616 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
617 repo1 = self
617 repo1 = self
618 ancestor_id = None
618 ancestor_id = None
619
619
620 if commit_id1 == commit_id2:
620 if commit_id1 == commit_id2:
621 commits = []
621 commits = []
622 elif repo1 != repo2:
622 elif repo1 != repo2:
623 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
623 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
624 repo2.path)
624 repo2.path)
625 commits = [
625 commits = [
626 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
626 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
627 for commit_id in reversed(missing_ids)]
627 for commit_id in reversed(missing_ids)]
628 else:
628 else:
629 output, __ = repo1.run_git_command(
629 output, __ = repo1.run_git_command(
630 ['log', '--reverse', '--pretty=format: %H', '-s',
630 ['log', '--reverse', '--pretty=format: %H', '-s',
631 '%s..%s' % (commit_id1, commit_id2)])
631 '%s..%s' % (commit_id1, commit_id2)])
632 commits = [
632 commits = [
633 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
633 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
634 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
634 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
635
635
636 return commits
636 return commits
637
637
638 @LazyProperty
638 @LazyProperty
639 def in_memory_commit(self):
639 def in_memory_commit(self):
640 """
640 """
641 Returns ``GitInMemoryCommit`` object for this repository.
641 Returns ``GitInMemoryCommit`` object for this repository.
642 """
642 """
643 return GitInMemoryCommit(self)
643 return GitInMemoryCommit(self)
644
644
645 def pull(self, url, commit_ids=None, update_after=False):
645 def pull(self, url, commit_ids=None, update_after=False):
646 """
646 """
647 Pull changes from external location. Pull is different in GIT
647 Pull changes from external location. Pull is different in GIT
648 that fetch since it's doing a checkout
648 that fetch since it's doing a checkout
649
649
650 :param commit_ids: Optional. Can be set to a list of commit ids
650 :param commit_ids: Optional. Can be set to a list of commit ids
651 which shall be pulled from the other repository.
651 which shall be pulled from the other repository.
652 """
652 """
653 refs = None
653 refs = None
654 if commit_ids is not None:
654 if commit_ids is not None:
655 remote_refs = self._remote.get_remote_refs(url)
655 remote_refs = self._remote.get_remote_refs(url)
656 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
656 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
657 self._remote.pull(url, refs=refs, update_after=update_after)
657 self._remote.pull(url, refs=refs, update_after=update_after)
658 self._remote.invalidate_vcs_cache()
658 self._remote.invalidate_vcs_cache()
659
659
660 def fetch(self, url, commit_ids=None):
660 def fetch(self, url, commit_ids=None):
661 """
661 """
662 Fetch all git objects from external location.
662 Fetch all git objects from external location.
663 """
663 """
664 self._remote.sync_fetch(url, refs=commit_ids)
664 self._remote.sync_fetch(url, refs=commit_ids)
665 self._remote.invalidate_vcs_cache()
665 self._remote.invalidate_vcs_cache()
666
666
667 def push(self, url):
667 def push(self, url):
668 refs = None
668 refs = None
669 self._remote.sync_push(url, refs=refs)
669 self._remote.sync_push(url, refs=refs)
670
670
671 def set_refs(self, ref_name, commit_id):
671 def set_refs(self, ref_name, commit_id):
672 self._remote.set_refs(ref_name, commit_id)
672 self._remote.set_refs(ref_name, commit_id)
673
673
674 def remove_ref(self, ref_name):
674 def remove_ref(self, ref_name):
675 self._remote.remove_ref(ref_name)
675 self._remote.remove_ref(ref_name)
676
676
677 def _update_server_info(self):
677 def _update_server_info(self):
678 """
678 """
679 runs gits update-server-info command in this repo instance
679 runs gits update-server-info command in this repo instance
680 """
680 """
681 self._remote.update_server_info()
681 self._remote.update_server_info()
682
682
683 def _current_branch(self):
683 def _current_branch(self):
684 """
684 """
685 Return the name of the current branch.
685 Return the name of the current branch.
686
686
687 It only works for non bare repositories (i.e. repositories with a
687 It only works for non bare repositories (i.e. repositories with a
688 working copy)
688 working copy)
689 """
689 """
690 if self.bare:
690 if self.bare:
691 raise RepositoryError('Bare git repos do not have active branches')
691 raise RepositoryError('Bare git repos do not have active branches')
692
692
693 if self.is_empty():
693 if self.is_empty():
694 return None
694 return None
695
695
696 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
696 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
697 return stdout.strip()
697 return stdout.strip()
698
698
699 def _checkout(self, branch_name, create=False, force=False):
699 def _checkout(self, branch_name, create=False, force=False):
700 """
700 """
701 Checkout a branch in the working directory.
701 Checkout a branch in the working directory.
702
702
703 It tries to create the branch if create is True, failing if the branch
703 It tries to create the branch if create is True, failing if the branch
704 already exists.
704 already exists.
705
705
706 It only works for non bare repositories (i.e. repositories with a
706 It only works for non bare repositories (i.e. repositories with a
707 working copy)
707 working copy)
708 """
708 """
709 if self.bare:
709 if self.bare:
710 raise RepositoryError('Cannot checkout branches in a bare git repo')
710 raise RepositoryError('Cannot checkout branches in a bare git repo')
711
711
712 cmd = ['checkout']
712 cmd = ['checkout']
713 if force:
713 if force:
714 cmd.append('-f')
714 cmd.append('-f')
715 if create:
715 if create:
716 cmd.append('-b')
716 cmd.append('-b')
717 cmd.append(branch_name)
717 cmd.append(branch_name)
718 self.run_git_command(cmd, fail_on_stderr=False)
718 self.run_git_command(cmd, fail_on_stderr=False)
719
719
720 def _identify(self):
720 def _identify(self):
721 """
721 """
722 Return the current state of the working directory.
722 Return the current state of the working directory.
723 """
723 """
724 if self.bare:
724 if self.bare:
725 raise RepositoryError('Bare git repos do not have active branches')
725 raise RepositoryError('Bare git repos do not have active branches')
726
726
727 if self.is_empty():
727 if self.is_empty():
728 return None
728 return None
729
729
730 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
730 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
731 return stdout.strip()
731 return stdout.strip()
732
732
733 def _local_clone(self, clone_path, branch_name, source_branch=None):
733 def _local_clone(self, clone_path, branch_name, source_branch=None):
734 """
734 """
735 Create a local clone of the current repo.
735 Create a local clone of the current repo.
736 """
736 """
737 # N.B.(skreft): the --branch option is required as otherwise the shallow
737 # N.B.(skreft): the --branch option is required as otherwise the shallow
738 # clone will only fetch the active branch.
738 # clone will only fetch the active branch.
739 cmd = ['clone', '--branch', branch_name,
739 cmd = ['clone', '--branch', branch_name,
740 self.path, os.path.abspath(clone_path)]
740 self.path, os.path.abspath(clone_path)]
741
741
742 self.run_git_command(cmd, fail_on_stderr=False)
742 self.run_git_command(cmd, fail_on_stderr=False)
743
743
744 # if we get the different source branch, make sure we also fetch it for
744 # if we get the different source branch, make sure we also fetch it for
745 # merge conditions
745 # merge conditions
746 if source_branch and source_branch != branch_name:
746 if source_branch and source_branch != branch_name:
747 # check if the ref exists.
747 # check if the ref exists.
748 shadow_repo = GitRepository(os.path.abspath(clone_path))
748 shadow_repo = GitRepository(os.path.abspath(clone_path))
749 if shadow_repo.get_remote_ref(source_branch):
749 if shadow_repo.get_remote_ref(source_branch):
750 cmd = ['fetch', self.path, source_branch]
750 cmd = ['fetch', self.path, source_branch]
751 self.run_git_command(cmd, fail_on_stderr=False)
751 self.run_git_command(cmd, fail_on_stderr=False)
752
752
753 def _local_fetch(self, repository_path, branch_name, use_origin=False):
753 def _local_fetch(self, repository_path, branch_name, use_origin=False):
754 """
754 """
755 Fetch a branch from a local repository.
755 Fetch a branch from a local repository.
756 """
756 """
757 repository_path = os.path.abspath(repository_path)
757 repository_path = os.path.abspath(repository_path)
758 if repository_path == self.path:
758 if repository_path == self.path:
759 raise ValueError('Cannot fetch from the same repository')
759 raise ValueError('Cannot fetch from the same repository')
760
760
761 if use_origin:
761 if use_origin:
762 branch_name = '+{branch}:refs/heads/{branch}'.format(
762 branch_name = '+{branch}:refs/heads/{branch}'.format(
763 branch=branch_name)
763 branch=branch_name)
764
764
765 cmd = ['fetch', '--no-tags', '--update-head-ok',
765 cmd = ['fetch', '--no-tags', '--update-head-ok',
766 repository_path, branch_name]
766 repository_path, branch_name]
767 self.run_git_command(cmd, fail_on_stderr=False)
767 self.run_git_command(cmd, fail_on_stderr=False)
768
768
769 def _local_reset(self, branch_name):
769 def _local_reset(self, branch_name):
770 branch_name = '{}'.format(branch_name)
770 branch_name = '{}'.format(branch_name)
771 cmd = ['reset', '--hard', branch_name, '--']
771 cmd = ['reset', '--hard', branch_name, '--']
772 self.run_git_command(cmd, fail_on_stderr=False)
772 self.run_git_command(cmd, fail_on_stderr=False)
773
773
774 def _last_fetch_heads(self):
774 def _last_fetch_heads(self):
775 """
775 """
776 Return the last fetched heads that need merging.
776 Return the last fetched heads that need merging.
777
777
778 The algorithm is defined at
778 The algorithm is defined at
779 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
779 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
780 """
780 """
781 if not self.bare:
781 if not self.bare:
782 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
782 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
783 else:
783 else:
784 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
784 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
785
785
786 heads = []
786 heads = []
787 with open(fetch_heads_path) as f:
787 with open(fetch_heads_path) as f:
788 for line in f:
788 for line in f:
789 if ' not-for-merge ' in line:
789 if ' not-for-merge ' in line:
790 continue
790 continue
791 line = re.sub('\t.*', '', line, flags=re.DOTALL)
791 line = re.sub('\t.*', '', line, flags=re.DOTALL)
792 heads.append(line)
792 heads.append(line)
793
793
794 return heads
794 return heads
795
795
796 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
796 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
797 return GitRepository(shadow_repository_path)
797 return GitRepository(shadow_repository_path)
798
798
799 def _local_pull(self, repository_path, branch_name, ff_only=True):
799 def _local_pull(self, repository_path, branch_name, ff_only=True):
800 """
800 """
801 Pull a branch from a local repository.
801 Pull a branch from a local repository.
802 """
802 """
803 if self.bare:
803 if self.bare:
804 raise RepositoryError('Cannot pull into a bare git repository')
804 raise RepositoryError('Cannot pull into a bare git repository')
805 # N.B.(skreft): The --ff-only option is to make sure this is a
805 # N.B.(skreft): The --ff-only option is to make sure this is a
806 # fast-forward (i.e., we are only pulling new changes and there are no
806 # fast-forward (i.e., we are only pulling new changes and there are no
807 # conflicts with our current branch)
807 # conflicts with our current branch)
808 # Additionally, that option needs to go before --no-tags, otherwise git
808 # Additionally, that option needs to go before --no-tags, otherwise git
809 # pull complains about it being an unknown flag.
809 # pull complains about it being an unknown flag.
810 cmd = ['pull']
810 cmd = ['pull']
811 if ff_only:
811 if ff_only:
812 cmd.append('--ff-only')
812 cmd.append('--ff-only')
813 cmd.extend(['--no-tags', repository_path, branch_name])
813 cmd.extend(['--no-tags', repository_path, branch_name])
814 self.run_git_command(cmd, fail_on_stderr=False)
814 self.run_git_command(cmd, fail_on_stderr=False)
815
815
816 def _local_merge(self, merge_message, user_name, user_email, heads):
816 def _local_merge(self, merge_message, user_name, user_email, heads):
817 """
817 """
818 Merge the given head into the checked out branch.
818 Merge the given head into the checked out branch.
819
819
820 It will force a merge commit.
820 It will force a merge commit.
821
821
822 Currently it raises an error if the repo is empty, as it is not possible
822 Currently it raises an error if the repo is empty, as it is not possible
823 to create a merge commit in an empty repo.
823 to create a merge commit in an empty repo.
824
824
825 :param merge_message: The message to use for the merge commit.
825 :param merge_message: The message to use for the merge commit.
826 :param heads: the heads to merge.
826 :param heads: the heads to merge.
827 """
827 """
828 if self.bare:
828 if self.bare:
829 raise RepositoryError('Cannot merge into a bare git repository')
829 raise RepositoryError('Cannot merge into a bare git repository')
830
830
831 if not heads:
831 if not heads:
832 return
832 return
833
833
834 if self.is_empty():
834 if self.is_empty():
835 # TODO(skreft): do somehting more robust in this case.
835 # TODO(skreft): do somehting more robust in this case.
836 raise RepositoryError(
836 raise RepositoryError(
837 'Do not know how to merge into empty repositories yet')
837 'Do not know how to merge into empty repositories yet')
838
838
839 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
839 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
840 # commit message. We also specify the user who is doing the merge.
840 # commit message. We also specify the user who is doing the merge.
841 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
841 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
842 '-c', 'user.email=%s' % safe_str(user_email),
842 '-c', 'user.email=%s' % safe_str(user_email),
843 'merge', '--no-ff', '-m', safe_str(merge_message)]
843 'merge', '--no-ff', '-m', safe_str(merge_message)]
844 cmd.extend(heads)
844 cmd.extend(heads)
845 try:
845 try:
846 output = self.run_git_command(cmd, fail_on_stderr=False)
846 output = self.run_git_command(cmd, fail_on_stderr=False)
847 except RepositoryError:
847 except RepositoryError:
848 # Cleanup any merge leftovers
848 # Cleanup any merge leftovers
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850 raise
850 raise
851
851
852 def _local_push(
852 def _local_push(
853 self, source_branch, repository_path, target_branch,
853 self, source_branch, repository_path, target_branch,
854 enable_hooks=False, rc_scm_data=None):
854 enable_hooks=False, rc_scm_data=None):
855 """
855 """
856 Push the source_branch to the given repository and target_branch.
856 Push the source_branch to the given repository and target_branch.
857
857
858 Currently it if the target_branch is not master and the target repo is
858 Currently it if the target_branch is not master and the target repo is
859 empty, the push will work, but then GitRepository won't be able to find
859 empty, the push will work, but then GitRepository won't be able to find
860 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
860 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
861 pointing to master, which does not exist).
861 pointing to master, which does not exist).
862
862
863 It does not run the hooks in the target repo.
863 It does not run the hooks in the target repo.
864 """
864 """
865 # TODO(skreft): deal with the case in which the target repo is empty,
865 # TODO(skreft): deal with the case in which the target repo is empty,
866 # and the target_branch is not master.
866 # and the target_branch is not master.
867 target_repo = GitRepository(repository_path)
867 target_repo = GitRepository(repository_path)
868 if (not target_repo.bare and
868 if (not target_repo.bare and
869 target_repo._current_branch() == target_branch):
869 target_repo._current_branch() == target_branch):
870 # Git prevents pushing to the checked out branch, so simulate it by
870 # Git prevents pushing to the checked out branch, so simulate it by
871 # pulling into the target repository.
871 # pulling into the target repository.
872 target_repo._local_pull(self.path, source_branch)
872 target_repo._local_pull(self.path, source_branch)
873 else:
873 else:
874 cmd = ['push', os.path.abspath(repository_path),
874 cmd = ['push', os.path.abspath(repository_path),
875 '%s:%s' % (source_branch, target_branch)]
875 '%s:%s' % (source_branch, target_branch)]
876 gitenv = {}
876 gitenv = {}
877 if rc_scm_data:
877 if rc_scm_data:
878 gitenv.update({'RC_SCM_DATA': rc_scm_data})
878 gitenv.update({'RC_SCM_DATA': rc_scm_data})
879
879
880 if not enable_hooks:
880 if not enable_hooks:
881 gitenv['RC_SKIP_HOOKS'] = '1'
881 gitenv['RC_SKIP_HOOKS'] = '1'
882 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
882 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
883
883
884 def _get_new_pr_branch(self, source_branch, target_branch):
884 def _get_new_pr_branch(self, source_branch, target_branch):
885 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
885 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
886 pr_branches = []
886 pr_branches = []
887 for branch in self.branches:
887 for branch in self.branches:
888 if branch.startswith(prefix):
888 if branch.startswith(prefix):
889 pr_branches.append(int(branch[len(prefix):]))
889 pr_branches.append(int(branch[len(prefix):]))
890
890
891 if not pr_branches:
891 if not pr_branches:
892 branch_id = 0
892 branch_id = 0
893 else:
893 else:
894 branch_id = max(pr_branches) + 1
894 branch_id = max(pr_branches) + 1
895
895
896 return '%s%d' % (prefix, branch_id)
896 return '%s%d' % (prefix, branch_id)
897
897
898 def _maybe_prepare_merge_workspace(
898 def _maybe_prepare_merge_workspace(
899 self, repo_id, workspace_id, target_ref, source_ref):
899 self, repo_id, workspace_id, target_ref, source_ref):
900 shadow_repository_path = self._get_shadow_repository_path(
900 shadow_repository_path = self._get_shadow_repository_path(
901 repo_id, workspace_id)
901 repo_id, workspace_id)
902 if not os.path.exists(shadow_repository_path):
902 if not os.path.exists(shadow_repository_path):
903 self._local_clone(
903 self._local_clone(
904 shadow_repository_path, target_ref.name, source_ref.name)
904 shadow_repository_path, target_ref.name, source_ref.name)
905 log.debug(
905 log.debug(
906 'Prepared shadow repository in %s', shadow_repository_path)
906 'Prepared shadow repository in %s', shadow_repository_path)
907
907
908 return shadow_repository_path
908 return shadow_repository_path
909
909
910 def _merge_repo(self, repo_id, workspace_id, target_ref,
910 def _merge_repo(self, repo_id, workspace_id, target_ref,
911 source_repo, source_ref, merge_message,
911 source_repo, source_ref, merge_message,
912 merger_name, merger_email, dry_run=False,
912 merger_name, merger_email, dry_run=False,
913 use_rebase=False, close_branch=False):
913 use_rebase=False, close_branch=False):
914 if target_ref.commit_id != self.branches[target_ref.name]:
914 if target_ref.commit_id != self.branches[target_ref.name]:
915 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
915 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
916 target_ref.commit_id, self.branches[target_ref.name])
916 target_ref.commit_id, self.branches[target_ref.name])
917 return MergeResponse(
917 return MergeResponse(
918 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
918 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
919
919
920 shadow_repository_path = self._maybe_prepare_merge_workspace(
920 shadow_repository_path = self._maybe_prepare_merge_workspace(
921 repo_id, workspace_id, target_ref, source_ref)
921 repo_id, workspace_id, target_ref, source_ref)
922 shadow_repo = self._get_shadow_instance(shadow_repository_path)
922 shadow_repo = self._get_shadow_instance(shadow_repository_path)
923
923
924 # checkout source, if it's different. Otherwise we could not
924 # checkout source, if it's different. Otherwise we could not
925 # fetch proper commits for merge testing
925 # fetch proper commits for merge testing
926 if source_ref.name != target_ref.name:
926 if source_ref.name != target_ref.name:
927 if shadow_repo.get_remote_ref(source_ref.name):
927 if shadow_repo.get_remote_ref(source_ref.name):
928 shadow_repo._checkout(source_ref.name, force=True)
928 shadow_repo._checkout(source_ref.name, force=True)
929
929
930 # checkout target, and fetch changes
930 # checkout target, and fetch changes
931 shadow_repo._checkout(target_ref.name, force=True)
931 shadow_repo._checkout(target_ref.name, force=True)
932
932
933 # fetch/reset pull the target, in case it is changed
933 # fetch/reset pull the target, in case it is changed
934 # this handles even force changes
934 # this handles even force changes
935 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
935 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
936 shadow_repo._local_reset(target_ref.name)
936 shadow_repo._local_reset(target_ref.name)
937
937
938 # Need to reload repo to invalidate the cache, or otherwise we cannot
938 # Need to reload repo to invalidate the cache, or otherwise we cannot
939 # retrieve the last target commit.
939 # retrieve the last target commit.
940 shadow_repo = self._get_shadow_instance(shadow_repository_path)
940 shadow_repo = self._get_shadow_instance(shadow_repository_path)
941 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
941 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
942 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
942 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
943 target_ref, target_ref.commit_id,
943 target_ref, target_ref.commit_id,
944 shadow_repo.branches[target_ref.name])
944 shadow_repo.branches[target_ref.name])
945 return MergeResponse(
945 return MergeResponse(
946 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
946 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
947
947
948 # calculate new branch
948 # calculate new branch
949 pr_branch = shadow_repo._get_new_pr_branch(
949 pr_branch = shadow_repo._get_new_pr_branch(
950 source_ref.name, target_ref.name)
950 source_ref.name, target_ref.name)
951 log.debug('using pull-request merge branch: `%s`', pr_branch)
951 log.debug('using pull-request merge branch: `%s`', pr_branch)
952 # checkout to temp branch, and fetch changes
952 # checkout to temp branch, and fetch changes
953 shadow_repo._checkout(pr_branch, create=True)
953 shadow_repo._checkout(pr_branch, create=True)
954 try:
954 try:
955 shadow_repo._local_fetch(source_repo.path, source_ref.name)
955 shadow_repo._local_fetch(source_repo.path, source_ref.name)
956 except RepositoryError:
956 except RepositoryError:
957 log.exception('Failure when doing local fetch on git shadow repo')
957 log.exception('Failure when doing local fetch on git shadow repo')
958 return MergeResponse(
958 return MergeResponse(
959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
960
960
961 merge_ref = None
961 merge_ref = None
962 merge_failure_reason = MergeFailureReason.NONE
962 merge_failure_reason = MergeFailureReason.NONE
963 try:
963 try:
964 shadow_repo._local_merge(merge_message, merger_name, merger_email,
964 shadow_repo._local_merge(merge_message, merger_name, merger_email,
965 [source_ref.commit_id])
965 [source_ref.commit_id])
966 merge_possible = True
966 merge_possible = True
967
967
968 # Need to reload repo to invalidate the cache, or otherwise we
968 # Need to reload repo to invalidate the cache, or otherwise we
969 # cannot retrieve the merge commit.
969 # cannot retrieve the merge commit.
970 shadow_repo = GitRepository(shadow_repository_path)
970 shadow_repo = GitRepository(shadow_repository_path)
971 merge_commit_id = shadow_repo.branches[pr_branch]
971 merge_commit_id = shadow_repo.branches[pr_branch]
972
972
973 # Set a reference pointing to the merge commit. This reference may
973 # Set a reference pointing to the merge commit. This reference may
974 # be used to easily identify the last successful merge commit in
974 # be used to easily identify the last successful merge commit in
975 # the shadow repository.
975 # the shadow repository.
976 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
976 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
977 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
977 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
978 except RepositoryError:
978 except RepositoryError:
979 log.exception('Failure when doing local merge on git shadow repo')
979 log.exception('Failure when doing local merge on git shadow repo')
980 merge_possible = False
980 merge_possible = False
981 merge_failure_reason = MergeFailureReason.MERGE_FAILED
981 merge_failure_reason = MergeFailureReason.MERGE_FAILED
982
982
983 if merge_possible and not dry_run:
983 if merge_possible and not dry_run:
984 try:
984 try:
985 shadow_repo._local_push(
985 shadow_repo._local_push(
986 pr_branch, self.path, target_ref.name, enable_hooks=True,
986 pr_branch, self.path, target_ref.name, enable_hooks=True,
987 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
987 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
988 merge_succeeded = True
988 merge_succeeded = True
989 except RepositoryError:
989 except RepositoryError:
990 log.exception(
990 log.exception(
991 'Failure when doing local push on git shadow repo')
991 'Failure when doing local push on git shadow repo')
992 merge_succeeded = False
992 merge_succeeded = False
993 merge_failure_reason = MergeFailureReason.PUSH_FAILED
993 merge_failure_reason = MergeFailureReason.PUSH_FAILED
994 else:
994 else:
995 merge_succeeded = False
995 merge_succeeded = False
996
996
997 return MergeResponse(
997 return MergeResponse(
998 merge_possible, merge_succeeded, merge_ref,
998 merge_possible, merge_succeeded, merge_ref,
999 merge_failure_reason)
999 merge_failure_reason)
@@ -1,924 +1,924 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 do_workspace_checkout=False, with_wire=None, bare=False):
60 do_workspace_checkout=False, with_wire=None, bare=False):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param do_workspace_checkout=False: sets update of working copy after
70 :param do_workspace_checkout=False: sets update of working copy after
71 making a clone
71 making a clone
72 :param bare: not used, compatible with other VCS
72 :param bare: not used, compatible with other VCS
73 """
73 """
74
74
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
77 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
78 # special requirements
78 # special requirements
79 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
80 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
81 self.with_wire = with_wire
81 self.with_wire = with_wire
82
82
83 self._init_repo(create, src_url, do_workspace_checkout)
83 self._init_repo(create, src_url, do_workspace_checkout)
84
84
85 # caches
85 # caches
86 self._commit_ids = {}
86 self._commit_ids = {}
87
87
88 @LazyProperty
88 @LazyProperty
89 def _remote(self):
89 def _remote(self):
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91
91
92 @LazyProperty
92 @LazyProperty
93 def commit_ids(self):
93 def commit_ids(self):
94 """
94 """
95 Returns list of commit ids, in ascending order. Being lazy
95 Returns list of commit ids, in ascending order. Being lazy
96 attribute allows external tools to inject shas from cache.
96 attribute allows external tools to inject shas from cache.
97 """
97 """
98 commit_ids = self._get_all_commit_ids()
98 commit_ids = self._get_all_commit_ids()
99 self._rebuild_cache(commit_ids)
99 self._rebuild_cache(commit_ids)
100 return commit_ids
100 return commit_ids
101
101
102 def _rebuild_cache(self, commit_ids):
102 def _rebuild_cache(self, commit_ids):
103 self._commit_ids = dict((commit_id, index)
103 self._commit_ids = dict((commit_id, index)
104 for index, commit_id in enumerate(commit_ids))
104 for index, commit_id in enumerate(commit_ids))
105
105
106 @LazyProperty
106 @LazyProperty
107 def branches(self):
107 def branches(self):
108 return self._get_branches()
108 return self._get_branches()
109
109
110 @LazyProperty
110 @LazyProperty
111 def branches_closed(self):
111 def branches_closed(self):
112 return self._get_branches(active=False, closed=True)
112 return self._get_branches(active=False, closed=True)
113
113
114 @LazyProperty
114 @LazyProperty
115 def branches_all(self):
115 def branches_all(self):
116 all_branches = {}
116 all_branches = {}
117 all_branches.update(self.branches)
117 all_branches.update(self.branches)
118 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
119 return all_branches
119 return all_branches
120
120
121 def _get_branches(self, active=True, closed=False):
121 def _get_branches(self, active=True, closed=False):
122 """
122 """
123 Gets branches for this repository
123 Gets branches for this repository
124 Returns only not closed active branches by default
124 Returns only not closed active branches by default
125
125
126 :param active: return also active branches
126 :param active: return also active branches
127 :param closed: return also closed branches
127 :param closed: return also closed branches
128
128
129 """
129 """
130 if self.is_empty():
130 if self.is_empty():
131 return {}
131 return {}
132
132
133 def get_name(ctx):
133 def get_name(ctx):
134 return ctx[0]
134 return ctx[0]
135
135
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 self._remote.branches(active, closed).items()]
137 self._remote.branches(active, closed).items()]
138
138
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140
140
141 @LazyProperty
141 @LazyProperty
142 def tags(self):
142 def tags(self):
143 """
143 """
144 Gets tags for this repository
144 Gets tags for this repository
145 """
145 """
146 return self._get_tags()
146 return self._get_tags()
147
147
148 def _get_tags(self):
148 def _get_tags(self):
149 if self.is_empty():
149 if self.is_empty():
150 return {}
150 return {}
151
151
152 def get_name(ctx):
152 def get_name(ctx):
153 return ctx[0]
153 return ctx[0]
154
154
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 self._remote.tags().items()]
156 self._remote.tags().items()]
157
157
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159
159
160 def tag(self, name, user, commit_id=None, message=None, date=None,
160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 **kwargs):
161 **kwargs):
162 """
162 """
163 Creates and returns a tag for the given ``commit_id``.
163 Creates and returns a tag for the given ``commit_id``.
164
164
165 :param name: name for new tag
165 :param name: name for new tag
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param commit_id: commit id for which new tag would be created
167 :param commit_id: commit id for which new tag would be created
168 :param message: message of the tag's commit
168 :param message: message of the tag's commit
169 :param date: date of tag's commit
169 :param date: date of tag's commit
170
170
171 :raises TagAlreadyExistError: if tag with same name already exists
171 :raises TagAlreadyExistError: if tag with same name already exists
172 """
172 """
173 if name in self.tags:
173 if name in self.tags:
174 raise TagAlreadyExistError("Tag %s already exists" % name)
174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 commit = self.get_commit(commit_id=commit_id)
175 commit = self.get_commit(commit_id=commit_id)
176 local = kwargs.setdefault('local', False)
176 local = kwargs.setdefault('local', False)
177
177
178 if message is None:
178 if message is None:
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180
180
181 date, tz = date_to_timestamp_plus_offset(date)
181 date, tz = date_to_timestamp_plus_offset(date)
182
182
183 self._remote.tag(
183 self._remote.tag(
184 name, commit.raw_id, message, local, user, date, tz)
184 name, commit.raw_id, message, local, user, date, tz)
185 self._remote.invalidate_vcs_cache()
185 self._remote.invalidate_vcs_cache()
186
186
187 # Reinitialize tags
187 # Reinitialize tags
188 self.tags = self._get_tags()
188 self.tags = self._get_tags()
189 tag_id = self.tags[name]
189 tag_id = self.tags[name]
190
190
191 return self.get_commit(commit_id=tag_id)
191 return self.get_commit(commit_id=tag_id)
192
192
193 def remove_tag(self, name, user, message=None, date=None):
193 def remove_tag(self, name, user, message=None, date=None):
194 """
194 """
195 Removes tag with the given `name`.
195 Removes tag with the given `name`.
196
196
197 :param name: name of the tag to be removed
197 :param name: name of the tag to be removed
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param message: message of the tag's removal commit
199 :param message: message of the tag's removal commit
200 :param date: date of tag's removal commit
200 :param date: date of tag's removal commit
201
201
202 :raises TagDoesNotExistError: if tag with given name does not exists
202 :raises TagDoesNotExistError: if tag with given name does not exists
203 """
203 """
204 if name not in self.tags:
204 if name not in self.tags:
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 if message is None:
206 if message is None:
207 message = "Removed tag %s" % name
207 message = "Removed tag %s" % name
208 local = False
208 local = False
209
209
210 date, tz = date_to_timestamp_plus_offset(date)
210 date, tz = date_to_timestamp_plus_offset(date)
211
211
212 self._remote.tag(name, nullid, message, local, user, date, tz)
212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 self._remote.invalidate_vcs_cache()
213 self._remote.invalidate_vcs_cache()
214 self.tags = self._get_tags()
214 self.tags = self._get_tags()
215
215
216 @LazyProperty
216 @LazyProperty
217 def bookmarks(self):
217 def bookmarks(self):
218 """
218 """
219 Gets bookmarks for this repository
219 Gets bookmarks for this repository
220 """
220 """
221 return self._get_bookmarks()
221 return self._get_bookmarks()
222
222
223 def _get_bookmarks(self):
223 def _get_bookmarks(self):
224 if self.is_empty():
224 if self.is_empty():
225 return {}
225 return {}
226
226
227 def get_name(ctx):
227 def get_name(ctx):
228 return ctx[0]
228 return ctx[0]
229
229
230 _bookmarks = [
230 _bookmarks = [
231 (safe_unicode(n), hexlify(h)) for n, h in
231 (safe_unicode(n), hexlify(h)) for n, h in
232 self._remote.bookmarks().items()]
232 self._remote.bookmarks().items()]
233
233
234 return OrderedDict(sorted(_bookmarks, key=get_name))
234 return OrderedDict(sorted(_bookmarks, key=get_name))
235
235
236 def _get_all_commit_ids(self):
236 def _get_all_commit_ids(self):
237 return self._remote.get_all_commit_ids('visible')
237 return self._remote.get_all_commit_ids('visible')
238
238
239 def get_diff(
239 def get_diff(
240 self, commit1, commit2, path='', ignore_whitespace=False,
240 self, commit1, commit2, path='', ignore_whitespace=False,
241 context=3, path1=None):
241 context=3, path1=None):
242 """
242 """
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 `commit2` since `commit1`.
244 `commit2` since `commit1`.
245
245
246 :param commit1: Entry point from which diff is shown. Can be
246 :param commit1: Entry point from which diff is shown. Can be
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 the changes since empty state of the repository until `commit2`
248 the changes since empty state of the repository until `commit2`
249 :param commit2: Until which commit changes should be shown.
249 :param commit2: Until which commit changes should be shown.
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 changes. Defaults to ``False``.
251 changes. Defaults to ``False``.
252 :param context: How many lines before/after changed lines should be
252 :param context: How many lines before/after changed lines should be
253 shown. Defaults to ``3``.
253 shown. Defaults to ``3``.
254 """
254 """
255 self._validate_diff_commits(commit1, commit2)
255 self._validate_diff_commits(commit1, commit2)
256 if path1 is not None and path1 != path:
256 if path1 is not None and path1 != path:
257 raise ValueError("Diff of two different paths not supported.")
257 raise ValueError("Diff of two different paths not supported.")
258
258
259 if path:
259 if path:
260 file_filter = [self.path, path]
260 file_filter = [self.path, path]
261 else:
261 else:
262 file_filter = None
262 file_filter = None
263
263
264 diff = self._remote.diff(
264 diff = self._remote.diff(
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 opt_git=True, opt_ignorews=ignore_whitespace,
266 opt_git=True, opt_ignorews=ignore_whitespace,
267 context=context)
267 context=context)
268 return MercurialDiff(diff)
268 return MercurialDiff(diff)
269
269
270 def strip(self, commit_id, branch=None):
270 def strip(self, commit_id, branch=None):
271 self._remote.strip(commit_id, update=False, backup="none")
271 self._remote.strip(commit_id, update=False, backup="none")
272
272
273 self._remote.invalidate_vcs_cache()
273 self._remote.invalidate_vcs_cache()
274 self.commit_ids = self._get_all_commit_ids()
274 self.commit_ids = self._get_all_commit_ids()
275 self._rebuild_cache(self.commit_ids)
275 self._rebuild_cache(self.commit_ids)
276
276
277 def verify(self):
277 def verify(self):
278 verify = self._remote.verify()
278 verify = self._remote.verify()
279
279
280 self._remote.invalidate_vcs_cache()
280 self._remote.invalidate_vcs_cache()
281 return verify
281 return verify
282
282
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 if commit_id1 == commit_id2:
284 if commit_id1 == commit_id2:
285 return commit_id1
285 return commit_id1
286
286
287 ancestors = self._remote.revs_from_revspec(
287 ancestors = self._remote.revs_from_revspec(
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 other_path=repo2.path)
289 other_path=repo2.path)
290 return repo2[ancestors[0]].raw_id if ancestors else None
290 return repo2[ancestors[0]].raw_id if ancestors else None
291
291
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 if commit_id1 == commit_id2:
293 if commit_id1 == commit_id2:
294 commits = []
294 commits = []
295 else:
295 else:
296 if merge:
296 if merge:
297 indexes = self._remote.revs_from_revspec(
297 indexes = self._remote.revs_from_revspec(
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 else:
300 else:
301 indexes = self._remote.revs_from_revspec(
301 indexes = self._remote.revs_from_revspec(
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 commit_id1, other_path=repo2.path)
303 commit_id1, other_path=repo2.path)
304
304
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 for idx in indexes]
306 for idx in indexes]
307
307
308 return commits
308 return commits
309
309
310 @staticmethod
310 @staticmethod
311 def check_url(url, config):
311 def check_url(url, config):
312 """
312 """
313 Function will check given url and try to verify if it's a valid
313 Function will check given url and try to verify if it's a valid
314 link. Sometimes it may happened that mercurial will issue basic
314 link. Sometimes it may happened that mercurial will issue basic
315 auth request that can cause whole API to hang when used from python
315 auth request that can cause whole API to hang when used from python
316 or other external calls.
316 or other external calls.
317
317
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 when the return code is non 200
319 when the return code is non 200
320 """
320 """
321 # check first if it's not an local url
321 # check first if it's not an local url
322 if os.path.isdir(url) or url.startswith('file:'):
322 if os.path.isdir(url) or url.startswith('file:'):
323 return True
323 return True
324
324
325 # Request the _remote to verify the url
325 # Request the _remote to verify the url
326 return connection.Hg.check_url(url, config.serialize())
326 return connection.Hg.check_url(url, config.serialize())
327
327
328 @staticmethod
328 @staticmethod
329 def is_valid_repository(path):
329 def is_valid_repository(path):
330 return os.path.isdir(os.path.join(path, '.hg'))
330 return os.path.isdir(os.path.join(path, '.hg'))
331
331
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 """
333 """
334 Function will check for mercurial repository in given path. If there
334 Function will check for mercurial repository in given path. If there
335 is no repository in that path it will raise an exception unless
335 is no repository in that path it will raise an exception unless
336 `create` parameter is set to True - in that case repository would
336 `create` parameter is set to True - in that case repository would
337 be created.
337 be created.
338
338
339 If `src_url` is given, would try to clone repository from the
339 If `src_url` is given, would try to clone repository from the
340 location at given clone_point. Additionally it'll make update to
340 location at given clone_point. Additionally it'll make update to
341 working copy accordingly to `do_workspace_checkout` flag.
341 working copy accordingly to `do_workspace_checkout` flag.
342 """
342 """
343 if create and os.path.exists(self.path):
343 if create and os.path.exists(self.path):
344 raise RepositoryError(
344 raise RepositoryError(
345 "Cannot create repository at %s, location already exist"
345 "Cannot create repository at %s, location already exist"
346 % self.path)
346 % self.path)
347
347
348 if src_url:
348 if src_url:
349 url = str(self._get_url(src_url))
349 url = str(self._get_url(src_url))
350 MercurialRepository.check_url(url, self.config)
350 MercurialRepository.check_url(url, self.config)
351
351
352 self._remote.clone(url, self.path, do_workspace_checkout)
352 self._remote.clone(url, self.path, do_workspace_checkout)
353
353
354 # Don't try to create if we've already cloned repo
354 # Don't try to create if we've already cloned repo
355 create = False
355 create = False
356
356
357 if create:
357 if create:
358 os.makedirs(self.path, mode=0755)
358 os.makedirs(self.path, mode=0o755)
359
359
360 self._remote.localrepository(create)
360 self._remote.localrepository(create)
361
361
362 @LazyProperty
362 @LazyProperty
363 def in_memory_commit(self):
363 def in_memory_commit(self):
364 return MercurialInMemoryCommit(self)
364 return MercurialInMemoryCommit(self)
365
365
366 @LazyProperty
366 @LazyProperty
367 def description(self):
367 def description(self):
368 description = self._remote.get_config_value(
368 description = self._remote.get_config_value(
369 'web', 'description', untrusted=True)
369 'web', 'description', untrusted=True)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371
371
372 @LazyProperty
372 @LazyProperty
373 def contact(self):
373 def contact(self):
374 contact = (
374 contact = (
375 self._remote.get_config_value("web", "contact") or
375 self._remote.get_config_value("web", "contact") or
376 self._remote.get_config_value("ui", "username"))
376 self._remote.get_config_value("ui", "username"))
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378
378
379 @LazyProperty
379 @LazyProperty
380 def last_change(self):
380 def last_change(self):
381 """
381 """
382 Returns last change made on this repository as
382 Returns last change made on this repository as
383 `datetime.datetime` object.
383 `datetime.datetime` object.
384 """
384 """
385 try:
385 try:
386 return self.get_commit().date
386 return self.get_commit().date
387 except RepositoryError:
387 except RepositoryError:
388 tzoffset = makedate()[1]
388 tzoffset = makedate()[1]
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390
390
391 def _get_fs_mtime(self):
391 def _get_fs_mtime(self):
392 # fallback to filesystem
392 # fallback to filesystem
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 st_path = os.path.join(self.path, '.hg', "store")
394 st_path = os.path.join(self.path, '.hg', "store")
395 if os.path.exists(cl_path):
395 if os.path.exists(cl_path):
396 return os.stat(cl_path).st_mtime
396 return os.stat(cl_path).st_mtime
397 else:
397 else:
398 return os.stat(st_path).st_mtime
398 return os.stat(st_path).st_mtime
399
399
400 def _get_url(self, url):
400 def _get_url(self, url):
401 """
401 """
402 Returns normalized url. If schema is not given, would fall
402 Returns normalized url. If schema is not given, would fall
403 to filesystem
403 to filesystem
404 (``file:///``) schema.
404 (``file:///``) schema.
405 """
405 """
406 url = url.encode('utf8')
406 url = url.encode('utf8')
407 if url != 'default' and '://' not in url:
407 if url != 'default' and '://' not in url:
408 url = "file:" + urllib.pathname2url(url)
408 url = "file:" + urllib.pathname2url(url)
409 return url
409 return url
410
410
411 def get_hook_location(self):
411 def get_hook_location(self):
412 """
412 """
413 returns absolute path to location where hooks are stored
413 returns absolute path to location where hooks are stored
414 """
414 """
415 return os.path.join(self.path, '.hg', '.hgrc')
415 return os.path.join(self.path, '.hg', '.hgrc')
416
416
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
418 """
418 """
419 Returns ``MercurialCommit`` object representing repository's
419 Returns ``MercurialCommit`` object representing repository's
420 commit at the given `commit_id` or `commit_idx`.
420 commit at the given `commit_id` or `commit_idx`.
421 """
421 """
422 if self.is_empty():
422 if self.is_empty():
423 raise EmptyRepositoryError("There are no commits yet")
423 raise EmptyRepositoryError("There are no commits yet")
424
424
425 if commit_id is not None:
425 if commit_id is not None:
426 self._validate_commit_id(commit_id)
426 self._validate_commit_id(commit_id)
427 try:
427 try:
428 idx = self._commit_ids[commit_id]
428 idx = self._commit_ids[commit_id]
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
430 except KeyError:
430 except KeyError:
431 pass
431 pass
432 elif commit_idx is not None:
432 elif commit_idx is not None:
433 self._validate_commit_idx(commit_idx)
433 self._validate_commit_idx(commit_idx)
434 try:
434 try:
435 id_ = self.commit_ids[commit_idx]
435 id_ = self.commit_ids[commit_idx]
436 if commit_idx < 0:
436 if commit_idx < 0:
437 commit_idx += len(self.commit_ids)
437 commit_idx += len(self.commit_ids)
438 return MercurialCommit(
438 return MercurialCommit(
439 self, id_, commit_idx, pre_load=pre_load)
439 self, id_, commit_idx, pre_load=pre_load)
440 except IndexError:
440 except IndexError:
441 commit_id = commit_idx
441 commit_id = commit_idx
442 else:
442 else:
443 commit_id = "tip"
443 commit_id = "tip"
444
444
445 if isinstance(commit_id, unicode):
445 if isinstance(commit_id, unicode):
446 commit_id = safe_str(commit_id)
446 commit_id = safe_str(commit_id)
447
447
448 try:
448 try:
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
450 except CommitDoesNotExistError:
450 except CommitDoesNotExistError:
451 msg = "Commit %s does not exist for %s" % (
451 msg = "Commit %s does not exist for %s" % (
452 commit_id, self)
452 commit_id, self)
453 raise CommitDoesNotExistError(msg)
453 raise CommitDoesNotExistError(msg)
454
454
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456
456
457 def get_commits(
457 def get_commits(
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 branch_name=None, show_hidden=False, pre_load=None):
459 branch_name=None, show_hidden=False, pre_load=None):
460 """
460 """
461 Returns generator of ``MercurialCommit`` objects from start to end
461 Returns generator of ``MercurialCommit`` objects from start to end
462 (both are inclusive)
462 (both are inclusive)
463
463
464 :param start_id: None, str(commit_id)
464 :param start_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
466 :param start_date: if specified, commits with commit date less than
466 :param start_date: if specified, commits with commit date less than
467 ``start_date`` would be filtered out from returned set
467 ``start_date`` would be filtered out from returned set
468 :param end_date: if specified, commits with commit date greater than
468 :param end_date: if specified, commits with commit date greater than
469 ``end_date`` would be filtered out from returned set
469 ``end_date`` would be filtered out from returned set
470 :param branch_name: if specified, commits not reachable from given
470 :param branch_name: if specified, commits not reachable from given
471 branch would be filtered out from returned set
471 branch would be filtered out from returned set
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 Mercurial evolve
473 Mercurial evolve
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 exist.
475 exist.
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 ``end`` could not be found.
477 ``end`` could not be found.
478 """
478 """
479 # actually we should check now if it's not an empty repo
479 # actually we should check now if it's not an empty repo
480 branch_ancestors = False
480 branch_ancestors = False
481 if self.is_empty():
481 if self.is_empty():
482 raise EmptyRepositoryError("There are no commits yet")
482 raise EmptyRepositoryError("There are no commits yet")
483 self._validate_branch_name(branch_name)
483 self._validate_branch_name(branch_name)
484
484
485 if start_id is not None:
485 if start_id is not None:
486 self._validate_commit_id(start_id)
486 self._validate_commit_id(start_id)
487 c_start = self.get_commit(commit_id=start_id)
487 c_start = self.get_commit(commit_id=start_id)
488 start_pos = self._commit_ids[c_start.raw_id]
488 start_pos = self._commit_ids[c_start.raw_id]
489 else:
489 else:
490 start_pos = None
490 start_pos = None
491
491
492 if end_id is not None:
492 if end_id is not None:
493 self._validate_commit_id(end_id)
493 self._validate_commit_id(end_id)
494 c_end = self.get_commit(commit_id=end_id)
494 c_end = self.get_commit(commit_id=end_id)
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 else:
496 else:
497 end_pos = None
497 end_pos = None
498
498
499 if None not in [start_id, end_id] and start_pos > end_pos:
499 if None not in [start_id, end_id] and start_pos > end_pos:
500 raise RepositoryError(
500 raise RepositoryError(
501 "Start commit '%s' cannot be after end commit '%s'" %
501 "Start commit '%s' cannot be after end commit '%s'" %
502 (start_id, end_id))
502 (start_id, end_id))
503
503
504 if end_pos is not None:
504 if end_pos is not None:
505 end_pos += 1
505 end_pos += 1
506
506
507 commit_filter = []
507 commit_filter = []
508
508
509 if branch_name and not branch_ancestors:
509 if branch_name and not branch_ancestors:
510 commit_filter.append('branch("%s")' % (branch_name,))
510 commit_filter.append('branch("%s")' % (branch_name,))
511 elif branch_name and branch_ancestors:
511 elif branch_name and branch_ancestors:
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513
513
514 if start_date and not end_date:
514 if start_date and not end_date:
515 commit_filter.append('date(">%s")' % (start_date,))
515 commit_filter.append('date(">%s")' % (start_date,))
516 if end_date and not start_date:
516 if end_date and not start_date:
517 commit_filter.append('date("<%s")' % (end_date,))
517 commit_filter.append('date("<%s")' % (end_date,))
518 if start_date and end_date:
518 if start_date and end_date:
519 commit_filter.append(
519 commit_filter.append(
520 'date(">%s") and date("<%s")' % (start_date, end_date))
520 'date(">%s") and date("<%s")' % (start_date, end_date))
521
521
522 if not show_hidden:
522 if not show_hidden:
523 commit_filter.append('not obsolete()')
523 commit_filter.append('not obsolete()')
524 commit_filter.append('not hidden()')
524 commit_filter.append('not hidden()')
525
525
526 # TODO: johbo: Figure out a simpler way for this solution
526 # TODO: johbo: Figure out a simpler way for this solution
527 collection_generator = CollectionGenerator
527 collection_generator = CollectionGenerator
528 if commit_filter:
528 if commit_filter:
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 revisions = self._remote.rev_range([commit_filter])
530 revisions = self._remote.rev_range([commit_filter])
531 collection_generator = MercurialIndexBasedCollectionGenerator
531 collection_generator = MercurialIndexBasedCollectionGenerator
532 else:
532 else:
533 revisions = self.commit_ids
533 revisions = self.commit_ids
534
534
535 if start_pos or end_pos:
535 if start_pos or end_pos:
536 revisions = revisions[start_pos:end_pos]
536 revisions = revisions[start_pos:end_pos]
537
537
538 return collection_generator(self, revisions, pre_load=pre_load)
538 return collection_generator(self, revisions, pre_load=pre_load)
539
539
540 def pull(self, url, commit_ids=None):
540 def pull(self, url, commit_ids=None):
541 """
541 """
542 Pull changes from external location.
542 Pull changes from external location.
543
543
544 :param commit_ids: Optional. Can be set to a list of commit ids
544 :param commit_ids: Optional. Can be set to a list of commit ids
545 which shall be pulled from the other repository.
545 which shall be pulled from the other repository.
546 """
546 """
547 url = self._get_url(url)
547 url = self._get_url(url)
548 self._remote.pull(url, commit_ids=commit_ids)
548 self._remote.pull(url, commit_ids=commit_ids)
549 self._remote.invalidate_vcs_cache()
549 self._remote.invalidate_vcs_cache()
550
550
551 def fetch(self, url, commit_ids=None):
551 def fetch(self, url, commit_ids=None):
552 """
552 """
553 Backward compatibility with GIT fetch==pull
553 Backward compatibility with GIT fetch==pull
554 """
554 """
555 return self.pull(url, commit_ids=commit_ids)
555 return self.pull(url, commit_ids=commit_ids)
556
556
557 def push(self, url):
557 def push(self, url):
558 url = self._get_url(url)
558 url = self._get_url(url)
559 self._remote.sync_push(url)
559 self._remote.sync_push(url)
560
560
561 def _local_clone(self, clone_path):
561 def _local_clone(self, clone_path):
562 """
562 """
563 Create a local clone of the current repo.
563 Create a local clone of the current repo.
564 """
564 """
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 hooks=False)
566 hooks=False)
567
567
568 def _update(self, revision, clean=False):
568 def _update(self, revision, clean=False):
569 """
569 """
570 Update the working copy to the specified revision.
570 Update the working copy to the specified revision.
571 """
571 """
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 self._remote.update(revision, clean=clean)
573 self._remote.update(revision, clean=clean)
574
574
575 def _identify(self):
575 def _identify(self):
576 """
576 """
577 Return the current state of the working directory.
577 Return the current state of the working directory.
578 """
578 """
579 return self._remote.identify().strip().rstrip('+')
579 return self._remote.identify().strip().rstrip('+')
580
580
581 def _heads(self, branch=None):
581 def _heads(self, branch=None):
582 """
582 """
583 Return the commit ids of the repository heads.
583 Return the commit ids of the repository heads.
584 """
584 """
585 return self._remote.heads(branch=branch).strip().split(' ')
585 return self._remote.heads(branch=branch).strip().split(' ')
586
586
587 def _ancestor(self, revision1, revision2):
587 def _ancestor(self, revision1, revision2):
588 """
588 """
589 Return the common ancestor of the two revisions.
589 Return the common ancestor of the two revisions.
590 """
590 """
591 return self._remote.ancestor(revision1, revision2)
591 return self._remote.ancestor(revision1, revision2)
592
592
593 def _local_push(
593 def _local_push(
594 self, revision, repository_path, push_branches=False,
594 self, revision, repository_path, push_branches=False,
595 enable_hooks=False):
595 enable_hooks=False):
596 """
596 """
597 Push the given revision to the specified repository.
597 Push the given revision to the specified repository.
598
598
599 :param push_branches: allow to create branches in the target repo.
599 :param push_branches: allow to create branches in the target repo.
600 """
600 """
601 self._remote.push(
601 self._remote.push(
602 [revision], repository_path, hooks=enable_hooks,
602 [revision], repository_path, hooks=enable_hooks,
603 push_branches=push_branches)
603 push_branches=push_branches)
604
604
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 source_ref, use_rebase=False, dry_run=False):
606 source_ref, use_rebase=False, dry_run=False):
607 """
607 """
608 Merge the given source_revision into the checked out revision.
608 Merge the given source_revision into the checked out revision.
609
609
610 Returns the commit id of the merge and a boolean indicating if the
610 Returns the commit id of the merge and a boolean indicating if the
611 commit needs to be pushed.
611 commit needs to be pushed.
612 """
612 """
613 self._update(target_ref.commit_id)
613 self._update(target_ref.commit_id)
614
614
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617
617
618 if ancestor == source_ref.commit_id:
618 if ancestor == source_ref.commit_id:
619 # Nothing to do, the changes were already integrated
619 # Nothing to do, the changes were already integrated
620 return target_ref.commit_id, False
620 return target_ref.commit_id, False
621
621
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 # In this case we should force a commit message
623 # In this case we should force a commit message
624 return source_ref.commit_id, True
624 return source_ref.commit_id, True
625
625
626 if use_rebase:
626 if use_rebase:
627 try:
627 try:
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 target_ref.commit_id)
629 target_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 self._remote.rebase(
631 self._remote.rebase(
632 source=source_ref.commit_id, dest=target_ref.commit_id)
632 source=source_ref.commit_id, dest=target_ref.commit_id)
633 self._remote.invalidate_vcs_cache()
633 self._remote.invalidate_vcs_cache()
634 self._update(bookmark_name)
634 self._update(bookmark_name)
635 return self._identify(), True
635 return self._identify(), True
636 except RepositoryError:
636 except RepositoryError:
637 # The rebase-abort may raise another exception which 'hides'
637 # The rebase-abort may raise another exception which 'hides'
638 # the original one, therefore we log it here.
638 # the original one, therefore we log it here.
639 log.exception('Error while rebasing shadow repo during merge.')
639 log.exception('Error while rebasing shadow repo during merge.')
640
640
641 # Cleanup any rebase leftovers
641 # Cleanup any rebase leftovers
642 self._remote.invalidate_vcs_cache()
642 self._remote.invalidate_vcs_cache()
643 self._remote.rebase(abort=True)
643 self._remote.rebase(abort=True)
644 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
645 self._remote.update(clean=True)
645 self._remote.update(clean=True)
646 raise
646 raise
647 else:
647 else:
648 try:
648 try:
649 self._remote.merge(source_ref.commit_id)
649 self._remote.merge(source_ref.commit_id)
650 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
651 self._remote.commit(
651 self._remote.commit(
652 message=safe_str(merge_message),
652 message=safe_str(merge_message),
653 username=safe_str('%s <%s>' % (user_name, user_email)))
653 username=safe_str('%s <%s>' % (user_name, user_email)))
654 self._remote.invalidate_vcs_cache()
654 self._remote.invalidate_vcs_cache()
655 return self._identify(), True
655 return self._identify(), True
656 except RepositoryError:
656 except RepositoryError:
657 # Cleanup any merge leftovers
657 # Cleanup any merge leftovers
658 self._remote.update(clean=True)
658 self._remote.update(clean=True)
659 raise
659 raise
660
660
661 def _local_close(self, target_ref, user_name, user_email,
661 def _local_close(self, target_ref, user_name, user_email,
662 source_ref, close_message=''):
662 source_ref, close_message=''):
663 """
663 """
664 Close the branch of the given source_revision
664 Close the branch of the given source_revision
665
665
666 Returns the commit id of the close and a boolean indicating if the
666 Returns the commit id of the close and a boolean indicating if the
667 commit needs to be pushed.
667 commit needs to be pushed.
668 """
668 """
669 self._update(source_ref.commit_id)
669 self._update(source_ref.commit_id)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 try:
671 try:
672 self._remote.commit(
672 self._remote.commit(
673 message=safe_str(message),
673 message=safe_str(message),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
675 close_branch=True)
675 close_branch=True)
676 self._remote.invalidate_vcs_cache()
676 self._remote.invalidate_vcs_cache()
677 return self._identify(), True
677 return self._identify(), True
678 except RepositoryError:
678 except RepositoryError:
679 # Cleanup any commit leftovers
679 # Cleanup any commit leftovers
680 self._remote.update(clean=True)
680 self._remote.update(clean=True)
681 raise
681 raise
682
682
683 def _is_the_same_branch(self, target_ref, source_ref):
683 def _is_the_same_branch(self, target_ref, source_ref):
684 return (
684 return (
685 self._get_branch_name(target_ref) ==
685 self._get_branch_name(target_ref) ==
686 self._get_branch_name(source_ref))
686 self._get_branch_name(source_ref))
687
687
688 def _get_branch_name(self, ref):
688 def _get_branch_name(self, ref):
689 if ref.type == 'branch':
689 if ref.type == 'branch':
690 return ref.name
690 return ref.name
691 return self._remote.ctx_branch(ref.commit_id)
691 return self._remote.ctx_branch(ref.commit_id)
692
692
693 def _maybe_prepare_merge_workspace(
693 def _maybe_prepare_merge_workspace(
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 shadow_repository_path = self._get_shadow_repository_path(
695 shadow_repository_path = self._get_shadow_repository_path(
696 repo_id, workspace_id)
696 repo_id, workspace_id)
697 if not os.path.exists(shadow_repository_path):
697 if not os.path.exists(shadow_repository_path):
698 self._local_clone(shadow_repository_path)
698 self._local_clone(shadow_repository_path)
699 log.debug(
699 log.debug(
700 'Prepared shadow repository in %s', shadow_repository_path)
700 'Prepared shadow repository in %s', shadow_repository_path)
701
701
702 return shadow_repository_path
702 return shadow_repository_path
703
703
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 source_repo, source_ref, merge_message,
705 source_repo, source_ref, merge_message,
706 merger_name, merger_email, dry_run=False,
706 merger_name, merger_email, dry_run=False,
707 use_rebase=False, close_branch=False):
707 use_rebase=False, close_branch=False):
708
708
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 'rebase' if use_rebase else 'merge', dry_run)
710 'rebase' if use_rebase else 'merge', dry_run)
711 if target_ref.commit_id not in self._heads():
711 if target_ref.commit_id not in self._heads():
712 return MergeResponse(
712 return MergeResponse(
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
714
714
715 try:
715 try:
716 if (target_ref.type == 'branch' and
716 if (target_ref.type == 'branch' and
717 len(self._heads(target_ref.name)) != 1):
717 len(self._heads(target_ref.name)) != 1):
718 return MergeResponse(
718 return MergeResponse(
719 False, False, None,
719 False, False, None,
720 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
720 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
721 except CommitDoesNotExistError:
721 except CommitDoesNotExistError:
722 log.exception('Failure when looking up branch heads on hg target')
722 log.exception('Failure when looking up branch heads on hg target')
723 return MergeResponse(
723 return MergeResponse(
724 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
724 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
725
725
726 shadow_repository_path = self._maybe_prepare_merge_workspace(
726 shadow_repository_path = self._maybe_prepare_merge_workspace(
727 repo_id, workspace_id, target_ref, source_ref)
727 repo_id, workspace_id, target_ref, source_ref)
728 shadow_repo = self._get_shadow_instance(shadow_repository_path)
728 shadow_repo = self._get_shadow_instance(shadow_repository_path)
729
729
730 log.debug('Pulling in target reference %s', target_ref)
730 log.debug('Pulling in target reference %s', target_ref)
731 self._validate_pull_reference(target_ref)
731 self._validate_pull_reference(target_ref)
732 shadow_repo._local_pull(self.path, target_ref)
732 shadow_repo._local_pull(self.path, target_ref)
733 try:
733 try:
734 log.debug('Pulling in source reference %s', source_ref)
734 log.debug('Pulling in source reference %s', source_ref)
735 source_repo._validate_pull_reference(source_ref)
735 source_repo._validate_pull_reference(source_ref)
736 shadow_repo._local_pull(source_repo.path, source_ref)
736 shadow_repo._local_pull(source_repo.path, source_ref)
737 except CommitDoesNotExistError:
737 except CommitDoesNotExistError:
738 log.exception('Failure when doing local pull on hg shadow repo')
738 log.exception('Failure when doing local pull on hg shadow repo')
739 return MergeResponse(
739 return MergeResponse(
740 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
740 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
741
741
742 merge_ref = None
742 merge_ref = None
743 merge_commit_id = None
743 merge_commit_id = None
744 close_commit_id = None
744 close_commit_id = None
745 merge_failure_reason = MergeFailureReason.NONE
745 merge_failure_reason = MergeFailureReason.NONE
746
746
747 # enforce that close branch should be used only in case we source from
747 # enforce that close branch should be used only in case we source from
748 # an actual Branch
748 # an actual Branch
749 close_branch = close_branch and source_ref.type == 'branch'
749 close_branch = close_branch and source_ref.type == 'branch'
750
750
751 # don't allow to close branch if source and target are the same
751 # don't allow to close branch if source and target are the same
752 close_branch = close_branch and source_ref.name != target_ref.name
752 close_branch = close_branch and source_ref.name != target_ref.name
753
753
754 needs_push_on_close = False
754 needs_push_on_close = False
755 if close_branch and not use_rebase and not dry_run:
755 if close_branch and not use_rebase and not dry_run:
756 try:
756 try:
757 close_commit_id, needs_push_on_close = shadow_repo._local_close(
757 close_commit_id, needs_push_on_close = shadow_repo._local_close(
758 target_ref, merger_name, merger_email, source_ref)
758 target_ref, merger_name, merger_email, source_ref)
759 merge_possible = True
759 merge_possible = True
760 except RepositoryError:
760 except RepositoryError:
761 log.exception(
761 log.exception(
762 'Failure when doing close branch on hg shadow repo')
762 'Failure when doing close branch on hg shadow repo')
763 merge_possible = False
763 merge_possible = False
764 merge_failure_reason = MergeFailureReason.MERGE_FAILED
764 merge_failure_reason = MergeFailureReason.MERGE_FAILED
765 else:
765 else:
766 merge_possible = True
766 merge_possible = True
767
767
768 needs_push = False
768 needs_push = False
769 if merge_possible:
769 if merge_possible:
770 try:
770 try:
771 merge_commit_id, needs_push = shadow_repo._local_merge(
771 merge_commit_id, needs_push = shadow_repo._local_merge(
772 target_ref, merge_message, merger_name, merger_email,
772 target_ref, merge_message, merger_name, merger_email,
773 source_ref, use_rebase=use_rebase, dry_run=dry_run)
773 source_ref, use_rebase=use_rebase, dry_run=dry_run)
774 merge_possible = True
774 merge_possible = True
775
775
776 # read the state of the close action, if it
776 # read the state of the close action, if it
777 # maybe required a push
777 # maybe required a push
778 needs_push = needs_push or needs_push_on_close
778 needs_push = needs_push or needs_push_on_close
779
779
780 # Set a bookmark pointing to the merge commit. This bookmark
780 # Set a bookmark pointing to the merge commit. This bookmark
781 # may be used to easily identify the last successful merge
781 # may be used to easily identify the last successful merge
782 # commit in the shadow repository.
782 # commit in the shadow repository.
783 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
783 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
784 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
784 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
785 except SubrepoMergeError:
785 except SubrepoMergeError:
786 log.exception(
786 log.exception(
787 'Subrepo merge error during local merge on hg shadow repo.')
787 'Subrepo merge error during local merge on hg shadow repo.')
788 merge_possible = False
788 merge_possible = False
789 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
789 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
790 needs_push = False
790 needs_push = False
791 except RepositoryError:
791 except RepositoryError:
792 log.exception('Failure when doing local merge on hg shadow repo')
792 log.exception('Failure when doing local merge on hg shadow repo')
793 merge_possible = False
793 merge_possible = False
794 merge_failure_reason = MergeFailureReason.MERGE_FAILED
794 merge_failure_reason = MergeFailureReason.MERGE_FAILED
795 needs_push = False
795 needs_push = False
796
796
797 if merge_possible and not dry_run:
797 if merge_possible and not dry_run:
798 if needs_push:
798 if needs_push:
799 # In case the target is a bookmark, update it, so after pushing
799 # In case the target is a bookmark, update it, so after pushing
800 # the bookmarks is also updated in the target.
800 # the bookmarks is also updated in the target.
801 if target_ref.type == 'book':
801 if target_ref.type == 'book':
802 shadow_repo.bookmark(
802 shadow_repo.bookmark(
803 target_ref.name, revision=merge_commit_id)
803 target_ref.name, revision=merge_commit_id)
804 try:
804 try:
805 shadow_repo_with_hooks = self._get_shadow_instance(
805 shadow_repo_with_hooks = self._get_shadow_instance(
806 shadow_repository_path,
806 shadow_repository_path,
807 enable_hooks=True)
807 enable_hooks=True)
808 # This is the actual merge action, we push from shadow
808 # This is the actual merge action, we push from shadow
809 # into origin.
809 # into origin.
810 # Note: the push_branches option will push any new branch
810 # Note: the push_branches option will push any new branch
811 # defined in the source repository to the target. This may
811 # defined in the source repository to the target. This may
812 # be dangerous as branches are permanent in Mercurial.
812 # be dangerous as branches are permanent in Mercurial.
813 # This feature was requested in issue #441.
813 # This feature was requested in issue #441.
814 shadow_repo_with_hooks._local_push(
814 shadow_repo_with_hooks._local_push(
815 merge_commit_id, self.path, push_branches=True,
815 merge_commit_id, self.path, push_branches=True,
816 enable_hooks=True)
816 enable_hooks=True)
817
817
818 # maybe we also need to push the close_commit_id
818 # maybe we also need to push the close_commit_id
819 if close_commit_id:
819 if close_commit_id:
820 shadow_repo_with_hooks._local_push(
820 shadow_repo_with_hooks._local_push(
821 close_commit_id, self.path, push_branches=True,
821 close_commit_id, self.path, push_branches=True,
822 enable_hooks=True)
822 enable_hooks=True)
823 merge_succeeded = True
823 merge_succeeded = True
824 except RepositoryError:
824 except RepositoryError:
825 log.exception(
825 log.exception(
826 'Failure when doing local push from the shadow '
826 'Failure when doing local push from the shadow '
827 'repository to the target repository.')
827 'repository to the target repository.')
828 merge_succeeded = False
828 merge_succeeded = False
829 merge_failure_reason = MergeFailureReason.PUSH_FAILED
829 merge_failure_reason = MergeFailureReason.PUSH_FAILED
830 else:
830 else:
831 merge_succeeded = True
831 merge_succeeded = True
832 else:
832 else:
833 merge_succeeded = False
833 merge_succeeded = False
834
834
835 return MergeResponse(
835 return MergeResponse(
836 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
836 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
837
837
838 def _get_shadow_instance(
838 def _get_shadow_instance(
839 self, shadow_repository_path, enable_hooks=False):
839 self, shadow_repository_path, enable_hooks=False):
840 config = self.config.copy()
840 config = self.config.copy()
841 if not enable_hooks:
841 if not enable_hooks:
842 config.clear_section('hooks')
842 config.clear_section('hooks')
843 return MercurialRepository(shadow_repository_path, config)
843 return MercurialRepository(shadow_repository_path, config)
844
844
845 def _validate_pull_reference(self, reference):
845 def _validate_pull_reference(self, reference):
846 if not (reference.name in self.bookmarks or
846 if not (reference.name in self.bookmarks or
847 reference.name in self.branches or
847 reference.name in self.branches or
848 self.get_commit(reference.commit_id)):
848 self.get_commit(reference.commit_id)):
849 raise CommitDoesNotExistError(
849 raise CommitDoesNotExistError(
850 'Unknown branch, bookmark or commit id')
850 'Unknown branch, bookmark or commit id')
851
851
852 def _local_pull(self, repository_path, reference):
852 def _local_pull(self, repository_path, reference):
853 """
853 """
854 Fetch a branch, bookmark or commit from a local repository.
854 Fetch a branch, bookmark or commit from a local repository.
855 """
855 """
856 repository_path = os.path.abspath(repository_path)
856 repository_path = os.path.abspath(repository_path)
857 if repository_path == self.path:
857 if repository_path == self.path:
858 raise ValueError('Cannot pull from the same repository')
858 raise ValueError('Cannot pull from the same repository')
859
859
860 reference_type_to_option_name = {
860 reference_type_to_option_name = {
861 'book': 'bookmark',
861 'book': 'bookmark',
862 'branch': 'branch',
862 'branch': 'branch',
863 }
863 }
864 option_name = reference_type_to_option_name.get(
864 option_name = reference_type_to_option_name.get(
865 reference.type, 'revision')
865 reference.type, 'revision')
866
866
867 if option_name == 'revision':
867 if option_name == 'revision':
868 ref = reference.commit_id
868 ref = reference.commit_id
869 else:
869 else:
870 ref = reference.name
870 ref = reference.name
871
871
872 options = {option_name: [ref]}
872 options = {option_name: [ref]}
873 self._remote.pull_cmd(repository_path, hooks=False, **options)
873 self._remote.pull_cmd(repository_path, hooks=False, **options)
874 self._remote.invalidate_vcs_cache()
874 self._remote.invalidate_vcs_cache()
875
875
876 def bookmark(self, bookmark, revision=None):
876 def bookmark(self, bookmark, revision=None):
877 if isinstance(bookmark, unicode):
877 if isinstance(bookmark, unicode):
878 bookmark = safe_str(bookmark)
878 bookmark = safe_str(bookmark)
879 self._remote.bookmark(bookmark, revision=revision)
879 self._remote.bookmark(bookmark, revision=revision)
880 self._remote.invalidate_vcs_cache()
880 self._remote.invalidate_vcs_cache()
881
881
882 def get_path_permissions(self, username):
882 def get_path_permissions(self, username):
883 hgacl_file = os.path.join(self.path, '.hg/hgacl')
883 hgacl_file = os.path.join(self.path, '.hg/hgacl')
884
884
885 def read_patterns(suffix):
885 def read_patterns(suffix):
886 svalue = None
886 svalue = None
887 try:
887 try:
888 svalue = hgacl.get('narrowhgacl', username + suffix)
888 svalue = hgacl.get('narrowhgacl', username + suffix)
889 except configparser.NoOptionError:
889 except configparser.NoOptionError:
890 try:
890 try:
891 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
891 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
892 except configparser.NoOptionError:
892 except configparser.NoOptionError:
893 pass
893 pass
894 if not svalue:
894 if not svalue:
895 return None
895 return None
896 result = ['/']
896 result = ['/']
897 for pattern in svalue.split():
897 for pattern in svalue.split():
898 result.append(pattern)
898 result.append(pattern)
899 if '*' not in pattern and '?' not in pattern:
899 if '*' not in pattern and '?' not in pattern:
900 result.append(pattern + '/*')
900 result.append(pattern + '/*')
901 return result
901 return result
902
902
903 if os.path.exists(hgacl_file):
903 if os.path.exists(hgacl_file):
904 try:
904 try:
905 hgacl = configparser.RawConfigParser()
905 hgacl = configparser.RawConfigParser()
906 hgacl.read(hgacl_file)
906 hgacl.read(hgacl_file)
907
907
908 includes = read_patterns('.includes')
908 includes = read_patterns('.includes')
909 excludes = read_patterns('.excludes')
909 excludes = read_patterns('.excludes')
910 return BasePathPermissionChecker.create_from_patterns(
910 return BasePathPermissionChecker.create_from_patterns(
911 includes, excludes)
911 includes, excludes)
912 except BaseException as e:
912 except BaseException as e:
913 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
913 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
914 hgacl_file, self.name, e)
914 hgacl_file, self.name, e)
915 raise exceptions.RepositoryRequirementError(msg)
915 raise exceptions.RepositoryRequirementError(msg)
916 else:
916 else:
917 return None
917 return None
918
918
919
919
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection that resolves commits by index instead of id."""

    def _commit_factory(self, commit_id):
        """Look up a commit by its index within the repository."""
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,773 +1,773 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 repo group model for RhodeCode
23 repo group model for RhodeCode
24 """
24 """
25
25
26 import os
26 import os
27 import datetime
27 import datetime
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import shutil
30 import shutil
31 import traceback
31 import traceback
32 import string
32 import string
33
33
34 from zope.cachedescriptors.property import Lazy as LazyProperty
34 from zope.cachedescriptors.property import Lazy as LazyProperty
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.model import BaseModel
37 from rhodecode.model import BaseModel
38 from rhodecode.model.db import (_hash_key,
38 from rhodecode.model.db import (_hash_key,
39 RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
39 RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
40 UserGroup, Repository)
40 UserGroup, Repository)
41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
42 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.lib.caching_query import FromCache
43 from rhodecode.lib.utils2 import action_logger_generic, datetime_to_time
43 from rhodecode.lib.utils2 import action_logger_generic, datetime_to_time
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 class RepoGroupModel(BaseModel):
48 class RepoGroupModel(BaseModel):
49
49
50 cls = RepoGroup
50 cls = RepoGroup
51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
52 PERSONAL_GROUP_PATTERN = '${username}' # default
52 PERSONAL_GROUP_PATTERN = '${username}' # default
53
53
54 def _get_user_group(self, users_group):
54 def _get_user_group(self, users_group):
55 return self._get_instance(UserGroup, users_group,
55 return self._get_instance(UserGroup, users_group,
56 callback=UserGroup.get_by_group_name)
56 callback=UserGroup.get_by_group_name)
57
57
58 def _get_repo_group(self, repo_group):
58 def _get_repo_group(self, repo_group):
59 return self._get_instance(RepoGroup, repo_group,
59 return self._get_instance(RepoGroup, repo_group,
60 callback=RepoGroup.get_by_group_name)
60 callback=RepoGroup.get_by_group_name)
61
61
62 @LazyProperty
62 @LazyProperty
63 def repos_path(self):
63 def repos_path(self):
64 """
64 """
65 Gets the repositories root path from database
65 Gets the repositories root path from database
66 """
66 """
67
67
68 settings_model = VcsSettingsModel(sa=self.sa)
68 settings_model = VcsSettingsModel(sa=self.sa)
69 return settings_model.get_repos_location()
69 return settings_model.get_repos_location()
70
70
71 def get_by_group_name(self, repo_group_name, cache=None):
71 def get_by_group_name(self, repo_group_name, cache=None):
72 repo = self.sa.query(RepoGroup) \
72 repo = self.sa.query(RepoGroup) \
73 .filter(RepoGroup.group_name == repo_group_name)
73 .filter(RepoGroup.group_name == repo_group_name)
74
74
75 if cache:
75 if cache:
76 name_key = _hash_key(repo_group_name)
76 name_key = _hash_key(repo_group_name)
77 repo = repo.options(
77 repo = repo.options(
78 FromCache("sql_cache_short", "get_repo_group_%s" % name_key))
78 FromCache("sql_cache_short", "get_repo_group_%s" % name_key))
79 return repo.scalar()
79 return repo.scalar()
80
80
81 def get_default_create_personal_repo_group(self):
81 def get_default_create_personal_repo_group(self):
82 value = SettingsModel().get_setting_by_name(
82 value = SettingsModel().get_setting_by_name(
83 'create_personal_repo_group')
83 'create_personal_repo_group')
84 return value.app_settings_value if value else None or False
84 return value.app_settings_value if value else None or False
85
85
86 def get_personal_group_name_pattern(self):
86 def get_personal_group_name_pattern(self):
87 value = SettingsModel().get_setting_by_name(
87 value = SettingsModel().get_setting_by_name(
88 'personal_repo_group_pattern')
88 'personal_repo_group_pattern')
89 val = value.app_settings_value if value else None
89 val = value.app_settings_value if value else None
90 group_template = val or self.PERSONAL_GROUP_PATTERN
90 group_template = val or self.PERSONAL_GROUP_PATTERN
91
91
92 group_template = group_template.lstrip('/')
92 group_template = group_template.lstrip('/')
93 return group_template
93 return group_template
94
94
95 def get_personal_group_name(self, user):
95 def get_personal_group_name(self, user):
96 template = self.get_personal_group_name_pattern()
96 template = self.get_personal_group_name_pattern()
97 return string.Template(template).safe_substitute(
97 return string.Template(template).safe_substitute(
98 username=user.username,
98 username=user.username,
99 user_id=user.user_id,
99 user_id=user.user_id,
100 )
100 )
101
101
102 def create_personal_repo_group(self, user, commit_early=True):
102 def create_personal_repo_group(self, user, commit_early=True):
103 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
103 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
104 personal_repo_group_name = self.get_personal_group_name(user)
104 personal_repo_group_name = self.get_personal_group_name(user)
105
105
106 # create a new one
106 # create a new one
107 RepoGroupModel().create(
107 RepoGroupModel().create(
108 group_name=personal_repo_group_name,
108 group_name=personal_repo_group_name,
109 group_description=desc,
109 group_description=desc,
110 owner=user.username,
110 owner=user.username,
111 personal=True,
111 personal=True,
112 commit_early=commit_early)
112 commit_early=commit_early)
113
113
114 def _create_default_perms(self, new_group):
114 def _create_default_perms(self, new_group):
115 # create default permission
115 # create default permission
116 default_perm = 'group.read'
116 default_perm = 'group.read'
117 def_user = User.get_default_user()
117 def_user = User.get_default_user()
118 for p in def_user.user_perms:
118 for p in def_user.user_perms:
119 if p.permission.permission_name.startswith('group.'):
119 if p.permission.permission_name.startswith('group.'):
120 default_perm = p.permission.permission_name
120 default_perm = p.permission.permission_name
121 break
121 break
122
122
123 repo_group_to_perm = UserRepoGroupToPerm()
123 repo_group_to_perm = UserRepoGroupToPerm()
124 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
124 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
125
125
126 repo_group_to_perm.group = new_group
126 repo_group_to_perm.group = new_group
127 repo_group_to_perm.user_id = def_user.user_id
127 repo_group_to_perm.user_id = def_user.user_id
128 return repo_group_to_perm
128 return repo_group_to_perm
129
129
130 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
130 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
131 get_object=False):
131 get_object=False):
132 """
132 """
133 Get's the group name and a parent group name from given group name.
133 Get's the group name and a parent group name from given group name.
134 If repo_in_path is set to truth, we asume the full path also includes
134 If repo_in_path is set to truth, we asume the full path also includes
135 repo name, in such case we clean the last element.
135 repo name, in such case we clean the last element.
136
136
137 :param group_name_full:
137 :param group_name_full:
138 """
138 """
139 split_paths = 1
139 split_paths = 1
140 if repo_in_path:
140 if repo_in_path:
141 split_paths = 2
141 split_paths = 2
142 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
142 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
143
143
144 if repo_in_path and len(_parts) > 1:
144 if repo_in_path and len(_parts) > 1:
145 # such case last element is the repo_name
145 # such case last element is the repo_name
146 _parts.pop(-1)
146 _parts.pop(-1)
147 group_name_cleaned = _parts[-1] # just the group name
147 group_name_cleaned = _parts[-1] # just the group name
148 parent_repo_group_name = None
148 parent_repo_group_name = None
149
149
150 if len(_parts) > 1:
150 if len(_parts) > 1:
151 parent_repo_group_name = _parts[0]
151 parent_repo_group_name = _parts[0]
152
152
153 parent_group = None
153 parent_group = None
154 if parent_repo_group_name:
154 if parent_repo_group_name:
155 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
155 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
156
156
157 if get_object:
157 if get_object:
158 return group_name_cleaned, parent_repo_group_name, parent_group
158 return group_name_cleaned, parent_repo_group_name, parent_group
159
159
160 return group_name_cleaned, parent_repo_group_name
160 return group_name_cleaned, parent_repo_group_name
161
161
162 def check_exist_filesystem(self, group_name, exc_on_failure=True):
162 def check_exist_filesystem(self, group_name, exc_on_failure=True):
163 create_path = os.path.join(self.repos_path, group_name)
163 create_path = os.path.join(self.repos_path, group_name)
164 log.debug('creating new group in %s', create_path)
164 log.debug('creating new group in %s', create_path)
165
165
166 if os.path.isdir(create_path):
166 if os.path.isdir(create_path):
167 if exc_on_failure:
167 if exc_on_failure:
168 abs_create_path = os.path.abspath(create_path)
168 abs_create_path = os.path.abspath(create_path)
169 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
169 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
170 return False
170 return False
171 return True
171 return True
172
172
173 def _create_group(self, group_name):
173 def _create_group(self, group_name):
174 """
174 """
175 makes repository group on filesystem
175 makes repository group on filesystem
176
176
177 :param repo_name:
177 :param repo_name:
178 :param parent_id:
178 :param parent_id:
179 """
179 """
180
180
181 self.check_exist_filesystem(group_name)
181 self.check_exist_filesystem(group_name)
182 create_path = os.path.join(self.repos_path, group_name)
182 create_path = os.path.join(self.repos_path, group_name)
183 log.debug('creating new group in %s', create_path)
183 log.debug('creating new group in %s', create_path)
184 os.makedirs(create_path, mode=0755)
184 os.makedirs(create_path, mode=0o755)
185 log.debug('created group in %s', create_path)
185 log.debug('created group in %s', create_path)
186
186
187 def _rename_group(self, old, new):
187 def _rename_group(self, old, new):
188 """
188 """
189 Renames a group on filesystem
189 Renames a group on filesystem
190
190
191 :param group_name:
191 :param group_name:
192 """
192 """
193
193
194 if old == new:
194 if old == new:
195 log.debug('skipping group rename')
195 log.debug('skipping group rename')
196 return
196 return
197
197
198 log.debug('renaming repository group from %s to %s', old, new)
198 log.debug('renaming repository group from %s to %s', old, new)
199
199
200 old_path = os.path.join(self.repos_path, old)
200 old_path = os.path.join(self.repos_path, old)
201 new_path = os.path.join(self.repos_path, new)
201 new_path = os.path.join(self.repos_path, new)
202
202
203 log.debug('renaming repos paths from %s to %s', old_path, new_path)
203 log.debug('renaming repos paths from %s to %s', old_path, new_path)
204
204
205 if os.path.isdir(new_path):
205 if os.path.isdir(new_path):
206 raise Exception('Was trying to rename to already '
206 raise Exception('Was trying to rename to already '
207 'existing dir %s' % new_path)
207 'existing dir %s' % new_path)
208 shutil.move(old_path, new_path)
208 shutil.move(old_path, new_path)
209
209
210 def _delete_filesystem_group(self, group, force_delete=False):
210 def _delete_filesystem_group(self, group, force_delete=False):
211 """
211 """
212 Deletes a group from a filesystem
212 Deletes a group from a filesystem
213
213
214 :param group: instance of group from database
214 :param group: instance of group from database
215 :param force_delete: use shutil rmtree to remove all objects
215 :param force_delete: use shutil rmtree to remove all objects
216 """
216 """
217 paths = group.full_path.split(RepoGroup.url_sep())
217 paths = group.full_path.split(RepoGroup.url_sep())
218 paths = os.sep.join(paths)
218 paths = os.sep.join(paths)
219
219
220 rm_path = os.path.join(self.repos_path, paths)
220 rm_path = os.path.join(self.repos_path, paths)
221 log.info("Removing group %s", rm_path)
221 log.info("Removing group %s", rm_path)
222 # delete only if that path really exists
222 # delete only if that path really exists
223 if os.path.isdir(rm_path):
223 if os.path.isdir(rm_path):
224 if force_delete:
224 if force_delete:
225 shutil.rmtree(rm_path)
225 shutil.rmtree(rm_path)
226 else:
226 else:
227 # archive that group`
227 # archive that group`
228 _now = datetime.datetime.now()
228 _now = datetime.datetime.now()
229 _ms = str(_now.microsecond).rjust(6, '0')
229 _ms = str(_now.microsecond).rjust(6, '0')
230 _d = 'rm__%s_GROUP_%s' % (
230 _d = 'rm__%s_GROUP_%s' % (
231 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
231 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
232 shutil.move(rm_path, os.path.join(self.repos_path, _d))
232 shutil.move(rm_path, os.path.join(self.repos_path, _d))
233
233
    def create(self, group_name, group_description, owner, just_db=False,
               copy_permissions=False, personal=None, commit_early=True):
        """
        Create a new repository group in the database and (optionally) on
        the filesystem, wiring up default or parent-inherited permissions.

        :param group_name: full path-style name of the new group
        :param group_description: description text; falls back to the name
        :param owner: user (name/id/instance) owning the new group
        :param just_db: skip creating the filesystem directory
        :param copy_permissions: copy permissions over from the parent group
        :param personal: mark the group as a personal repo group
        :param commit_early: commit the session before touching the filesystem
        :raises ValueError: when the parent group or the owner is missing
        """

        (group_name_cleaned,
         parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)

        parent_group = None
        if parent_group_name:
            parent_group = self._get_repo_group(parent_group_name)
            if not parent_group:
                # we tried to create a nested group, but the parent is not
                # existing
                raise ValueError(
                    'Parent group `%s` given in `%s` group name '
                    'is not yet existing.' % (parent_group_name, group_name))

        # because we are doing a cleanup, we need to check if such directory
        # already exists. If we don't do that we can accidentally delete
        # existing directory via cleanup that can cause data issues, since
        # delete does a folder rename to special syntax later cleanup
        # functions can delete this
        cleanup_group = self.check_exist_filesystem(group_name,
                                                    exc_on_failure=False)
        user = self._get_user(owner)
        if not user:
            raise ValueError('Owner %s not found as rhodecode user', owner)

        try:
            new_repo_group = RepoGroup()
            new_repo_group.user = user
            new_repo_group.group_description = group_description or group_name
            new_repo_group.parent_group = parent_group
            new_repo_group.group_name = group_name
            new_repo_group.personal = personal

            self.sa.add(new_repo_group)

            # create an ADMIN permission for owner except if we're super admin,
            # later owner should go into the owner field of groups
            if not user.is_admin:
                self.grant_user_permission(repo_group=new_repo_group,
                                           user=owner, perm='group.admin')

            if parent_group and copy_permissions:
                # copy permissions from parent
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == parent_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == parent_group).all()

                for perm in user_perms:
                    # don't copy over the permission for user who is creating
                    # this group, if he is not super admin he get's admin
                    # permission set above
                    if perm.user != user or user.is_admin:
                        UserRepoGroupToPerm.create(
                            perm.user, new_repo_group, perm.permission)

                for perm in group_perms:
                    UserGroupRepoGroupToPerm.create(
                        perm.users_group, new_repo_group, perm.permission)
            else:
                perm_obj = self._create_default_perms(new_repo_group)
                self.sa.add(perm_obj)

            # now commit the changes, earlier so we are sure everything is in
            # the database.
            if commit_early:
                self.sa.commit()
            if not just_db:
                self._create_group(new_repo_group.group_name)

            # trigger the post hook
            from rhodecode.lib.hooks_base import log_create_repository_group
            repo_group = RepoGroup.get_by_group_name(group_name)
            log_create_repository_group(
                created_by=user.username, **repo_group.get_dict())

            # Trigger create event.
            events.trigger(events.RepoGroupCreateEvent(repo_group))

            return new_repo_group
        except Exception:
            self.sa.rollback()
            log.exception('Exception occurred when creating repository group, '
                          'doing cleanup...')
            # rollback things manually !
            repo_group = RepoGroup.get_by_group_name(group_name)
            if repo_group:
                RepoGroup.delete(repo_group.group_id)
                self.sa.commit()
                if cleanup_group:
                    RepoGroupModel()._delete_filesystem_group(repo_group)
            raise
329
329
    def update_permissions(
            self, repo_group, perm_additions=None, perm_updates=None,
            perm_deletions=None, recursive=None, check_perms=True,
            cur_user=None):
        """
        Apply permission additions, updates and deletions to a repo group
        and, depending on `recursive`, to its nested groups/repositories.

        :param repo_group: target RepoGroup instance
        :param perm_additions: iterable of (member_id, perm, member_type)
        :param perm_updates: iterable of (member_id, perm, member_type)
        :param perm_deletions: iterable of (member_id, perm, member_type)
        :param recursive: None/'none' (group only), 'all', 'repos' or 'groups'
        :param check_perms: verify `cur_user` may alter referenced user groups
        :param cur_user: user performing the change
        :returns: dict with 'added', 'updated' and 'deleted' change records
        """
        from rhodecode.model.repo import RepoModel
        from rhodecode.lib.auth import HasUserGroupPermissionAny

        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': []
        }

        def _set_perm_user(obj, user, perm):
            # grant a user permission on either a repo group or a repository
            if isinstance(obj, RepoGroup):
                self.grant_user_permission(
                    repo_group=obj, user=user, perm=perm)
            elif isinstance(obj, Repository):
                # private repos will not allow to change the default
                # permissions using recursive mode
                if obj.private and user == User.DEFAULT_USER:
                    return

                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_permission(
                    repo=obj, user=user, perm=perm)

        def _set_perm_group(obj, users_group, perm):
            # grant a user-group permission on either a repo group or a repo
            if isinstance(obj, RepoGroup):
                self.grant_user_group_permission(
                    repo_group=obj, group_name=users_group, perm=perm)
            elif isinstance(obj, Repository):
                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_group_permission(
                    repo=obj, group_name=users_group, perm=perm)

        def _revoke_perm_user(obj, user):
            # revoke a user permission from either a repo group or a repo
            if isinstance(obj, RepoGroup):
                self.revoke_user_permission(repo_group=obj, user=user)
            elif isinstance(obj, Repository):
                RepoModel().revoke_user_permission(repo=obj, user=user)

        def _revoke_perm_group(obj, user_group):
            # revoke a user-group permission from either a repo group or repo
            if isinstance(obj, RepoGroup):
                self.revoke_user_group_permission(
                    repo_group=obj, group_name=user_group)
            elif isinstance(obj, Repository):
                RepoModel().revoke_user_group_permission(
                    repo=obj, group_name=user_group)

        # start updates
        log.debug('Now updating permissions for %s in recursive mode:%s',
                  repo_group, recursive)

        # initialize check function, we'll call that multiple times
        has_group_perm = HasUserGroupPermissionAny(*req_perms)

        for obj in repo_group.recursive_groups_and_repos():
            # iterated obj is an instance of a repos group or repository in
            # that group, recursive option can be: none, repos, groups, all
            if recursive == 'all':
                obj = obj
            elif recursive == 'repos':
                # skip groups, other than this one
                if isinstance(obj, RepoGroup) and not obj == repo_group:
                    continue
            elif recursive == 'groups':
                # skip repos
                if isinstance(obj, Repository):
                    continue
            else:  # recursive == 'none':
                # DEFAULT option - don't apply to iterated objects
                # also we do a break at the end of this loop. if we are not
                # in recursive mode
                obj = repo_group

            change_obj = obj.get_api_data()

            # update permissions
            for member_id, perm, member_type in perm_updates:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    # this updates also current one if found
                    _set_perm_user(obj, user=member_id, perm=perm)
                elif member_type == 'user_group':
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _set_perm_group(obj, users_group=member_id, perm=perm)
                else:
                    raise ValueError("member_type must be 'user' or 'user_group' "
                                     "got {} instead".format(member_type))

                changes['updated'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # set new permissions
            for member_id, perm, member_type in perm_additions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    _set_perm_user(obj, user=member_id, perm=perm)
                elif member_type == 'user_group':
                    # check if we have permissions to alter this usergroup
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _set_perm_group(obj, users_group=member_id, perm=perm)
                else:
                    raise ValueError("member_type must be 'user' or 'user_group' "
                                     "got {} instead".format(member_type))

                changes['added'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # delete permissions
            for member_id, perm, member_type in perm_deletions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    _revoke_perm_user(obj, user=member_id)
                elif member_type == 'user_group':
                    # check if we have permissions to alter this usergroup
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _revoke_perm_group(obj, user_group=member_id)
                else:
                    raise ValueError("member_type must be 'user' or 'user_group' "
                                     "got {} instead".format(member_type))

                changes['deleted'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # if it's not recursive call for all,repos,groups
            # break the loop and don't proceed with other changes
            if recursive not in ['all', 'repos', 'groups']:
                break

        return changes
487
487
488 def update(self, repo_group, form_data):
488 def update(self, repo_group, form_data):
489 try:
489 try:
490 repo_group = self._get_repo_group(repo_group)
490 repo_group = self._get_repo_group(repo_group)
491 old_path = repo_group.full_path
491 old_path = repo_group.full_path
492
492
493 # change properties
493 # change properties
494 if 'group_description' in form_data:
494 if 'group_description' in form_data:
495 repo_group.group_description = form_data['group_description']
495 repo_group.group_description = form_data['group_description']
496
496
497 if 'enable_locking' in form_data:
497 if 'enable_locking' in form_data:
498 repo_group.enable_locking = form_data['enable_locking']
498 repo_group.enable_locking = form_data['enable_locking']
499
499
500 if 'group_parent_id' in form_data:
500 if 'group_parent_id' in form_data:
501 parent_group = (
501 parent_group = (
502 self._get_repo_group(form_data['group_parent_id']))
502 self._get_repo_group(form_data['group_parent_id']))
503 repo_group.group_parent_id = (
503 repo_group.group_parent_id = (
504 parent_group.group_id if parent_group else None)
504 parent_group.group_id if parent_group else None)
505 repo_group.parent_group = parent_group
505 repo_group.parent_group = parent_group
506
506
507 # mikhail: to update the full_path, we have to explicitly
507 # mikhail: to update the full_path, we have to explicitly
508 # update group_name
508 # update group_name
509 group_name = form_data.get('group_name', repo_group.name)
509 group_name = form_data.get('group_name', repo_group.name)
510 repo_group.group_name = repo_group.get_new_name(group_name)
510 repo_group.group_name = repo_group.get_new_name(group_name)
511
511
512 new_path = repo_group.full_path
512 new_path = repo_group.full_path
513
513
514 if 'user' in form_data:
514 if 'user' in form_data:
515 repo_group.user = User.get_by_username(form_data['user'])
515 repo_group.user = User.get_by_username(form_data['user'])
516 repo_group.updated_on = datetime.datetime.now()
516 repo_group.updated_on = datetime.datetime.now()
517 self.sa.add(repo_group)
517 self.sa.add(repo_group)
518
518
519 # iterate over all members of this groups and do fixes
519 # iterate over all members of this groups and do fixes
520 # set locking if given
520 # set locking if given
521 # if obj is a repoGroup also fix the name of the group according
521 # if obj is a repoGroup also fix the name of the group according
522 # to the parent
522 # to the parent
523 # if obj is a Repo fix it's name
523 # if obj is a Repo fix it's name
524 # this can be potentially heavy operation
524 # this can be potentially heavy operation
525 for obj in repo_group.recursive_groups_and_repos():
525 for obj in repo_group.recursive_groups_and_repos():
526 # set the value from it's parent
526 # set the value from it's parent
527 obj.enable_locking = repo_group.enable_locking
527 obj.enable_locking = repo_group.enable_locking
528 if isinstance(obj, RepoGroup):
528 if isinstance(obj, RepoGroup):
529 new_name = obj.get_new_name(obj.name)
529 new_name = obj.get_new_name(obj.name)
530 log.debug('Fixing group %s to new name %s',
530 log.debug('Fixing group %s to new name %s',
531 obj.group_name, new_name)
531 obj.group_name, new_name)
532 obj.group_name = new_name
532 obj.group_name = new_name
533 obj.updated_on = datetime.datetime.now()
533 obj.updated_on = datetime.datetime.now()
534 elif isinstance(obj, Repository):
534 elif isinstance(obj, Repository):
535 # we need to get all repositories from this new group and
535 # we need to get all repositories from this new group and
536 # rename them accordingly to new group path
536 # rename them accordingly to new group path
537 new_name = obj.get_new_name(obj.just_name)
537 new_name = obj.get_new_name(obj.just_name)
538 log.debug('Fixing repo %s to new name %s',
538 log.debug('Fixing repo %s to new name %s',
539 obj.repo_name, new_name)
539 obj.repo_name, new_name)
540 obj.repo_name = new_name
540 obj.repo_name = new_name
541 obj.updated_on = datetime.datetime.now()
541 obj.updated_on = datetime.datetime.now()
542 self.sa.add(obj)
542 self.sa.add(obj)
543
543
544 self._rename_group(old_path, new_path)
544 self._rename_group(old_path, new_path)
545
545
546 # Trigger update event.
546 # Trigger update event.
547 events.trigger(events.RepoGroupUpdateEvent(repo_group))
547 events.trigger(events.RepoGroupUpdateEvent(repo_group))
548
548
549 return repo_group
549 return repo_group
550 except Exception:
550 except Exception:
551 log.error(traceback.format_exc())
551 log.error(traceback.format_exc())
552 raise
552 raise
553
553
554 def delete(self, repo_group, force_delete=False, fs_remove=True):
554 def delete(self, repo_group, force_delete=False, fs_remove=True):
555 repo_group = self._get_repo_group(repo_group)
555 repo_group = self._get_repo_group(repo_group)
556 if not repo_group:
556 if not repo_group:
557 return False
557 return False
558 try:
558 try:
559 self.sa.delete(repo_group)
559 self.sa.delete(repo_group)
560 if fs_remove:
560 if fs_remove:
561 self._delete_filesystem_group(repo_group, force_delete)
561 self._delete_filesystem_group(repo_group, force_delete)
562 else:
562 else:
563 log.debug('skipping removal from filesystem')
563 log.debug('skipping removal from filesystem')
564
564
565 # Trigger delete event.
565 # Trigger delete event.
566 events.trigger(events.RepoGroupDeleteEvent(repo_group))
566 events.trigger(events.RepoGroupDeleteEvent(repo_group))
567 return True
567 return True
568
568
569 except Exception:
569 except Exception:
570 log.error('Error removing repo_group %s', repo_group)
570 log.error('Error removing repo_group %s', repo_group)
571 raise
571 raise
572
572
573 def grant_user_permission(self, repo_group, user, perm):
573 def grant_user_permission(self, repo_group, user, perm):
574 """
574 """
575 Grant permission for user on given repository group, or update
575 Grant permission for user on given repository group, or update
576 existing one if found
576 existing one if found
577
577
578 :param repo_group: Instance of RepoGroup, repositories_group_id,
578 :param repo_group: Instance of RepoGroup, repositories_group_id,
579 or repositories_group name
579 or repositories_group name
580 :param user: Instance of User, user_id or username
580 :param user: Instance of User, user_id or username
581 :param perm: Instance of Permission, or permission_name
581 :param perm: Instance of Permission, or permission_name
582 """
582 """
583
583
584 repo_group = self._get_repo_group(repo_group)
584 repo_group = self._get_repo_group(repo_group)
585 user = self._get_user(user)
585 user = self._get_user(user)
586 permission = self._get_perm(perm)
586 permission = self._get_perm(perm)
587
587
588 # check if we have that permission already
588 # check if we have that permission already
589 obj = self.sa.query(UserRepoGroupToPerm)\
589 obj = self.sa.query(UserRepoGroupToPerm)\
590 .filter(UserRepoGroupToPerm.user == user)\
590 .filter(UserRepoGroupToPerm.user == user)\
591 .filter(UserRepoGroupToPerm.group == repo_group)\
591 .filter(UserRepoGroupToPerm.group == repo_group)\
592 .scalar()
592 .scalar()
593 if obj is None:
593 if obj is None:
594 # create new !
594 # create new !
595 obj = UserRepoGroupToPerm()
595 obj = UserRepoGroupToPerm()
596 obj.group = repo_group
596 obj.group = repo_group
597 obj.user = user
597 obj.user = user
598 obj.permission = permission
598 obj.permission = permission
599 self.sa.add(obj)
599 self.sa.add(obj)
600 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
600 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
601 action_logger_generic(
601 action_logger_generic(
602 'granted permission: {} to user: {} on repogroup: {}'.format(
602 'granted permission: {} to user: {} on repogroup: {}'.format(
603 perm, user, repo_group), namespace='security.repogroup')
603 perm, user, repo_group), namespace='security.repogroup')
604 return obj
604 return obj
605
605
606 def revoke_user_permission(self, repo_group, user):
606 def revoke_user_permission(self, repo_group, user):
607 """
607 """
608 Revoke permission for user on given repository group
608 Revoke permission for user on given repository group
609
609
610 :param repo_group: Instance of RepoGroup, repositories_group_id,
610 :param repo_group: Instance of RepoGroup, repositories_group_id,
611 or repositories_group name
611 or repositories_group name
612 :param user: Instance of User, user_id or username
612 :param user: Instance of User, user_id or username
613 """
613 """
614
614
615 repo_group = self._get_repo_group(repo_group)
615 repo_group = self._get_repo_group(repo_group)
616 user = self._get_user(user)
616 user = self._get_user(user)
617
617
618 obj = self.sa.query(UserRepoGroupToPerm)\
618 obj = self.sa.query(UserRepoGroupToPerm)\
619 .filter(UserRepoGroupToPerm.user == user)\
619 .filter(UserRepoGroupToPerm.user == user)\
620 .filter(UserRepoGroupToPerm.group == repo_group)\
620 .filter(UserRepoGroupToPerm.group == repo_group)\
621 .scalar()
621 .scalar()
622 if obj:
622 if obj:
623 self.sa.delete(obj)
623 self.sa.delete(obj)
624 log.debug('Revoked perm on %s on %s', repo_group, user)
624 log.debug('Revoked perm on %s on %s', repo_group, user)
625 action_logger_generic(
625 action_logger_generic(
626 'revoked permission from user: {} on repogroup: {}'.format(
626 'revoked permission from user: {} on repogroup: {}'.format(
627 user, repo_group), namespace='security.repogroup')
627 user, repo_group), namespace='security.repogroup')
628
628
629 def grant_user_group_permission(self, repo_group, group_name, perm):
629 def grant_user_group_permission(self, repo_group, group_name, perm):
630 """
630 """
631 Grant permission for user group on given repository group, or update
631 Grant permission for user group on given repository group, or update
632 existing one if found
632 existing one if found
633
633
634 :param repo_group: Instance of RepoGroup, repositories_group_id,
634 :param repo_group: Instance of RepoGroup, repositories_group_id,
635 or repositories_group name
635 or repositories_group name
636 :param group_name: Instance of UserGroup, users_group_id,
636 :param group_name: Instance of UserGroup, users_group_id,
637 or user group name
637 or user group name
638 :param perm: Instance of Permission, or permission_name
638 :param perm: Instance of Permission, or permission_name
639 """
639 """
640 repo_group = self._get_repo_group(repo_group)
640 repo_group = self._get_repo_group(repo_group)
641 group_name = self._get_user_group(group_name)
641 group_name = self._get_user_group(group_name)
642 permission = self._get_perm(perm)
642 permission = self._get_perm(perm)
643
643
644 # check if we have that permission already
644 # check if we have that permission already
645 obj = self.sa.query(UserGroupRepoGroupToPerm)\
645 obj = self.sa.query(UserGroupRepoGroupToPerm)\
646 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
646 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
647 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
647 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
648 .scalar()
648 .scalar()
649
649
650 if obj is None:
650 if obj is None:
651 # create new
651 # create new
652 obj = UserGroupRepoGroupToPerm()
652 obj = UserGroupRepoGroupToPerm()
653
653
654 obj.group = repo_group
654 obj.group = repo_group
655 obj.users_group = group_name
655 obj.users_group = group_name
656 obj.permission = permission
656 obj.permission = permission
657 self.sa.add(obj)
657 self.sa.add(obj)
658 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
658 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
659 action_logger_generic(
659 action_logger_generic(
660 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
660 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
661 perm, group_name, repo_group), namespace='security.repogroup')
661 perm, group_name, repo_group), namespace='security.repogroup')
662 return obj
662 return obj
663
663
664 def revoke_user_group_permission(self, repo_group, group_name):
664 def revoke_user_group_permission(self, repo_group, group_name):
665 """
665 """
666 Revoke permission for user group on given repository group
666 Revoke permission for user group on given repository group
667
667
668 :param repo_group: Instance of RepoGroup, repositories_group_id,
668 :param repo_group: Instance of RepoGroup, repositories_group_id,
669 or repositories_group name
669 or repositories_group name
670 :param group_name: Instance of UserGroup, users_group_id,
670 :param group_name: Instance of UserGroup, users_group_id,
671 or user group name
671 or user group name
672 """
672 """
673 repo_group = self._get_repo_group(repo_group)
673 repo_group = self._get_repo_group(repo_group)
674 group_name = self._get_user_group(group_name)
674 group_name = self._get_user_group(group_name)
675
675
676 obj = self.sa.query(UserGroupRepoGroupToPerm)\
676 obj = self.sa.query(UserGroupRepoGroupToPerm)\
677 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
677 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
678 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
678 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
679 .scalar()
679 .scalar()
680 if obj:
680 if obj:
681 self.sa.delete(obj)
681 self.sa.delete(obj)
682 log.debug('Revoked perm to %s on %s', repo_group, group_name)
682 log.debug('Revoked perm to %s on %s', repo_group, group_name)
683 action_logger_generic(
683 action_logger_generic(
684 'revoked permission from usergroup: {} on repogroup: {}'.format(
684 'revoked permission from usergroup: {} on repogroup: {}'.format(
685 group_name, repo_group), namespace='security.repogroup')
685 group_name, repo_group), namespace='security.repogroup')
686
686
687 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
687 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
688 super_user_actions=False):
688 super_user_actions=False):
689
689
690 from pyramid.threadlocal import get_current_request
690 from pyramid.threadlocal import get_current_request
691 _render = get_current_request().get_partial_renderer(
691 _render = get_current_request().get_partial_renderer(
692 'rhodecode:templates/data_table/_dt_elements.mako')
692 'rhodecode:templates/data_table/_dt_elements.mako')
693 c = _render.get_call_context()
693 c = _render.get_call_context()
694 h = _render.get_helpers()
694 h = _render.get_helpers()
695
695
696 def quick_menu(repo_group_name):
696 def quick_menu(repo_group_name):
697 return _render('quick_repo_group_menu', repo_group_name)
697 return _render('quick_repo_group_menu', repo_group_name)
698
698
699 def repo_group_lnk(repo_group_name):
699 def repo_group_lnk(repo_group_name):
700 return _render('repo_group_name', repo_group_name)
700 return _render('repo_group_name', repo_group_name)
701
701
702 def last_change(last_change):
702 def last_change(last_change):
703 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
703 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
704 last_change = last_change + datetime.timedelta(seconds=
704 last_change = last_change + datetime.timedelta(seconds=
705 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
705 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
706 return _render("last_change", last_change)
706 return _render("last_change", last_change)
707
707
708 def desc(desc, personal):
708 def desc(desc, personal):
709 return _render(
709 return _render(
710 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
710 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
711
711
712 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
712 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
713 return _render(
713 return _render(
714 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
714 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
715
715
716 def repo_group_name(repo_group_name, children_groups):
716 def repo_group_name(repo_group_name, children_groups):
717 return _render("repo_group_name", repo_group_name, children_groups)
717 return _render("repo_group_name", repo_group_name, children_groups)
718
718
719 def user_profile(username):
719 def user_profile(username):
720 return _render('user_profile', username)
720 return _render('user_profile', username)
721
721
722 repo_group_data = []
722 repo_group_data = []
723 for group in repo_group_list:
723 for group in repo_group_list:
724
724
725 row = {
725 row = {
726 "menu": quick_menu(group.group_name),
726 "menu": quick_menu(group.group_name),
727 "name": repo_group_lnk(group.group_name),
727 "name": repo_group_lnk(group.group_name),
728 "name_raw": group.group_name,
728 "name_raw": group.group_name,
729 "last_change": last_change(group.last_db_change),
729 "last_change": last_change(group.last_db_change),
730 "last_change_raw": datetime_to_time(group.last_db_change),
730 "last_change_raw": datetime_to_time(group.last_db_change),
731 "desc": desc(group.description_safe, group.personal),
731 "desc": desc(group.description_safe, group.personal),
732 "top_level_repos": 0,
732 "top_level_repos": 0,
733 "owner": user_profile(group.user.username)
733 "owner": user_profile(group.user.username)
734 }
734 }
735 if admin:
735 if admin:
736 repo_count = group.repositories.count()
736 repo_count = group.repositories.count()
737 children_groups = map(
737 children_groups = map(
738 h.safe_unicode,
738 h.safe_unicode,
739 itertools.chain((g.name for g in group.parents),
739 itertools.chain((g.name for g in group.parents),
740 (x.name for x in [group])))
740 (x.name for x in [group])))
741 row.update({
741 row.update({
742 "action": repo_group_actions(
742 "action": repo_group_actions(
743 group.group_id, group.group_name, repo_count),
743 group.group_id, group.group_name, repo_count),
744 "top_level_repos": repo_count,
744 "top_level_repos": repo_count,
745 "name": repo_group_name(group.group_name, children_groups),
745 "name": repo_group_name(group.group_name, children_groups),
746
746
747 })
747 })
748 repo_group_data.append(row)
748 repo_group_data.append(row)
749
749
750 return repo_group_data
750 return repo_group_data
751
751
752 def _get_defaults(self, repo_group_name):
752 def _get_defaults(self, repo_group_name):
753 repo_group = RepoGroup.get_by_group_name(repo_group_name)
753 repo_group = RepoGroup.get_by_group_name(repo_group_name)
754
754
755 if repo_group is None:
755 if repo_group is None:
756 return None
756 return None
757
757
758 defaults = repo_group.get_dict()
758 defaults = repo_group.get_dict()
759 defaults['repo_group_name'] = repo_group.name
759 defaults['repo_group_name'] = repo_group.name
760 defaults['repo_group_description'] = repo_group.group_description
760 defaults['repo_group_description'] = repo_group.group_description
761 defaults['repo_group_enable_locking'] = repo_group.enable_locking
761 defaults['repo_group_enable_locking'] = repo_group.enable_locking
762
762
763 # we use -1 as this is how in HTML, we mark an empty group
763 # we use -1 as this is how in HTML, we mark an empty group
764 defaults['repo_group'] = defaults['group_parent_id'] or -1
764 defaults['repo_group'] = defaults['group_parent_id'] or -1
765
765
766 # fill owner
766 # fill owner
767 if repo_group.user:
767 if repo_group.user:
768 defaults.update({'user': repo_group.user.username})
768 defaults.update({'user': repo_group.user.username})
769 else:
769 else:
770 replacement_user = User.get_first_super_admin().username
770 replacement_user = User.get_first_super_admin().username
771 defaults.update({'user': replacement_user})
771 defaults.update({'user': replacement_user})
772
772
773 return defaults
773 return defaults
@@ -1,1288 +1,1288 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
41 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
42
42
43
43
44 def repo_path_generator():
44 def repo_path_generator():
45 """
45 """
46 Return a different path to be used for cloning repos.
46 Return a different path to be used for cloning repos.
47 """
47 """
48 i = 0
48 i = 0
49 while True:
49 while True:
50 i += 1
50 i += 1
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52
52
53
53
54 REPO_PATH_GENERATOR = repo_path_generator()
54 REPO_PATH_GENERATOR = repo_path_generator()
55
55
56
56
57 class TestGitRepository:
57 class TestGitRepository:
58
58
59 # pylint: disable=protected-access
59 # pylint: disable=protected-access
60
60
61 def __check_for_existing_repo(self):
61 def __check_for_existing_repo(self):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 self.fail('Cannot test git clone repo as location %s already '
63 self.fail('Cannot test git clone repo as location %s already '
64 'exists. You should manually remove it first.'
64 'exists. You should manually remove it first.'
65 % TEST_GIT_REPO_CLONE)
65 % TEST_GIT_REPO_CLONE)
66
66
67 @pytest.fixture(autouse=True)
67 @pytest.fixture(autouse=True)
68 def prepare(self, request, baseapp):
68 def prepare(self, request, baseapp):
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70
70
71 def get_clone_repo(self):
71 def get_clone_repo(self):
72 """
72 """
73 Return a non bare clone of the base repo.
73 Return a non bare clone of the base repo.
74 """
74 """
75 clone_path = next(REPO_PATH_GENERATOR)
75 clone_path = next(REPO_PATH_GENERATOR)
76 repo_clone = GitRepository(
76 repo_clone = GitRepository(
77 clone_path, create=True, src_url=self.repo.path, bare=False)
77 clone_path, create=True, src_url=self.repo.path, bare=False)
78
78
79 return repo_clone
79 return repo_clone
80
80
81 def get_empty_repo(self, bare=False):
81 def get_empty_repo(self, bare=False):
82 """
82 """
83 Return a non bare empty repo.
83 Return a non bare empty repo.
84 """
84 """
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86
86
87 def test_wrong_repo_path(self):
87 def test_wrong_repo_path(self):
88 wrong_repo_path = '/tmp/errorrepo_git'
88 wrong_repo_path = '/tmp/errorrepo_git'
89 with pytest.raises(RepositoryError):
89 with pytest.raises(RepositoryError):
90 GitRepository(wrong_repo_path)
90 GitRepository(wrong_repo_path)
91
91
92 def test_repo_clone(self):
92 def test_repo_clone(self):
93 self.__check_for_existing_repo()
93 self.__check_for_existing_repo()
94 repo = GitRepository(TEST_GIT_REPO)
94 repo = GitRepository(TEST_GIT_REPO)
95 repo_clone = GitRepository(
95 repo_clone = GitRepository(
96 TEST_GIT_REPO_CLONE,
96 TEST_GIT_REPO_CLONE,
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 # Checking hashes of commits should be enough
99 # Checking hashes of commits should be enough
100 for commit in repo.get_commits():
100 for commit in repo.get_commits():
101 raw_id = commit.raw_id
101 raw_id = commit.raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103
103
104 def test_repo_clone_without_create(self):
104 def test_repo_clone_without_create(self):
105 with pytest.raises(RepositoryError):
105 with pytest.raises(RepositoryError):
106 GitRepository(
106 GitRepository(
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108
108
109 def test_repo_clone_with_update(self):
109 def test_repo_clone_with_update(self):
110 repo = GitRepository(TEST_GIT_REPO)
110 repo = GitRepository(TEST_GIT_REPO)
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 repo_clone = GitRepository(
112 repo_clone = GitRepository(
113 clone_path,
113 clone_path,
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116
116
117 # check if current workdir was updated
117 # check if current workdir was updated
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 assert os.path.isfile(fpath)
119 assert os.path.isfile(fpath)
120
120
121 def test_repo_clone_without_update(self):
121 def test_repo_clone_without_update(self):
122 repo = GitRepository(TEST_GIT_REPO)
122 repo = GitRepository(TEST_GIT_REPO)
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 repo_clone = GitRepository(
124 repo_clone = GitRepository(
125 clone_path,
125 clone_path,
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 # check if current workdir was *NOT* updated
128 # check if current workdir was *NOT* updated
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 # Make sure it's not bare repo
130 # Make sure it's not bare repo
131 assert not repo_clone.bare
131 assert not repo_clone.bare
132 assert not os.path.isfile(fpath)
132 assert not os.path.isfile(fpath)
133
133
134 def test_repo_clone_into_bare_repo(self):
134 def test_repo_clone_into_bare_repo(self):
135 repo = GitRepository(TEST_GIT_REPO)
135 repo = GitRepository(TEST_GIT_REPO)
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 repo_clone = GitRepository(
137 repo_clone = GitRepository(
138 clone_path, create=True, src_url=repo.path, bare=True)
138 clone_path, create=True, src_url=repo.path, bare=True)
139 assert repo_clone.bare
139 assert repo_clone.bare
140
140
141 def test_create_repo_is_not_bare_by_default(self):
141 def test_create_repo_is_not_bare_by_default(self):
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 assert not repo.bare
143 assert not repo.bare
144
144
145 def test_create_bare_repo(self):
145 def test_create_bare_repo(self):
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 assert repo.bare
147 assert repo.bare
148
148
149 def test_update_server_info(self):
149 def test_update_server_info(self):
150 self.repo._update_server_info()
150 self.repo._update_server_info()
151
151
def test_fetch(self, vcsbackend_git):
    # Note: `fetch` is a git-specific part of the API; only the git
    # backend implements it.
    source = vcsbackend_git.repo
    target = vcsbackend_git.create_repo(bare=True)
    target.fetch(source.path)
    # Re-instantiate to sidestep cached state on the target instance.
    target = vcsbackend_git.backend(target.path)
    assert len(source.commit_ids) == len(target.commit_ids)
161
161
def test_commit_ids(self):
    # The fixture repository held 112 commits when this was written, so
    # these well-known ids are assumed to remain available.
    known_ids = frozenset([
        'c1214f7e79e02fc37156ff215cd71275450cffc3',
        '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
        'fa6600f6848800641328adbf7811fd2372c02ab2',
        '102607b09cdd60e2793929c4f90478be29f85a17',
        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
        '2d1028c054665b962fa3d307adfc923ddd528038',
        'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
        'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
        'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
        '8430a588b43b5d6da365400117c89400326e7992',
        'd955cd312c17b02143c04fa1099a352b04368118',
        'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
        'add63e382e4aabc9e1afdc4bdc24506c269b7618',
        'f298fe1189f1b69779a4423f40b48edf92a703fc',
        'bd9b619eb41994cac43d67cf4ccc8399c1125808',
        '6e125e7c890379446e98980d8ed60fba87d0f6d1',
        'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
        '0b05e4ed56c802098dfc813cbe779b2f49e92500',
        '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
        '45223f8f114c64bf4d6f853e3c35a369a6305520',
        'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
        'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
        '27d48942240f5b91dfda77accd2caac94708cc7d',
        '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
        'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
    ])
    assert known_ids.issubset(set(self.repo.commit_ids))
191
191
def test_slicing(self):
    # (start, stop, expected length) — lengths: 4, 1, 5, 10, 95.
    cases = [
        (0, 4, 4),
        (1, 2, 1),
        (10, 15, 5),
        (10, 20, 10),
        (5, 100, 95),
    ]
    for start, stop, expected_len in cases:
        sliced = list(self.repo[start:stop])
        assert len(sliced) == expected_len
        assert sliced[0] == self.repo.get_commit(commit_idx=start)
        assert sliced[-1] == self.repo.get_commit(commit_idx=stop - 1)
200
200
def test_branches(self):
    # TODO: Need more tests here
    # The assertions on 'master'/'gittree'/'web-branch' were removed —
    # those are 'remotes' branches for a cloned repo.
    for commit_id in self.repo.branches.values():
        assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209
209
def test_tags(self):
    # TODO: Need more tests here
    for expected_tag in ('v0.1.1', 'v0.1.2'):
        assert expected_tag in self.repo.tags
    for commit_id in self.repo.tags.values():
        assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216
216
def _test_single_commit_cache(self, commit_id):
    # Fetching a commit must populate the repo-level cache with the
    # very same object instance.
    fetched = self.repo.get_commit(commit_id)
    assert commit_id in self.repo.commits
    assert self.repo.commits[commit_id] is fetched
221
221
def test_initial_commit(self):
    # The very first commit of the fixture repo has known metadata
    # and a known tree layout.
    first_id = self.repo.commit_ids[0]
    commit = self.repo.get_commit(first_id)
    author = commit.author

    assert commit.message == 'initial import\n'
    assert author == 'Marcin Kuzminski <marcin@python-blog.com>'
    assert author == commit.committer

    file_paths = (
        'vcs/__init__.py',
        'vcs/backends/BaseRepository.py',
        'vcs/backends/__init__.py',
    )
    for file_path in file_paths:
        assert isinstance(commit.get_node(file_path), FileNode)
    for dir_path in ('', 'vcs', 'vcs/backends'):
        assert isinstance(commit.get_node(dir_path), DirNode)

    with pytest.raises(NodeDoesNotExistError):
        commit.get_node(path='foobar')

    # Directory lookup works both with and without a trailing slash.
    for dir_spec in ('vcs/', 'vcs'):
        node = commit.get_node(dir_spec)
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

    node = commit.get_node('vcs/__init__.py')
    assert hasattr(node, 'kind')
    assert node.kind == NodeKind.FILE
251
251
def test_not_existing_commit(self):
    # An id of the right shape but unknown to the repo must be rejected.
    with pytest.raises(RepositoryError):
        self.repo.get_commit('f' * 40)
255
255
def test_commit10(self):
    # The 10th commit carries a README.rst with this exact content.
    expected_readme = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
    tenth_commit = self.repo.get_commit(self.repo.commit_ids[9])
    readme_node = tenth_commit.get_node('README.rst')
    assert readme_node.kind == NodeKind.FILE
    assert readme_node.content == expected_readme
274
274
def test_head(self):
    # `head` must match the id of the default (tip) commit.
    tip = self.repo.get_commit()
    assert self.repo.head == tip.raw_id
277
277
def test_checkout_with_create(self):
    clone = self.get_clone_repo()

    branch_name = 'new_branch'
    assert clone._current_branch() == 'master'
    assert set(clone.branches) == {'master'}
    clone._checkout(branch_name, create=True)

    # `branches` is a lazy property, so a fresh instance is needed to
    # observe the newly created branch.
    clone = GitRepository(clone.path)
    assert set(clone.branches) == {'master', branch_name}
    assert clone._current_branch() == branch_name
290
290
def test_checkout(self):
    # Switching to a new branch and back must land on 'master'.
    clone = self.get_clone_repo()

    clone._checkout('new_branch', create=True)
    clone._checkout('master')

    assert clone._current_branch() == 'master'
298
298
def test_checkout_same_branch(self):
    # Checking out the branch that is already current is a no-op.
    clone = self.get_clone_repo()

    clone._checkout('master')
    assert clone._current_branch() == 'master'
304
304
def test_checkout_branch_already_exists(self):
    # create=True on an existing branch name must fail.
    clone = self.get_clone_repo()

    with pytest.raises(RepositoryError):
        clone._checkout('master', create=True)
310
310
def test_checkout_bare_repo(self):
    # A bare repository has no working tree to check out.
    with pytest.raises(RepositoryError):
        self.repo._checkout('master')
314
314
def test_current_branch_bare_repo(self):
    # A bare repository has no current branch to report.
    with pytest.raises(RepositoryError):
        self.repo._current_branch()
318
318
def test_current_branch_empty_repo(self):
    # An empty repository reports no current branch at all.
    empty = self.get_empty_repo()
    assert empty._current_branch() is None
322
322
def test_local_clone(self):
    # A local clone of 'master' carries the full commit history.
    destination = next(REPO_PATH_GENERATOR)
    self.repo._local_clone(destination, 'master')
    cloned = GitRepository(destination)

    assert self.repo.commit_ids == cloned.commit_ids
329
329
def test_local_clone_with_specific_branch(self):
    source_repo = self.get_clone_repo()

    # Branch off three commits before the tip of the source repo.
    branch_point = source_repo.commit_ids[-3]
    source_repo._checkout(branch_point)
    source_repo._checkout('new_branch', create=True)

    # Cloning the side branch only brings history up to its head.
    destination = next(REPO_PATH_GENERATOR)
    source_repo._local_clone(destination, 'new_branch')
    cloned = GitRepository(destination)

    assert source_repo.commit_ids[:-3 + 1] == cloned.commit_ids

    # Cloning 'master' still brings the full history.
    destination = next(REPO_PATH_GENERATOR)
    source_repo._local_clone(destination, 'master')
    cloned = GitRepository(destination)

    assert source_repo.commit_ids == cloned.commit_ids
349
349
def test_local_clone_fails_if_target_exists(self):
    # Cloning onto a path that already exists must be rejected.
    with pytest.raises(RepositoryError):
        self.repo._local_clone(self.repo.path, 'master')
353
353
def test_local_fetch(self):
    target_repo = self.get_empty_repo()
    source_repo = self.get_clone_repo()

    # Branch off three commits before the tip of the source repo.
    master_tip = source_repo.commit_ids[-1]
    branch_point = source_repo.commit_ids[-3]
    source_repo._checkout(branch_point)
    source_repo._checkout('new_branch', create=True)

    # Fetching the side branch records its head as last fetched.
    target_repo._local_fetch(source_repo.path, 'new_branch')
    assert target_repo._last_fetch_heads() == [branch_point]

    # Fetching master then records the master tip.
    target_repo._local_fetch(source_repo.path, 'master')
    assert target_repo._last_fetch_heads() == [master_tip]
369
369
def test_local_fetch_from_bare_repo(self):
    # Fetching from a bare source works just like a regular one.
    target_repo = self.get_empty_repo()
    target_repo._local_fetch(self.repo.path, 'master')

    master_tip = self.repo.commit_ids[-1]
    assert target_repo._last_fetch_heads() == [master_tip]
376
376
def test_local_fetch_from_same_repo(self):
    # A repository must refuse to fetch from itself.
    with pytest.raises(ValueError):
        self.repo._local_fetch(self.repo.path, 'master')
380
380
def test_local_fetch_branch_does_not_exist(self):
    # Fetching a branch the source does not have must fail.
    target_repo = self.get_empty_repo()

    with pytest.raises(RepositoryError):
        target_repo._local_fetch(self.repo.path, 'new_branch')
386
386
def test_local_pull(self):
    target_repo = self.get_empty_repo()
    source_repo = self.get_clone_repo()

    # Branch off three commits before the tip of the source repo.
    master_tip = source_repo.commit_ids[-1]
    branch_point = source_repo.commit_ids[-3]
    source_repo._checkout(branch_point)
    source_repo._checkout('new_branch', create=True)

    # Pulling the side branch moves the target head to its tip.
    target_repo._local_pull(source_repo.path, 'new_branch')
    target_repo = GitRepository(target_repo.path)
    assert target_repo.head == branch_point

    # Pulling master then moves the head to the master tip.
    target_repo._local_pull(source_repo.path, 'master')
    target_repo = GitRepository(target_repo.path)
    assert target_repo.head == master_tip
404
404
def test_local_pull_in_bare_repo(self):
    # A bare repository has no working tree, so pulling must fail.
    with pytest.raises(RepositoryError):
        self.repo._local_pull(self.repo.path, 'master')
408
408
def test_local_merge(self):
    target_repo = self.get_empty_repo()
    source_repo = self.get_clone_repo()

    # Branch off three commits before the tip of the source repo.
    master_tip = source_repo.commit_ids[-1]
    branch_point = source_repo.commit_ids[-3]
    source_repo._checkout(branch_point)
    source_repo._checkout('new_branch', create=True)

    # A -ff-only merge is impossible in an empty repo, so seed the
    # target with the side branch first.
    target_repo._local_pull(source_repo.path, 'new_branch')

    target_repo._local_fetch(source_repo.path, 'master')
    merge_message = 'Merge message\n\nDescription:...'
    user_name = 'Albert Einstein'
    user_email = 'albert@einstein.com'
    target_repo._local_merge(
        merge_message, user_name, user_email,
        target_repo._last_fetch_heads())

    target_repo = GitRepository(target_repo.path)
    assert target_repo.commit_ids[-2] == master_tip
    merge_commit = target_repo.get_commit(target_repo.head)
    assert merge_commit.message.strip() == merge_message
    assert merge_commit.author == '%s <%s>' % (user_name, user_email)

    # The repo must not be left in an intermediate merge state.
    assert not os.path.exists(
        os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437
437
def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
    target_repo = vcsbackend_git.create_repo(number_of_commits=1)
    vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')

    target_repo._local_fetch(self.repo.path, 'master')
    with pytest.raises(RepositoryError):
        target_repo._local_merge(
            'merge_message', 'user name', 'user@name.com',
            target_repo._last_fetch_heads())

    # The repo must not be left in an intermediate merge state.
    assert not os.path.exists(
        os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451
451
def test_local_merge_into_empty_repo(self):
    target_repo = self.get_empty_repo()

    # A -ff-only merge cannot be performed in an empty repo, so the
    # merge itself must raise.
    target_repo._local_fetch(self.repo.path, 'master')
    with pytest.raises(RepositoryError):
        target_repo._local_merge(
            'merge_message', 'user name', 'user@name.com',
            target_repo._last_fetch_heads())
461
461
def test_local_merge_in_bare_repo(self):
    # Merging requires a working tree; a bare repo must refuse.
    with pytest.raises(RepositoryError):
        self.repo._local_merge(
            'merge_message', 'user name', 'user@name.com', None)
466
466
def test_local_push_non_bare(self):
    target_repo = self.get_empty_repo()

    branch = 'pushed_branch'
    self.repo._local_push('master', target_repo.path, branch)
    # Point HEAD of the target at the pushed branch; otherwise
    # GitRepository won't report any branches.
    head_file = os.path.join(target_repo.path, '.git', 'HEAD')
    with open(head_file, 'w') as f:
        f.write('ref: refs/heads/%s' % branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[branch] == self.repo.branches['master']
481
481
def test_local_push_bare(self):
    target_repo = self.get_empty_repo(bare=True)

    branch = 'pushed_branch'
    self.repo._local_push('master', target_repo.path, branch)
    # Point HEAD of the target at the pushed branch; otherwise
    # GitRepository won't report any branches. In a bare repo the
    # HEAD file lives directly in the repository root.
    head_file = os.path.join(target_repo.path, 'HEAD')
    with open(head_file, 'w') as f:
        f.write('ref: refs/heads/%s' % branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[branch] == self.repo.branches['master']
496
496
def test_local_push_non_bare_target_branch_is_checked_out(self):
    target_repo = self.get_clone_repo()

    branch = 'pushed_branch'
    # Create and check out the target branch three commits behind tip.
    branch_point = target_repo.commit_ids[-3]
    target_repo._checkout(branch_point)
    target_repo._checkout(branch, create=True)

    self.repo._local_push('master', target_repo.path, branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[branch] == self.repo.branches['master']
512
512
def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
    # Pushing onto a divergent 'master' must fail.
    target_repo = vcsbackend_git.create_repo(number_of_commits=1)
    with pytest.raises(RepositoryError):
        self.repo._local_push('master', target_repo.path, 'master')
517
517
def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
    target_repo = self.get_empty_repo(bare=True)

    # With enable_hooks=True, the skip flag must not be injected into
    # the environment passed to the git command.
    with mock.patch.object(self.repo, 'run_git_command') as run_mock:
        self.repo._local_push(
            'master', target_repo.path, 'master', enable_hooks=True)
    extra_env = run_mock.call_args[1]['extra_env']
    assert 'RC_SKIP_HOOKS' not in extra_env
526
526
def _add_failing_hook(self, repo_path, hook_name, bare=False):
    """Install a git hook named `hook_name` into the repo at `repo_path`.

    The hook script exits 1 (failing the git operation) unless the
    ``RC_SKIP_HOOKS`` environment variable is set, in which case it
    exits 0. For a bare repo the hook lives in ``hooks/``, otherwise
    in ``.git/hooks/``.
    """
    path_components = (
        ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
    hook_path = os.path.join(repo_path, *path_components)
    with open(hook_path, 'w') as f:
        script_lines = [
            '#!%s' % sys.executable,
            'import os',
            'import sys',
            'if os.environ.get("RC_SKIP_HOOKS"):',
            ' sys.exit(0)',
            'sys.exit(1)',
        ]
        f.write('\n'.join(script_lines))
    # 0o755 (rwxr-xr-x): the old-style literal `0755` is a SyntaxError
    # on Python 3; the 0o prefix works on both 2.6+ and 3.
    os.chmod(hook_path, 0o755)
542
542
def test_local_push_does_not_execute_hook(self):
    target_repo = self.get_empty_repo()

    branch = 'pushed_branch'
    # Even with a failing pre-receive hook installed, a default local
    # push must succeed (hooks are skipped).
    self._add_failing_hook(target_repo.path, 'pre-receive')
    self.repo._local_push('master', target_repo.path, branch)
    # Point HEAD of the target at the pushed branch; otherwise
    # GitRepository won't report any branches.
    head_file = os.path.join(target_repo.path, '.git', 'HEAD')
    with open(head_file, 'w') as f:
        f.write('ref: refs/heads/%s' % branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[branch] == self.repo.branches['master']
558
558
def test_local_push_executes_hook(self):
    # With enable_hooks=True the failing pre-receive hook must run
    # and abort the push.
    target_repo = self.get_empty_repo(bare=True)
    self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
    with pytest.raises(RepositoryError):
        self.repo._local_push(
            'master', target_repo.path, 'master', enable_hooks=True)
565
565
def test_maybe_prepare_merge_workspace(self):
    # Preparing a merge workspace twice with identical refs must
    # succeed both times and yield a valid clone.
    source_ref = Reference('branch', 'master', 'unused')
    target_ref = Reference('branch', 'master', 'unused')

    workspace = self.repo._maybe_prepare_merge_workspace(
        2, 'pr2', source_ref, target_ref)

    assert os.path.isdir(workspace)
    workspace_repo = GitRepository(workspace)
    assert workspace_repo.branches == self.repo.branches

    # The second call reuses/recreates the workspace without error.
    workspace = self.repo._maybe_prepare_merge_workspace(
        2, 'pr2', source_ref, target_ref)
    assert os.path.isdir(workspace)
580
580
def test_maybe_prepare_merge_workspace_different_refs(self):
    # Same as above but with distinct source/target refs.
    source_ref = Reference('branch', 'master', 'unused')
    target_ref = Reference('branch', 'develop', 'unused')

    workspace = self.repo._maybe_prepare_merge_workspace(
        2, 'pr2', source_ref, target_ref)

    assert os.path.isdir(workspace)
    workspace_repo = GitRepository(workspace)
    assert workspace_repo.branches == self.repo.branches

    # The second call reuses/recreates the workspace without error.
    workspace = self.repo._maybe_prepare_merge_workspace(
        2, 'pr2', source_ref, target_ref)
    assert os.path.isdir(workspace)
595
595
596 def test_cleanup_merge_workspace(self):
596 def test_cleanup_merge_workspace(self):
597 workspace = self.repo._maybe_prepare_merge_workspace(
597 workspace = self.repo._maybe_prepare_merge_workspace(
598 2, 'pr3', Reference('branch', 'master', 'unused'),
598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 Reference('branch', 'master', 'unused'))
599 Reference('branch', 'master', 'unused'))
600 self.repo.cleanup_merge_workspace(2, 'pr3')
600 self.repo.cleanup_merge_workspace(2, 'pr3')
601
601
602 assert not os.path.exists(workspace)
602 assert not os.path.exists(workspace)
603
603
    def test_cleanup_merge_workspace_invalid_workspace_id(self):
        """Cleaning up a workspace that was never created must not raise."""
        # No assert: for a non-existent workspace the call should be a
        # silent no-op; merely not raising is the behavior under test.
        self.repo.cleanup_merge_workspace(1, 'pr4')
608
608
609 def test_set_refs(self):
609 def test_set_refs(self):
610 test_ref = 'refs/test-refs/abcde'
610 test_ref = 'refs/test-refs/abcde'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612
612
613 self.repo.set_refs(test_ref, test_commit_id)
613 self.repo.set_refs(test_ref, test_commit_id)
614 stdout, _ = self.repo.run_git_command(['show-ref'])
614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 assert test_ref in stdout
615 assert test_ref in stdout
616 assert test_commit_id in stdout
616 assert test_commit_id in stdout
617
617
618 def test_remove_ref(self):
618 def test_remove_ref(self):
619 test_ref = 'refs/test-refs/abcde'
619 test_ref = 'refs/test-refs/abcde'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 self.repo.set_refs(test_ref, test_commit_id)
621 self.repo.set_refs(test_ref, test_commit_id)
622 stdout, _ = self.repo.run_git_command(['show-ref'])
622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 assert test_ref in stdout
623 assert test_ref in stdout
624 assert test_commit_id in stdout
624 assert test_commit_id in stdout
625
625
626 self.repo.remove_ref(test_ref)
626 self.repo.remove_ref(test_ref)
627 stdout, _ = self.repo.run_git_command(['show-ref'])
627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 assert test_ref not in stdout
628 assert test_ref not in stdout
629 assert test_commit_id not in stdout
629 assert test_commit_id not in stdout
630
630
631
631
class TestGitCommit(object):
    # Tests for commit-level behavior (nodes, history, annotation, states)
    # of the git backend, run against the shared read-only test repository.

    @pytest.fixture(autouse=True)
    def prepare(self):
        # Bind a fresh GitRepository handle on the shared test repo for
        # every test method in this class.
        self.repo = GitRepository(TEST_GIT_REPO)
637
637
638 def test_default_commit(self):
638 def test_default_commit(self):
639 tip = self.repo.get_commit()
639 tip = self.repo.get_commit()
640 assert tip == self.repo.get_commit(None)
640 assert tip == self.repo.get_commit(None)
641 assert tip == self.repo.get_commit('tip')
641 assert tip == self.repo.get_commit('tip')
642
642
643 def test_root_node(self):
643 def test_root_node(self):
644 tip = self.repo.get_commit()
644 tip = self.repo.get_commit()
645 assert tip.root is tip.get_node('')
645 assert tip.root is tip.get_node('')
646
646
    def test_lazy_fetch(self):
        """
        Test if commit's nodes expands and are cached as we walk through
        the commit. This test is somewhat hard to write as order of tests
        is a key here. Written by running command after command in a shell.
        """
        # NOTE: the exact sequence of accesses below is intentional — each
        # assertion checks the cache size *after* the preceding access.
        commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
        assert commit_id in self.repo.commit_ids
        commit = self.repo.get_commit(commit_id)
        # Nothing cached until the first node access.
        assert len(commit.nodes) == 0
        root = commit.root
        assert len(commit.nodes) == 1
        assert len(root.nodes) == 8
        # accessing root.nodes updates commit.nodes
        assert len(commit.nodes) == 9

        docs = root.get_node('docs')
        # we haven't yet accessed anything new as docs dir was already cached
        assert len(commit.nodes) == 9
        assert len(docs.nodes) == 8
        # accessing docs.nodes updates commit.nodes
        assert len(commit.nodes) == 17

        # Repeated lookups must return the very same cached object.
        assert docs is commit.get_node('docs')
        assert docs is root.nodes[0]
        assert docs is root.dirs[0]
        assert docs is commit.get_node('docs')
675 def test_nodes_with_commit(self):
675 def test_nodes_with_commit(self):
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 commit = self.repo.get_commit(commit_id)
677 commit = self.repo.get_commit(commit_id)
678 root = commit.root
678 root = commit.root
679 docs = root.get_node('docs')
679 docs = root.get_node('docs')
680 assert docs is commit.get_node('docs')
680 assert docs is commit.get_node('docs')
681 api = docs.get_node('api')
681 api = docs.get_node('api')
682 assert api is commit.get_node('docs/api')
682 assert api is commit.get_node('docs/api')
683 index = api.get_node('index.rst')
683 index = api.get_node('index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
685 assert index is commit.get_node('docs')\
685 assert index is commit.get_node('docs')\
686 .get_node('api')\
686 .get_node('api')\
687 .get_node('index.rst')
687 .get_node('index.rst')
688
688
    def test_branch_and_tags(self):
        """
        rev0 = self.repo.commit_ids[0]
        commit0 = self.repo.get_commit(rev0)
        assert commit0.branch == 'master'
        assert commit0.tags == []

        rev10 = self.repo.commit_ids[10]
        commit10 = self.repo.get_commit(rev10)
        assert commit10.branch == 'master'
        assert commit10.tags == []

        rev44 = self.repo.commit_ids[44]
        commit44 = self.repo.get_commit(rev44)
        assert commit44.branch == 'web-branch'

        tip = self.repo.get_commit('tip')
        assert 'tip' in tip.tags
        """
        # Those tests would fail - branches are now going
        # to be changed at main API in order to support git backend
        pass
        # NOTE(review): the body above is deliberately a string literal
        # (disabled test code), not executable assertions — the whole test
        # is a no-op kept for documentation of the intended checks.
711
711
712 def test_file_size(self):
712 def test_file_size(self):
713 to_check = (
713 to_check = (
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 'vcs/backends/BaseRepository.py', 502),
715 'vcs/backends/BaseRepository.py', 502),
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 'vcs/backends/hg.py', 854),
717 'vcs/backends/hg.py', 854),
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 'setup.py', 1068),
719 'setup.py', 1068),
720
720
721 ('d955cd312c17b02143c04fa1099a352b04368118',
721 ('d955cd312c17b02143c04fa1099a352b04368118',
722 'vcs/backends/base.py', 2921),
722 'vcs/backends/base.py', 2921),
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 'vcs/backends/base.py', 3936),
724 'vcs/backends/base.py', 3936),
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 'vcs/backends/base.py', 6189),
726 'vcs/backends/base.py', 6189),
727 )
727 )
728 for commit_id, path, size in to_check:
728 for commit_id, path, size in to_check:
729 node = self.repo.get_commit(commit_id).get_node(path)
729 node = self.repo.get_commit(commit_id).get_node(path)
730 assert node.is_file()
730 assert node.is_file()
731 assert node.size == size
731 assert node.size == size
732
732
733 def test_file_history_from_commits(self):
733 def test_file_history_from_commits(self):
734 node = self.repo[10].get_node('setup.py')
734 node = self.repo[10].get_node('setup.py')
735 commit_ids = [commit.raw_id for commit in node.history]
735 commit_ids = [commit.raw_id for commit in node.history]
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737
737
738 node = self.repo[20].get_node('setup.py')
738 node = self.repo[20].get_node('setup.py')
739 node_ids = [commit.raw_id for commit in node.history]
739 node_ids = [commit.raw_id for commit in node.history]
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742
742
743 # special case we check history from commit that has this particular
743 # special case we check history from commit that has this particular
744 # file changed this means we check if it's included as well
744 # file changed this means we check if it's included as well
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 .get_node('setup.py')
746 .get_node('setup.py')
747 node_ids = [commit.raw_id for commit in node.history]
747 node_ids = [commit.raw_id for commit in node.history]
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750
750
    def test_file_history(self):
        """Known commit ids are a subset of each file's reported history."""
        # we can only check if those commits are present in the history
        # as we cannot update this test every time file is changed
        files = {
            'setup.py': [
                '54386793436c938cff89326944d4c2702340037d',
                '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
                '998ed409c795fec2012b1c0ca054d99888b22090',
                '5e0eb4c47f56564395f76333f319d26c79e2fb09',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/nodes.py': [
                '33fa3223355104431402a888fa77a4e9956feb3e',
                'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
                'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
                'ab5721ca0a081f26bf43d9051e615af2cc99952f',
                'c877b68d18e792a66b7f4c529ea02c8f80801542',
                '4313566d2e417cb382948f8d9d7c765330356054',
                '6c2303a793671e807d1cfc70134c9ca0767d98c2',
                '54386793436c938cff89326944d4c2702340037d',
                '54000345d2e78b03a99d561399e8e548de3f3203',
                '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
                '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
                '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
                '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
                'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
                '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
                '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
                '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
                '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
                'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
                'f15c21f97864b4f071cddfbf2750ec2e23859414',
                'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
                'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
                '84dec09632a4458f79f50ddbbd155506c460b4f9',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
                'b8d04012574729d2c29886e53b1a43ef16dd00a1',
                '6970b057cffe4aab0a792aa634c89f4bebf01441',
                'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/backends/git.py': [
                '4cf116ad5a457530381135e2f4c453e68a1b0105',
                '9a751d84d8e9408e736329767387f41b36935153',
                'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
                '428f81bb652bcba8d631bce926e8834ff49bdcc6',
                '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
                '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
                '50e08c506174d8645a4bb517dd122ac946a0f3bf',
                '54000345d2e78b03a99d561399e8e548de3f3203',
            ],
        }
        for path, commit_ids in files.items():
            # Look the node up from the first known commit touching it.
            node = self.repo.get_commit(commit_ids[0]).get_node(path)
            node_ids = [commit.raw_id for commit in node.history]
            assert set(commit_ids).issubset(set(node_ids)), (
                "We assumed that %s is subset of commit_ids for which file %s "
                "has been changed, and history of that node returned: %s"
                % (commit_ids, path, node_ids))
817
817
    def test_file_annotate(self):
        """Per-line annotation matches the recorded blame commit ids."""
        # Fixture: path -> commit id -> expected line count and the commit
        # id responsible for each line, in file order.
        # NOTE(review): 'lines_no' is carried in the fixture but never
        # asserted below — only the 'commits' list is checked.
        files = {
            'vcs/backends/__init__.py': {
                'c1214f7e79e02fc37156ff215cd71275450cffc3': {
                    'lines_no': 1,
                    'commits': [
                        'c1214f7e79e02fc37156ff215cd71275450cffc3',
                    ],
                },
                '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
                    'lines_no': 21,
                    'commits': [
                        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
                'e29b67bd158580fc90fc5e9111240b90e6e86064': {
                    'lines_no': 32,
                    'commits': [
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '54000345d2e78b03a99d561399e8e548de3f3203',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
            },
        }

        for fname, commit_dict in files.items():
            for commit_id, __ in commit_dict.items():
                commit = self.repo.get_commit(commit_id)

                # Annotation rows are tuples; x[1] is the commit id and
                # x[2] a callable returning the commit object lazily.
                l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
                assert l1_1 == l1_2
                l1 = l1_1
                l2 = files[fname][commit_id]['commits']
                assert l1 == l2, (
                    "The lists of commit_ids for %s@commit_id %s"
                    "from annotation list should match each other, "
                    "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906
906
907 def test_files_state(self):
907 def test_files_state(self):
908 """
908 """
909 Tests state of FileNodes.
909 Tests state of FileNodes.
910 """
910 """
911 node = self.repo\
911 node = self.repo\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 .get_node('vcs/utils/diffs.py')
913 .get_node('vcs/utils/diffs.py')
914 assert node.state, NodeState.ADDED
914 assert node.state, NodeState.ADDED
915 assert node.added
915 assert node.added
916 assert not node.changed
916 assert not node.changed
917 assert not node.not_changed
917 assert not node.not_changed
918 assert not node.removed
918 assert not node.removed
919
919
920 node = self.repo\
920 node = self.repo\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 .get_node('.hgignore')
922 .get_node('.hgignore')
923 assert node.state, NodeState.CHANGED
923 assert node.state, NodeState.CHANGED
924 assert not node.added
924 assert not node.added
925 assert node.changed
925 assert node.changed
926 assert not node.not_changed
926 assert not node.not_changed
927 assert not node.removed
927 assert not node.removed
928
928
929 node = self.repo\
929 node = self.repo\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 .get_node('setup.py')
931 .get_node('setup.py')
932 assert node.state, NodeState.NOT_CHANGED
932 assert node.state, NodeState.NOT_CHANGED
933 assert not node.added
933 assert not node.added
934 assert not node.changed
934 assert not node.changed
935 assert node.not_changed
935 assert node.not_changed
936 assert not node.removed
936 assert not node.removed
937
937
938 # If node has REMOVED state then trying to fetch it would raise
938 # If node has REMOVED state then trying to fetch it would raise
939 # CommitError exception
939 # CommitError exception
940 commit = self.repo.get_commit(
940 commit = self.repo.get_commit(
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 path = 'vcs/backends/BaseRepository.py'
942 path = 'vcs/backends/BaseRepository.py'
943 with pytest.raises(NodeDoesNotExistError):
943 with pytest.raises(NodeDoesNotExistError):
944 commit.get_node(path)
944 commit.get_node(path)
945 # but it would be one of ``removed`` (commit's attribute)
945 # but it would be one of ``removed`` (commit's attribute)
946 assert path in [rf.path for rf in commit.removed]
946 assert path in [rf.path for rf in commit.removed]
947
947
948 commit = self.repo.get_commit(
948 commit = self.repo.get_commit(
949 '54386793436c938cff89326944d4c2702340037d')
949 '54386793436c938cff89326944d4c2702340037d')
950 changed = [
950 changed = [
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 'vcs/nodes.py']
952 'vcs/nodes.py']
953 assert set(changed) == set([f.path for f in commit.changed])
953 assert set(changed) == set([f.path for f in commit.changed])
954
954
955 def test_unicode_branch_refs(self):
955 def test_unicode_branch_refs(self):
956 unicode_branches = {
956 unicode_branches = {
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
959 }
959 }
960 with mock.patch(
960 with mock.patch(
961 ("rhodecode.lib.vcs.backends.git.repository"
961 ("rhodecode.lib.vcs.backends.git.repository"
962 ".GitRepository._refs"),
962 ".GitRepository._refs"),
963 unicode_branches):
963 unicode_branches):
964 branches = self.repo.branches
964 branches = self.repo.branches
965
965
966 assert 'unicode' in branches
966 assert 'unicode' in branches
967 assert u'uniΓ§ΓΆβˆ‚e' in branches
967 assert u'uniΓ§ΓΆβˆ‚e' in branches
968
968
969 def test_unicode_tag_refs(self):
969 def test_unicode_tag_refs(self):
970 unicode_tags = {
970 unicode_tags = {
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 }
973 }
974 with mock.patch(
974 with mock.patch(
975 ("rhodecode.lib.vcs.backends.git.repository"
975 ("rhodecode.lib.vcs.backends.git.repository"
976 ".GitRepository._refs"),
976 ".GitRepository._refs"),
977 unicode_tags):
977 unicode_tags):
978 tags = self.repo.tags
978 tags = self.repo.tags
979
979
980 assert 'unicode' in tags
980 assert 'unicode' in tags
981 assert u'uniΓ§ΓΆβˆ‚e' in tags
981 assert u'uniΓ§ΓΆβˆ‚e' in tags
982
982
983 def test_commit_message_is_unicode(self):
983 def test_commit_message_is_unicode(self):
984 for commit in self.repo:
984 for commit in self.repo:
985 assert type(commit.message) == unicode
985 assert type(commit.message) == unicode
986
986
987 def test_commit_author_is_unicode(self):
987 def test_commit_author_is_unicode(self):
988 for commit in self.repo:
988 for commit in self.repo:
989 assert type(commit.author) == unicode
989 assert type(commit.author) == unicode
990
990
991 def test_repo_files_content_is_unicode(self):
991 def test_repo_files_content_is_unicode(self):
992 commit = self.repo.get_commit()
992 commit = self.repo.get_commit()
993 for node in commit.get_node('/'):
993 for node in commit.get_node('/'):
994 if node.is_file():
994 if node.is_file():
995 assert type(node.content) == unicode
995 assert type(node.content) == unicode
996
996
997 def test_wrong_path(self):
997 def test_wrong_path(self):
998 # There is 'setup.py' in the root dir but not there:
998 # There is 'setup.py' in the root dir but not there:
999 path = 'foo/bar/setup.py'
999 path = 'foo/bar/setup.py'
1000 tip = self.repo.get_commit()
1000 tip = self.repo.get_commit()
1001 with pytest.raises(VCSError):
1001 with pytest.raises(VCSError):
1002 tip.get_node(path)
1002 tip.get_node(path)
1003
1003
1004 @pytest.mark.parametrize("author_email, commit_id", [
1004 @pytest.mark.parametrize("author_email, commit_id", [
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 ('lukasz.balcerzak@python-center.pl',
1006 ('lukasz.balcerzak@python-center.pl',
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 ])
1009 ])
1010 def test_author_email(self, author_email, commit_id):
1010 def test_author_email(self, author_email, commit_id):
1011 commit = self.repo.get_commit(commit_id)
1011 commit = self.repo.get_commit(commit_id)
1012 assert author_email == commit.author_email
1012 assert author_email == commit.author_email
1013
1013
1014 @pytest.mark.parametrize("author, commit_id", [
1014 @pytest.mark.parametrize("author, commit_id", [
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 ])
1018 ])
1019 def test_author_username(self, author, commit_id):
1019 def test_author_username(self, author, commit_id):
1020 commit = self.repo.get_commit(commit_id)
1020 commit = self.repo.get_commit(commit_id)
1021 assert author == commit.author_name
1021 assert author == commit.author_name
1022
1022
1023
1023
1024 class TestLargeFileRepo(object):
1024 class TestLargeFileRepo(object):
1025
1025
1026 def test_large_file(self, backend_git):
1026 def test_large_file(self, backend_git):
1027 conf = make_db_config()
1027 conf = make_db_config()
1028 repo = backend_git.create_test_repo('largefiles', conf)
1028 repo = backend_git.create_test_repo('largefiles', conf)
1029
1029
1030 tip = repo.scm_instance().get_commit()
1030 tip = repo.scm_instance().get_commit()
1031
1031
1032 # extract stored LF node into the origin cache
1032 # extract stored LF node into the origin cache
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034
1034
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 oid_path = os.path.join(lfs_store, oid)
1036 oid_path = os.path.join(lfs_store, oid)
1037 oid_destination = os.path.join(
1037 oid_destination = os.path.join(
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 shutil.copy(oid_path, oid_destination)
1039 shutil.copy(oid_path, oid_destination)
1040
1040
1041 node = tip.get_node('1MB.zip')
1041 node = tip.get_node('1MB.zip')
1042
1042
1043 lf_node = node.get_largefile_node()
1043 lf_node = node.get_largefile_node()
1044
1044
1045 assert lf_node.is_largefile() is True
1045 assert lf_node.is_largefile() is True
1046 assert lf_node.size == 1024000
1046 assert lf_node.size == 1024000
1047 assert lf_node.name == '1MB.zip'
1047 assert lf_node.name == '1MB.zip'
1048
1048
1049
1049
1050 @pytest.mark.usefixtures("vcs_repository_support")
1050 @pytest.mark.usefixtures("vcs_repository_support")
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1052
1052
1053 @classmethod
1053 @classmethod
1054 def _get_commits(cls):
1054 def _get_commits(cls):
1055 return [
1055 return [
1056 {
1056 {
1057 'message': 'Initial',
1057 'message': 'Initial',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1059 'date': datetime.datetime(2010, 1, 1, 20),
1059 'date': datetime.datetime(2010, 1, 1, 20),
1060 'added': [
1060 'added': [
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 FileNode(
1062 FileNode(
1063 'foobar/static/admin', content='admin',
1063 'foobar/static/admin', content='admin',
1064 mode=0120000), # this is a link
1064 mode=0o120000), # this is a link
1065 FileNode('foo', content='foo'),
1065 FileNode('foo', content='foo'),
1066 ],
1066 ],
1067 },
1067 },
1068 {
1068 {
1069 'message': 'Second',
1069 'message': 'Second',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1071 'date': datetime.datetime(2010, 1, 1, 22),
1071 'date': datetime.datetime(2010, 1, 1, 22),
1072 'added': [
1072 'added': [
1073 FileNode('foo2', content='foo2'),
1073 FileNode('foo2', content='foo2'),
1074 ],
1074 ],
1075 },
1075 },
1076 ]
1076 ]
1077
1077
1078 def test_paths_slow_traversing(self):
1078 def test_paths_slow_traversing(self):
1079 commit = self.repo.get_commit()
1079 commit = self.repo.get_commit()
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 .get_node('admin').get_node('base.js').content == 'base'
1081 .get_node('admin').get_node('base.js').content == 'base'
1082
1082
1083 def test_paths_fast_traversing(self):
1083 def test_paths_fast_traversing(self):
1084 commit = self.repo.get_commit()
1084 commit = self.repo.get_commit()
1085 assert (
1085 assert (
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 'base')
1087 'base')
1088
1088
1089 def test_get_diff_runs_git_command_with_hashes(self):
1089 def test_get_diff_runs_git_command_with_hashes(self):
1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1092 self.repo.run_git_command.assert_called_once_with(
1092 self.repo.run_git_command.assert_called_once_with(
1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 '--abbrev=40', self.repo._get_commit_id(0),
1094 '--abbrev=40', self.repo._get_commit_id(0),
1095 self.repo._get_commit_id(1)])
1095 self.repo._get_commit_id(1)])
1096
1096
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 self.repo.run_git_command.assert_called_once_with(
1100 self.repo.run_git_command.assert_called_once_with(
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 '--abbrev=40', self.repo._get_commit_id(1)])
1102 '--abbrev=40', self.repo._get_commit_id(1)])
1103
1103
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 self.repo.run_git_command.assert_called_once_with(
1107 self.repo.run_git_command.assert_called_once_with(
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 '--abbrev=40', self.repo._get_commit_id(0),
1109 '--abbrev=40', self.repo._get_commit_id(0),
1110 self.repo._get_commit_id(1), '--', 'foo'])
1110 self.repo._get_commit_id(1), '--', 'foo'])
1111
1111
1112
1112
1113 @pytest.mark.usefixtures("vcs_repository_support")
1113 @pytest.mark.usefixtures("vcs_repository_support")
1114 class TestGitRegression(BackendTestMixin):
1114 class TestGitRegression(BackendTestMixin):
1115
1115
1116 @classmethod
1116 @classmethod
1117 def _get_commits(cls):
1117 def _get_commits(cls):
1118 return [
1118 return [
1119 {
1119 {
1120 'message': 'Initial',
1120 'message': 'Initial',
1121 'author': 'Joe Doe <joe.doe@example.com>',
1121 'author': 'Joe Doe <joe.doe@example.com>',
1122 'date': datetime.datetime(2010, 1, 1, 20),
1122 'date': datetime.datetime(2010, 1, 1, 20),
1123 'added': [
1123 'added': [
1124 FileNode('bot/__init__.py', content='base'),
1124 FileNode('bot/__init__.py', content='base'),
1125 FileNode('bot/templates/404.html', content='base'),
1125 FileNode('bot/templates/404.html', content='base'),
1126 FileNode('bot/templates/500.html', content='base'),
1126 FileNode('bot/templates/500.html', content='base'),
1127 ],
1127 ],
1128 },
1128 },
1129 {
1129 {
1130 'message': 'Second',
1130 'message': 'Second',
1131 'author': 'Joe Doe <joe.doe@example.com>',
1131 'author': 'Joe Doe <joe.doe@example.com>',
1132 'date': datetime.datetime(2010, 1, 1, 22),
1132 'date': datetime.datetime(2010, 1, 1, 22),
1133 'added': [
1133 'added': [
1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 FileNode(
1136 FileNode(
1137 'bot/build/static/templates/f.html', content='foo2'),
1137 'bot/build/static/templates/f.html', content='foo2'),
1138 FileNode(
1138 FileNode(
1139 'bot/build/static/templates/f1.html', content='foo2'),
1139 'bot/build/static/templates/f1.html', content='foo2'),
1140 FileNode('bot/build/templates/err.html', content='foo2'),
1140 FileNode('bot/build/templates/err.html', content='foo2'),
1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 ],
1142 ],
1143 },
1143 },
1144 ]
1144 ]
1145
1145
1146 @pytest.mark.parametrize("path, expected_paths", [
1146 @pytest.mark.parametrize("path, expected_paths", [
1147 ('bot', [
1147 ('bot', [
1148 'bot/build',
1148 'bot/build',
1149 'bot/templates',
1149 'bot/templates',
1150 'bot/__init__.py']),
1150 'bot/__init__.py']),
1151 ('bot/build', [
1151 ('bot/build', [
1152 'bot/build/migrations',
1152 'bot/build/migrations',
1153 'bot/build/static',
1153 'bot/build/static',
1154 'bot/build/templates']),
1154 'bot/build/templates']),
1155 ('bot/build/static', [
1155 ('bot/build/static', [
1156 'bot/build/static/templates']),
1156 'bot/build/static/templates']),
1157 ('bot/build/static/templates', [
1157 ('bot/build/static/templates', [
1158 'bot/build/static/templates/f.html',
1158 'bot/build/static/templates/f.html',
1159 'bot/build/static/templates/f1.html']),
1159 'bot/build/static/templates/f1.html']),
1160 ('bot/build/templates', [
1160 ('bot/build/templates', [
1161 'bot/build/templates/err.html',
1161 'bot/build/templates/err.html',
1162 'bot/build/templates/err2.html']),
1162 'bot/build/templates/err2.html']),
1163 ('bot/templates/', [
1163 ('bot/templates/', [
1164 'bot/templates/404.html',
1164 'bot/templates/404.html',
1165 'bot/templates/500.html']),
1165 'bot/templates/500.html']),
1166 ])
1166 ])
1167 def test_similar_paths(self, path, expected_paths):
1167 def test_similar_paths(self, path, expected_paths):
1168 commit = self.repo.get_commit()
1168 commit = self.repo.get_commit()
1169 paths = [n.path for n in commit.get_nodes(path)]
1169 paths = [n.path for n in commit.get_nodes(path)]
1170 assert paths == expected_paths
1170 assert paths == expected_paths
1171
1171
1172
1172
1173 class TestDiscoverGitVersion(object):
1173 class TestDiscoverGitVersion(object):
1174
1174
1175 def test_returns_git_version(self, baseapp):
1175 def test_returns_git_version(self, baseapp):
1176 version = discover_git_version()
1176 version = discover_git_version()
1177 assert version
1177 assert version
1178
1178
1179 def test_returns_empty_string_without_vcsserver(self):
1179 def test_returns_empty_string_without_vcsserver(self):
1180 mock_connection = mock.Mock()
1180 mock_connection = mock.Mock()
1181 mock_connection.discover_git_version = mock.Mock(
1181 mock_connection.discover_git_version = mock.Mock(
1182 side_effect=Exception)
1182 side_effect=Exception)
1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 version = discover_git_version()
1184 version = discover_git_version()
1185 assert version == ''
1185 assert version == ''
1186
1186
1187
1187
1188 class TestGetSubmoduleUrl(object):
1188 class TestGetSubmoduleUrl(object):
1189 def test_submodules_file_found(self):
1189 def test_submodules_file_found(self):
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 node = mock.Mock()
1191 node = mock.Mock()
1192 with mock.patch.object(
1192 with mock.patch.object(
1193 commit, 'get_node', return_value=node) as get_node_mock:
1193 commit, 'get_node', return_value=node) as get_node_mock:
1194 node.content = (
1194 node.content = (
1195 '[submodule "subrepo1"]\n'
1195 '[submodule "subrepo1"]\n'
1196 '\tpath = subrepo1\n'
1196 '\tpath = subrepo1\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1198 )
1198 )
1199 result = commit._get_submodule_url('subrepo1')
1199 result = commit._get_submodule_url('subrepo1')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1201 assert result == 'https://code.rhodecode.com/dulwich'
1201 assert result == 'https://code.rhodecode.com/dulwich'
1202
1202
1203 def test_complex_submodule_path(self):
1203 def test_complex_submodule_path(self):
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 node = mock.Mock()
1205 node = mock.Mock()
1206 with mock.patch.object(
1206 with mock.patch.object(
1207 commit, 'get_node', return_value=node) as get_node_mock:
1207 commit, 'get_node', return_value=node) as get_node_mock:
1208 node.content = (
1208 node.content = (
1209 '[submodule "complex/subrepo/path"]\n'
1209 '[submodule "complex/subrepo/path"]\n'
1210 '\tpath = complex/subrepo/path\n'
1210 '\tpath = complex/subrepo/path\n'
1211 '\turl = https://code.rhodecode.com/dulwich\n'
1211 '\turl = https://code.rhodecode.com/dulwich\n'
1212 )
1212 )
1213 result = commit._get_submodule_url('complex/subrepo/path')
1213 result = commit._get_submodule_url('complex/subrepo/path')
1214 get_node_mock.assert_called_once_with('.gitmodules')
1214 get_node_mock.assert_called_once_with('.gitmodules')
1215 assert result == 'https://code.rhodecode.com/dulwich'
1215 assert result == 'https://code.rhodecode.com/dulwich'
1216
1216
1217 def test_submodules_file_not_found(self):
1217 def test_submodules_file_not_found(self):
1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 with mock.patch.object(
1219 with mock.patch.object(
1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 result = commit._get_submodule_url('complex/subrepo/path')
1221 result = commit._get_submodule_url('complex/subrepo/path')
1222 assert result is None
1222 assert result is None
1223
1223
1224 def test_path_not_found(self):
1224 def test_path_not_found(self):
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 node = mock.Mock()
1226 node = mock.Mock()
1227 with mock.patch.object(
1227 with mock.patch.object(
1228 commit, 'get_node', return_value=node) as get_node_mock:
1228 commit, 'get_node', return_value=node) as get_node_mock:
1229 node.content = (
1229 node.content = (
1230 '[submodule "subrepo1"]\n'
1230 '[submodule "subrepo1"]\n'
1231 '\tpath = subrepo1\n'
1231 '\tpath = subrepo1\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1233 )
1233 )
1234 result = commit._get_submodule_url('subrepo2')
1234 result = commit._get_submodule_url('subrepo2')
1235 get_node_mock.assert_called_once_with('.gitmodules')
1235 get_node_mock.assert_called_once_with('.gitmodules')
1236 assert result is None
1236 assert result is None
1237
1237
1238 def test_returns_cached_values(self):
1238 def test_returns_cached_values(self):
1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 node = mock.Mock()
1240 node = mock.Mock()
1241 with mock.patch.object(
1241 with mock.patch.object(
1242 commit, 'get_node', return_value=node) as get_node_mock:
1242 commit, 'get_node', return_value=node) as get_node_mock:
1243 node.content = (
1243 node.content = (
1244 '[submodule "subrepo1"]\n'
1244 '[submodule "subrepo1"]\n'
1245 '\tpath = subrepo1\n'
1245 '\tpath = subrepo1\n'
1246 '\turl = https://code.rhodecode.com/dulwich\n'
1246 '\turl = https://code.rhodecode.com/dulwich\n'
1247 )
1247 )
1248 for _ in range(3):
1248 for _ in range(3):
1249 commit._get_submodule_url('subrepo1')
1249 commit._get_submodule_url('subrepo1')
1250 get_node_mock.assert_called_once_with('.gitmodules')
1250 get_node_mock.assert_called_once_with('.gitmodules')
1251
1251
1252 def test_get_node_returns_a_link(self):
1252 def test_get_node_returns_a_link(self):
1253 repository = mock.Mock()
1253 repository = mock.Mock()
1254 repository.alias = 'git'
1254 repository.alias = 'git'
1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 get_id_patch = mock.patch.object(
1257 get_id_patch = mock.patch.object(
1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 get_submodule_patch = mock.patch.object(
1259 get_submodule_patch = mock.patch.object(
1260 commit, '_get_submodule_url', return_value=submodule_url)
1260 commit, '_get_submodule_url', return_value=submodule_url)
1261
1261
1262 with get_id_patch, get_submodule_patch as submodule_mock:
1262 with get_id_patch, get_submodule_patch as submodule_mock:
1263 node = commit.get_node('/abcde')
1263 node = commit.get_node('/abcde')
1264
1264
1265 submodule_mock.assert_called_once_with('/abcde')
1265 submodule_mock.assert_called_once_with('/abcde')
1266 assert type(node) == SubModuleNode
1266 assert type(node) == SubModuleNode
1267 assert node.url == submodule_url
1267 assert node.url == submodule_url
1268
1268
1269 def test_get_nodes_returns_links(self):
1269 def test_get_nodes_returns_links(self):
1270 repository = mock.MagicMock()
1270 repository = mock.MagicMock()
1271 repository.alias = 'git'
1271 repository.alias = 'git'
1272 repository._remote.tree_items.return_value = [
1272 repository._remote.tree_items.return_value = [
1273 ('subrepo', 'stat', 1, 'link')
1273 ('subrepo', 'stat', 1, 'link')
1274 ]
1274 ]
1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 get_id_patch = mock.patch.object(
1277 get_id_patch = mock.patch.object(
1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 get_submodule_patch = mock.patch.object(
1279 get_submodule_patch = mock.patch.object(
1280 commit, '_get_submodule_url', return_value=submodule_url)
1280 commit, '_get_submodule_url', return_value=submodule_url)
1281
1281
1282 with get_id_patch, get_submodule_patch as submodule_mock:
1282 with get_id_patch, get_submodule_patch as submodule_mock:
1283 nodes = commit.get_nodes('/abcde')
1283 nodes = commit.get_nodes('/abcde')
1284
1284
1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 assert len(nodes) == 1
1286 assert len(nodes) == 1
1287 assert type(nodes[0]) == SubModuleNode
1287 assert type(nodes[0]) == SubModuleNode
1288 assert nodes[0].url == submodule_url
1288 assert nodes[0].url == submodule_url
@@ -1,275 +1,275 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import stat
21 import stat
22
22
23 import pytest
23 import pytest
24
24
25 from rhodecode.lib.vcs.nodes import DirNode
25 from rhodecode.lib.vcs.nodes import DirNode
26 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib.vcs.nodes import FileNode
27 from rhodecode.lib.vcs.nodes import Node
27 from rhodecode.lib.vcs.nodes import Node
28 from rhodecode.lib.vcs.nodes import NodeError
28 from rhodecode.lib.vcs.nodes import NodeError
29 from rhodecode.lib.vcs.nodes import NodeKind
29 from rhodecode.lib.vcs.nodes import NodeKind
30 from rhodecode.tests.vcs.conftest import BackendTestMixin
30 from rhodecode.tests.vcs.conftest import BackendTestMixin
31
31
32
32
33 @pytest.fixture()
33 @pytest.fixture()
34 def binary_filenode():
34 def binary_filenode():
35 def node_maker(filename):
35 def node_maker(filename):
36 data = (
36 data = (
37 "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00"
37 "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00"
38 "\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7"
38 "\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7"
39 "\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00"
39 "\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00"
40 "\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a"
40 "\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a"
41 "\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&"
41 "\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&"
42 "\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?="
42 "\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?="
43 "\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw."
43 "\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw."
44 "\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/"
44 "\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/"
45 "\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H"
45 "\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H"
46 "\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q["
46 "\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q["
47 "\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?"
47 "\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?"
48 "\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?"
48 "\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?"
49 "\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0c<?O"
49 "\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0c<?O"
50 "\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad"
50 "\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad"
51 "\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???"
51 "\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???"
52 "\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1"
52 "\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1"
53 "\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J"
53 "\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J"
54 "\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X"
54 "\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X"
55 "\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~"
55 "\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~"
56 "\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u"
56 "\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u"
57 "\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a"
57 "\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a"
58 "\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00"
58 "\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00"
59 "IEND\xaeB`\x82")
59 "IEND\xaeB`\x82")
60 return FileNode(filename, content=data)
60 return FileNode(filename, content=data)
61 return node_maker
61 return node_maker
62
62
63
63
64 class TestNodeBasics:
64 class TestNodeBasics:
65
65
66 @pytest.mark.parametrize("path", ['/foo', '/foo/bar'])
66 @pytest.mark.parametrize("path", ['/foo', '/foo/bar'])
67 @pytest.mark.parametrize(
67 @pytest.mark.parametrize(
68 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
68 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
69 def test_init_wrong_paths(self, path, kind):
69 def test_init_wrong_paths(self, path, kind):
70 """
70 """
71 Cannot innitialize Node objects with path with slash at the beginning.
71 Cannot innitialize Node objects with path with slash at the beginning.
72 """
72 """
73 with pytest.raises(NodeError):
73 with pytest.raises(NodeError):
74 Node(path, kind)
74 Node(path, kind)
75
75
76 @pytest.mark.parametrize("path", ['path', 'some/path'])
76 @pytest.mark.parametrize("path", ['path', 'some/path'])
77 @pytest.mark.parametrize(
77 @pytest.mark.parametrize(
78 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
78 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
79 def test_name(self, path, kind):
79 def test_name(self, path, kind):
80 node = Node(path, kind)
80 node = Node(path, kind)
81 assert node.name == 'path'
81 assert node.name == 'path'
82
82
83 def test_name_root(self):
83 def test_name_root(self):
84 node = Node('', NodeKind.DIR)
84 node = Node('', NodeKind.DIR)
85 assert node.name == ''
85 assert node.name == ''
86
86
87 def test_root_node_cannot_be_file(self):
87 def test_root_node_cannot_be_file(self):
88 with pytest.raises(NodeError):
88 with pytest.raises(NodeError):
89 Node('', NodeKind.FILE)
89 Node('', NodeKind.FILE)
90
90
91 def test_kind_setter(self):
91 def test_kind_setter(self):
92 node = Node('', NodeKind.DIR)
92 node = Node('', NodeKind.DIR)
93 with pytest.raises(NodeError):
93 with pytest.raises(NodeError):
94 node.kind = NodeKind.FILE
94 node.kind = NodeKind.FILE
95
95
96 def test_compare_equal(self):
96 def test_compare_equal(self):
97 node1 = FileNode('test', content='')
97 node1 = FileNode('test', content='')
98 node2 = FileNode('test', content='')
98 node2 = FileNode('test', content='')
99 assert node1 == node2
99 assert node1 == node2
100 assert not node1 != node2
100 assert not node1 != node2
101
101
102 def test_compare_unequal(self):
102 def test_compare_unequal(self):
103 node1 = FileNode('test', content='a')
103 node1 = FileNode('test', content='a')
104 node2 = FileNode('test', content='b')
104 node2 = FileNode('test', content='b')
105 assert node1 != node2
105 assert node1 != node2
106 assert not node1 == node2
106 assert not node1 == node2
107
107
108 @pytest.mark.parametrize("node_path, expected_parent_path", [
108 @pytest.mark.parametrize("node_path, expected_parent_path", [
109 ('', ''),
109 ('', ''),
110 ('some/path/', 'some/'),
110 ('some/path/', 'some/'),
111 ('some/longer/path/', 'some/longer/'),
111 ('some/longer/path/', 'some/longer/'),
112 ])
112 ])
113 def test_parent_path_new(self, node_path, expected_parent_path):
113 def test_parent_path_new(self, node_path, expected_parent_path):
114 """
114 """
115 Tests if node's parent path are properly computed.
115 Tests if node's parent path are properly computed.
116 """
116 """
117 node = Node(node_path, NodeKind.DIR)
117 node = Node(node_path, NodeKind.DIR)
118 parent_path = node.get_parent_path()
118 parent_path = node.get_parent_path()
119 assert (parent_path.endswith('/') or
119 assert (parent_path.endswith('/') or
120 node.is_root() and parent_path == '')
120 node.is_root() and parent_path == '')
121 assert parent_path == expected_parent_path
121 assert parent_path == expected_parent_path
122
122
123 '''
123 '''
124 def _test_trailing_slash(self, path):
124 def _test_trailing_slash(self, path):
125 if not path.endswith('/'):
125 if not path.endswith('/'):
126 pytest.fail("Trailing slash tests needs paths to end with slash")
126 pytest.fail("Trailing slash tests needs paths to end with slash")
127 for kind in NodeKind.FILE, NodeKind.DIR:
127 for kind in NodeKind.FILE, NodeKind.DIR:
128 with pytest.raises(NodeError):
128 with pytest.raises(NodeError):
129 Node(path=path, kind=kind)
129 Node(path=path, kind=kind)
130
130
131 def test_trailing_slash(self):
131 def test_trailing_slash(self):
132 for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'):
132 for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'):
133 self._test_trailing_slash(path)
133 self._test_trailing_slash(path)
134 '''
134 '''
135
135
136 def test_is_file(self):
136 def test_is_file(self):
137 node = Node('any', NodeKind.FILE)
137 node = Node('any', NodeKind.FILE)
138 assert node.is_file()
138 assert node.is_file()
139
139
140 node = FileNode('any')
140 node = FileNode('any')
141 assert node.is_file()
141 assert node.is_file()
142 with pytest.raises(AttributeError):
142 with pytest.raises(AttributeError):
143 node.nodes
143 node.nodes
144
144
145 def test_is_dir(self):
145 def test_is_dir(self):
146 node = Node('any_dir', NodeKind.DIR)
146 node = Node('any_dir', NodeKind.DIR)
147 assert node.is_dir()
147 assert node.is_dir()
148
148
149 node = DirNode('any_dir')
149 node = DirNode('any_dir')
150
150
151 assert node.is_dir()
151 assert node.is_dir()
152 with pytest.raises(NodeError):
152 with pytest.raises(NodeError):
153 node.content
153 node.content
154
154
155 def test_dir_node_iter(self):
155 def test_dir_node_iter(self):
156 nodes = [
156 nodes = [
157 DirNode('docs'),
157 DirNode('docs'),
158 DirNode('tests'),
158 DirNode('tests'),
159 FileNode('bar'),
159 FileNode('bar'),
160 FileNode('foo'),
160 FileNode('foo'),
161 FileNode('readme.txt'),
161 FileNode('readme.txt'),
162 FileNode('setup.py'),
162 FileNode('setup.py'),
163 ]
163 ]
164 dirnode = DirNode('', nodes=nodes)
164 dirnode = DirNode('', nodes=nodes)
165 for node in dirnode:
165 for node in dirnode:
166 assert node == dirnode.get_node(node.path)
166 assert node == dirnode.get_node(node.path)
167
167
168 def test_node_state(self):
168 def test_node_state(self):
169 """
169 """
170 Without link to commit nodes should raise NodeError.
170 Without link to commit nodes should raise NodeError.
171 """
171 """
172 node = FileNode('anything')
172 node = FileNode('anything')
173 with pytest.raises(NodeError):
173 with pytest.raises(NodeError):
174 node.state
174 node.state
175 node = DirNode('anything')
175 node = DirNode('anything')
176 with pytest.raises(NodeError):
176 with pytest.raises(NodeError):
177 node.state
177 node.state
178
178
179 def test_file_node_stat(self):
179 def test_file_node_stat(self):
180 node = FileNode('foobar', 'empty... almost')
180 node = FileNode('foobar', 'empty... almost')
181 mode = node.mode # default should be 0100644
181 mode = node.mode # default should be 0100644
182 assert mode & stat.S_IRUSR
182 assert mode & stat.S_IRUSR
183 assert mode & stat.S_IWUSR
183 assert mode & stat.S_IWUSR
184 assert mode & stat.S_IRGRP
184 assert mode & stat.S_IRGRP
185 assert mode & stat.S_IROTH
185 assert mode & stat.S_IROTH
186 assert not mode & stat.S_IWGRP
186 assert not mode & stat.S_IWGRP
187 assert not mode & stat.S_IWOTH
187 assert not mode & stat.S_IWOTH
188 assert not mode & stat.S_IXUSR
188 assert not mode & stat.S_IXUSR
189 assert not mode & stat.S_IXGRP
189 assert not mode & stat.S_IXGRP
190 assert not mode & stat.S_IXOTH
190 assert not mode & stat.S_IXOTH
191
191
192 def test_file_node_is_executable(self):
192 def test_file_node_is_executable(self):
193 node = FileNode('foobar', 'empty... almost', mode=0100755)
193 node = FileNode('foobar', 'empty... almost', mode=0o100755)
194 assert node.is_executable
194 assert node.is_executable
195
195
196 node = FileNode('foobar', 'empty... almost', mode=0100500)
196 node = FileNode('foobar', 'empty... almost', mode=0o100500)
197 assert node.is_executable
197 assert node.is_executable
198
198
199 node = FileNode('foobar', 'empty... almost', mode=0100644)
199 node = FileNode('foobar', 'empty... almost', mode=0o100644)
200 assert not node.is_executable
200 assert not node.is_executable
201
201
202 def test_file_node_is_not_symlink(self):
202 def test_file_node_is_not_symlink(self):
203 node = FileNode('foobar', 'empty...')
203 node = FileNode('foobar', 'empty...')
204 assert not node.is_link()
204 assert not node.is_link()
205
205
206 def test_mimetype(self):
206 def test_mimetype(self):
207 py_node = FileNode('test.py')
207 py_node = FileNode('test.py')
208 tar_node = FileNode('test.tar.gz')
208 tar_node = FileNode('test.tar.gz')
209
209
210 ext = 'CustomExtension'
210 ext = 'CustomExtension'
211
211
212 my_node2 = FileNode('myfile2')
212 my_node2 = FileNode('myfile2')
213 my_node2._mimetype = [ext]
213 my_node2._mimetype = [ext]
214
214
215 my_node3 = FileNode('myfile3')
215 my_node3 = FileNode('myfile3')
216 my_node3._mimetype = [ext, ext]
216 my_node3._mimetype = [ext, ext]
217
217
218 assert py_node.mimetype == 'text/x-python'
218 assert py_node.mimetype == 'text/x-python'
219 assert py_node.get_mimetype() == ('text/x-python', None)
219 assert py_node.get_mimetype() == ('text/x-python', None)
220
220
221 assert tar_node.mimetype == 'application/x-tar'
221 assert tar_node.mimetype == 'application/x-tar'
222 assert tar_node.get_mimetype() == ('application/x-tar', 'gzip')
222 assert tar_node.get_mimetype() == ('application/x-tar', 'gzip')
223
223
224 with pytest.raises(NodeError):
224 with pytest.raises(NodeError):
225 my_node2.get_mimetype()
225 my_node2.get_mimetype()
226
226
227 assert my_node3.mimetype == ext
227 assert my_node3.mimetype == ext
228 assert my_node3.get_mimetype() == [ext, ext]
228 assert my_node3.get_mimetype() == [ext, ext]
229
229
230 def test_lines_counts(self):
230 def test_lines_counts(self):
231 lines = [
231 lines = [
232 'line1\n',
232 'line1\n',
233 'line2\n',
233 'line2\n',
234 'line3\n',
234 'line3\n',
235 '\n',
235 '\n',
236 '\n',
236 '\n',
237 'line4\n',
237 'line4\n',
238 ]
238 ]
239 py_node = FileNode('test.py', ''.join(lines))
239 py_node = FileNode('test.py', ''.join(lines))
240
240
241 assert (len(lines), len(lines)) == py_node.lines()
241 assert (len(lines), len(lines)) == py_node.lines()
242 assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True)
242 assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True)
243
243
244 def test_lines_no_newline(self):
244 def test_lines_no_newline(self):
245 py_node = FileNode('test.py', 'oneline')
245 py_node = FileNode('test.py', 'oneline')
246
246
247 assert (1, 1) == py_node.lines()
247 assert (1, 1) == py_node.lines()
248 assert (1, 1) == py_node.lines(count_empty=True)
248 assert (1, 1) == py_node.lines(count_empty=True)
249
249
250
250
251 class TestNodeContent(object):
251 class TestNodeContent(object):
252
252
253 def test_if_binary(self, binary_filenode):
253 def test_if_binary(self, binary_filenode):
254 filenode = binary_filenode('calendar.jpg')
254 filenode = binary_filenode('calendar.jpg')
255 assert filenode.is_binary
255 assert filenode.is_binary
256
256
257 def test_binary_line_counts(self, binary_filenode):
257 def test_binary_line_counts(self, binary_filenode):
258 tar_node = binary_filenode('archive.tar.gz')
258 tar_node = binary_filenode('archive.tar.gz')
259 assert (0, 0) == tar_node.lines(count_empty=True)
259 assert (0, 0) == tar_node.lines(count_empty=True)
260
260
261 def test_binary_mimetype(self, binary_filenode):
261 def test_binary_mimetype(self, binary_filenode):
262 tar_node = binary_filenode('archive.tar.gz')
262 tar_node = binary_filenode('archive.tar.gz')
263 assert tar_node.mimetype == 'application/x-tar'
263 assert tar_node.mimetype == 'application/x-tar'
264
264
265
265
266 @pytest.mark.usefixtures("vcs_repository_support")
266 @pytest.mark.usefixtures("vcs_repository_support")
267 class TestNodesCommits(BackendTestMixin):
267 class TestNodesCommits(BackendTestMixin):
268
268
269 def test_node_last_commit(self, generate_repo_with_commits):
269 def test_node_last_commit(self, generate_repo_with_commits):
270 repo = generate_repo_with_commits(20)
270 repo = generate_repo_with_commits(20)
271 last_commit = repo.get_commit()
271 last_commit = repo.get_commit()
272
272
273 for x in xrange(3):
273 for x in xrange(3):
274 node = last_commit.get_node('file_%s.txt' % x)
274 node = last_commit.get_node('file_%s.txt' % x)
275 assert node.last_commit == repo[x]
275 assert node.last_commit == repo[x]
General Comments 0
You need to be logged in to leave comments. Login now