code: code fixes and small adjustments
marcink - r2623:0cd3ef36 default
@@ -0,0 +1,1 b''
1 from pyramid.compat import configparser
\ No newline at end of file
@@ -1,625 +1,628 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import logging
22 import logging
23 import operator
23 import operator
24
24
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26
26
27 from rhodecode.lib import helpers as h, diffs
27 from rhodecode.lib import helpers as h, diffs
28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 from rhodecode.model import repo
30 from rhodecode.model import repo
31 from rhodecode.model import repo_group
31 from rhodecode.model import repo_group
32 from rhodecode.model import user_group
32 from rhodecode.model import user_group
33 from rhodecode.model import user
33 from rhodecode.model import user
34 from rhodecode.model.db import User
34 from rhodecode.model.db import User
35 from rhodecode.model.scm import ScmModel
35 from rhodecode.model.scm import ScmModel
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 ADMIN_PREFIX = '/_admin'
40 ADMIN_PREFIX = '/_admin'
41 STATIC_FILE_PREFIX = '/_static'
41 STATIC_FILE_PREFIX = '/_static'
42
42
43 URL_NAME_REQUIREMENTS = {
43 URL_NAME_REQUIREMENTS = {
44 # group names can have a slash in them, but they must not end with a slash
44 # group names can have a slash in them, but they must not end with a slash
45 'group_name': r'.*?[^/]',
45 'group_name': r'.*?[^/]',
46 'repo_group_name': r'.*?[^/]',
46 'repo_group_name': r'.*?[^/]',
47 # repo names can have a slash in them, but they must not end with a slash
47 # repo names can have a slash in them, but they must not end with a slash
48 'repo_name': r'.*?[^/]',
48 'repo_name': r'.*?[^/]',
49 # file path eats up everything at the end
49 # file path eats up everything at the end
50 'f_path': r'.*',
50 'f_path': r'.*',
51 # reference types
51 # reference types
52 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
52 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
53 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
53 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
54 }
54 }
55
55
56
56
57 def add_route_with_slash(config,name, pattern, **kw):
57 def add_route_with_slash(config,name, pattern, **kw):
58 config.add_route(name, pattern, **kw)
58 config.add_route(name, pattern, **kw)
59 if not pattern.endswith('/'):
59 if not pattern.endswith('/'):
60 config.add_route(name + '_slash', pattern + '/', **kw)
60 config.add_route(name + '_slash', pattern + '/', **kw)
61
61
62
62
63 def add_route_requirements(route_path, requirements=URL_NAME_REQUIREMENTS):
63 def add_route_requirements(route_path, requirements=URL_NAME_REQUIREMENTS):
64 """
64 """
65 Adds regex requirements to pyramid routes using a mapping dict
65 Adds regex requirements to pyramid routes using a mapping dict
66 e.g::
66 e.g::
67 add_route_requirements('{repo_name}/settings')
67 add_route_requirements('{repo_name}/settings')
68 """
68 """
69 for key, regex in requirements.items():
69 for key, regex in requirements.items():
70 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
70 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
71 return route_path
71 return route_path
72
72
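For orientation, a quick sketch of what add_route_requirements() yields with the URL_NAME_REQUIREMENTS mapping above (the settings pattern is only an illustration):

    pattern = add_route_requirements('{repo_name}/settings')
    # '{repo_name}' is rewritten to '{repo_name:.*?[^/]}', so:
    # pattern == '{repo_name:.*?[^/]}/settings'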
73
73
74 def get_format_ref_id(repo):
74 def get_format_ref_id(repo):
75 """Returns a `repo` specific reference formatter function"""
75 """Returns a `repo` specific reference formatter function"""
76 if h.is_svn(repo):
76 if h.is_svn(repo):
77 return _format_ref_id_svn
77 return _format_ref_id_svn
78 else:
78 else:
79 return _format_ref_id
79 return _format_ref_id
80
80
81
81
82 def _format_ref_id(name, raw_id):
82 def _format_ref_id(name, raw_id):
83 """Default formatting of a given reference `name`"""
83 """Default formatting of a given reference `name`"""
84 return name
84 return name
85
85
86
86
87 def _format_ref_id_svn(name, raw_id):
87 def _format_ref_id_svn(name, raw_id):
88 """Special way of formatting a reference for Subversion including path"""
88 """Special way of formatting a reference for Subversion including path"""
89 return '%s@%s' % (name, raw_id)
89 return '%s@%s' % (name, raw_id)
90
90
91
91
92 class TemplateArgs(StrictAttributeDict):
92 class TemplateArgs(StrictAttributeDict):
93 pass
93 pass
94
94
95
95
96 class BaseAppView(object):
96 class BaseAppView(object):
97
97
98 def __init__(self, context, request):
98 def __init__(self, context, request):
99 self.request = request
99 self.request = request
100 self.context = context
100 self.context = context
101 self.session = request.session
101 self.session = request.session
102 self._rhodecode_user = request.user # auth user
102 self._rhodecode_user = request.user # auth user
103 self._rhodecode_db_user = self._rhodecode_user.get_instance()
103 self._rhodecode_db_user = self._rhodecode_user.get_instance()
104 self._maybe_needs_password_change(
104 self._maybe_needs_password_change(
105 request.matched_route.name, self._rhodecode_db_user)
105 request.matched_route.name, self._rhodecode_db_user)
106
106
107 def _maybe_needs_password_change(self, view_name, user_obj):
107 def _maybe_needs_password_change(self, view_name, user_obj):
108 log.debug('Checking if user %s needs password change on view %s',
108 log.debug('Checking if user %s needs password change on view %s',
109 user_obj, view_name)
109 user_obj, view_name)
110 skip_user_views = [
110 skip_user_views = [
111 'logout', 'login',
111 'logout', 'login',
112 'my_account_password', 'my_account_password_update'
112 'my_account_password', 'my_account_password_update'
113 ]
113 ]
114
114
115 if not user_obj:
115 if not user_obj:
116 return
116 return
117
117
118 if user_obj.username == User.DEFAULT_USER:
118 if user_obj.username == User.DEFAULT_USER:
119 return
119 return
120
120
121 now = time.time()
121 now = time.time()
122 should_change = user_obj.user_data.get('force_password_change')
122 should_change = user_obj.user_data.get('force_password_change')
123 change_after = safe_int(should_change) or 0
123 change_after = safe_int(should_change) or 0
124 if should_change and now > change_after:
124 if should_change and now > change_after:
125 log.debug('User %s requires password change', user_obj)
125 log.debug('User %s requires password change', user_obj)
126 h.flash('You are required to change your password', 'warning',
126 h.flash('You are required to change your password', 'warning',
127 ignore_duplicate=True)
127 ignore_duplicate=True)
128
128
129 if view_name not in skip_user_views:
129 if view_name not in skip_user_views:
130 raise HTTPFound(
130 raise HTTPFound(
131 self.request.route_path('my_account_password'))
131 self.request.route_path('my_account_password'))
132
132
133 def _log_creation_exception(self, e, repo_name):
133 def _log_creation_exception(self, e, repo_name):
134 _ = self.request.translate
134 _ = self.request.translate
135 reason = None
135 reason = None
136 if len(e.args) == 2:
136 if len(e.args) == 2:
137 reason = e.args[1]
137 reason = e.args[1]
138
138
139 if reason == 'INVALID_CERTIFICATE':
139 if reason == 'INVALID_CERTIFICATE':
140 log.exception(
140 log.exception(
141 'Exception creating a repository: invalid certificate')
141 'Exception creating a repository: invalid certificate')
142 msg = (_('Error creating repository %s: invalid certificate')
142 msg = (_('Error creating repository %s: invalid certificate')
143 % repo_name)
143 % repo_name)
144 else:
144 else:
145 log.exception("Exception creating a repository")
145 log.exception("Exception creating a repository")
146 msg = (_('Error creating repository %s')
146 msg = (_('Error creating repository %s')
147 % repo_name)
147 % repo_name)
148 return msg
148 return msg
149
149
150 def _get_local_tmpl_context(self, include_app_defaults=True):
150 def _get_local_tmpl_context(self, include_app_defaults=True):
151 c = TemplateArgs()
151 c = TemplateArgs()
152 c.auth_user = self.request.user
152 c.auth_user = self.request.user
153 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
153 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
154 c.rhodecode_user = self.request.user
154 c.rhodecode_user = self.request.user
155
155
156 if include_app_defaults:
156 if include_app_defaults:
157 from rhodecode.lib.base import attach_context_attributes
157 from rhodecode.lib.base import attach_context_attributes
158 attach_context_attributes(c, self.request, self.request.user.user_id)
158 attach_context_attributes(c, self.request, self.request.user.user_id)
159
159
160 return c
160 return c
161
161
162 def _get_template_context(self, tmpl_args, **kwargs):
162 def _get_template_context(self, tmpl_args, **kwargs):
163
163
164 local_tmpl_args = {
164 local_tmpl_args = {
165 'defaults': {},
165 'defaults': {},
166 'errors': {},
166 'errors': {},
167 'c': tmpl_args
167 'c': tmpl_args
168 }
168 }
169 local_tmpl_args.update(kwargs)
169 local_tmpl_args.update(kwargs)
170 return local_tmpl_args
170 return local_tmpl_args
171
171
172 def load_default_context(self):
172 def load_default_context(self):
173 """
173 """
174 example:
174 example:
175
175
176 def load_default_context(self):
176 def load_default_context(self):
177 c = self._get_local_tmpl_context()
177 c = self._get_local_tmpl_context()
178 c.custom_var = 'foobar'
178 c.custom_var = 'foobar'
179
179
180 return c
180 return c
181 """
181 """
182 raise NotImplementedError('Needs implementation in view class')
182 raise NotImplementedError('Needs implementation in view class')
183
183
184
184
185 class RepoAppView(BaseAppView):
185 class RepoAppView(BaseAppView):
186
186
187 def __init__(self, context, request):
187 def __init__(self, context, request):
188 super(RepoAppView, self).__init__(context, request)
188 super(RepoAppView, self).__init__(context, request)
189 self.db_repo = request.db_repo
189 self.db_repo = request.db_repo
190 self.db_repo_name = self.db_repo.repo_name
190 self.db_repo_name = self.db_repo.repo_name
191 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
191 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
192
192
193 def _handle_missing_requirements(self, error):
193 def _handle_missing_requirements(self, error):
194 log.error(
194 log.error(
195 'Requirements are missing for repository %s: %s',
195 'Requirements are missing for repository %s: %s',
196 self.db_repo_name, error.message)
196 self.db_repo_name, error.message)
197
197
198 def _get_local_tmpl_context(self, include_app_defaults=True):
198 def _get_local_tmpl_context(self, include_app_defaults=True):
199 _ = self.request.translate
199 _ = self.request.translate
200 c = super(RepoAppView, self)._get_local_tmpl_context(
200 c = super(RepoAppView, self)._get_local_tmpl_context(
201 include_app_defaults=include_app_defaults)
201 include_app_defaults=include_app_defaults)
202
202
203 # register common vars for this type of view
203 # register common vars for this type of view
204 c.rhodecode_db_repo = self.db_repo
204 c.rhodecode_db_repo = self.db_repo
205 c.repo_name = self.db_repo_name
205 c.repo_name = self.db_repo_name
206 c.repository_pull_requests = self.db_repo_pull_requests
206 c.repository_pull_requests = self.db_repo_pull_requests
207
207
208 c.repository_requirements_missing = False
208 c.repository_requirements_missing = False
209 try:
209 try:
210 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
210 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
211 if self.rhodecode_vcs_repo:
211 if self.rhodecode_vcs_repo:
212 self.path_filter = PathFilter(self.rhodecode_vcs_repo.get_path_permissions(c.auth_user.username))
212 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
213 c.auth_user.username)
214 self.path_filter = PathFilter(path_perms)
213 else:
215 else:
214 self.path_filter = PathFilter(None)
216 self.path_filter = PathFilter(None)
215 except RepositoryRequirementError as e:
217 except RepositoryRequirementError as e:
216 c.repository_requirements_missing = True
218 c.repository_requirements_missing = True
217 self._handle_missing_requirements(e)
219 self._handle_missing_requirements(e)
218 self.rhodecode_vcs_repo = None
220 self.rhodecode_vcs_repo = None
219 self.path_filter = None
221 self.path_filter = None
220
222
221 c.path_filter = self.path_filter # used by atom_feed_entry.mako
223 c.path_filter = self.path_filter # used by atom_feed_entry.mako
222
224
223 if (not c.repository_requirements_missing
225 if (not c.repository_requirements_missing
224 and self.rhodecode_vcs_repo is None):
226 and self.rhodecode_vcs_repo is None):
225 # unable to fetch this repo as vcs instance, report back to user
227 # unable to fetch this repo as vcs instance, report back to user
226 h.flash(_(
228 h.flash(_(
227 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
229 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
228 "Please check if it exist, or is not damaged.") %
230 "Please check if it exist, or is not damaged.") %
229 {'repo_name': c.repo_name},
231 {'repo_name': c.repo_name},
230 category='error', ignore_duplicate=True)
232 category='error', ignore_duplicate=True)
231 raise HTTPFound(h.route_path('home'))
233 raise HTTPFound(h.route_path('home'))
232
234
233 return c
235 return c
234
236
235 def _get_f_path_unchecked(self, matchdict, default=None):
237 def _get_f_path_unchecked(self, matchdict, default=None):
236 """
238 """
237 Should only be used by redirects, everything else should call _get_f_path
239 Should only be used by redirects, everything else should call _get_f_path
238 """
240 """
239 f_path = matchdict.get('f_path')
241 f_path = matchdict.get('f_path')
240 if f_path:
242 if f_path:
241 # fix for multiple initial slashes that causes errors for GIT
243 # fix for multiple initial slashes that causes errors for GIT
242 return f_path.lstrip('/')
244 return f_path.lstrip('/')
243
245
244 return default
246 return default
245
247
246 def _get_f_path(self, matchdict, default=None):
248 def _get_f_path(self, matchdict, default=None):
247 return self.path_filter.assert_path_permissions(self._get_f_path_unchecked(matchdict, default))
249 f_path_match = self._get_f_path_unchecked(matchdict, default)
250 return self.path_filter.assert_path_permissions(f_path_match)
248
251
249
252
250 class PathFilter(object):
253 class PathFilter(object):
251
254
252 # Expects an instance of BasePathPermissionChecker or None
255 # Expects an instance of BasePathPermissionChecker or None
253 def __init__(self, permission_checker):
256 def __init__(self, permission_checker):
254 self.permission_checker = permission_checker
257 self.permission_checker = permission_checker
255
258
256 def assert_path_permissions(self, path):
259 def assert_path_permissions(self, path):
257 if path and self.permission_checker and not self.permission_checker.has_access(path):
260 if path and self.permission_checker and not self.permission_checker.has_access(path):
258 raise HTTPForbidden()
261 raise HTTPForbidden()
259 return path
262 return path
260
263
261 def filter_patchset(self, patchset):
264 def filter_patchset(self, patchset):
262 if not self.permission_checker or not patchset:
265 if not self.permission_checker or not patchset:
263 return patchset, False
266 return patchset, False
264 had_filtered = False
267 had_filtered = False
265 filtered_patchset = []
268 filtered_patchset = []
266 for patch in patchset:
269 for patch in patchset:
267 filename = patch.get('filename', None)
270 filename = patch.get('filename', None)
268 if not filename or self.permission_checker.has_access(filename):
271 if not filename or self.permission_checker.has_access(filename):
269 filtered_patchset.append(patch)
272 filtered_patchset.append(patch)
270 else:
273 else:
271 had_filtered = True
274 had_filtered = True
272 if had_filtered:
275 if had_filtered:
273 if isinstance(patchset, diffs.LimitedDiffContainer):
276 if isinstance(patchset, diffs.LimitedDiffContainer):
274 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
277 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
275 return filtered_patchset, True
278 return filtered_patchset, True
276 else:
279 else:
277 return patchset, False
280 return patchset, False
278
281
279 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
282 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
280 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
283 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
281 result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
284 result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
282 result.has_hidden_changes = has_hidden_changes
285 result.has_hidden_changes = has_hidden_changes
283 return result
286 return result
284
287
285 def get_raw_patch(self, diff_processor):
288 def get_raw_patch(self, diff_processor):
286 if self.permission_checker is None:
289 if self.permission_checker is None:
287 return diff_processor.as_raw()
290 return diff_processor.as_raw()
288 elif self.permission_checker.has_full_access:
291 elif self.permission_checker.has_full_access:
289 return diff_processor.as_raw()
292 return diff_processor.as_raw()
290 else:
293 else:
291 return '# Repository has user-specific filters, raw patch generation is disabled.'
294 return '# Repository has user-specific filters, raw patch generation is disabled.'
292
295
293 @property
296 @property
294 def is_enabled(self):
297 def is_enabled(self):
295 return self.permission_checker is not None
298 return self.permission_checker is not None
296
299
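A rough usage sketch of PathFilter, assuming a checker object exposing the has_access()/has_full_access interface used above:

    # No checker configured: paths pass through unchanged.
    PathFilter(None).assert_path_permissions('docs/index.rst')  # -> 'docs/index.rst'

    # With a checker that denies the path, assert_path_permissions()
    # raises HTTPForbidden instead of returning the path.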
297
300
298 class RepoGroupAppView(BaseAppView):
301 class RepoGroupAppView(BaseAppView):
299 def __init__(self, context, request):
302 def __init__(self, context, request):
300 super(RepoGroupAppView, self).__init__(context, request)
303 super(RepoGroupAppView, self).__init__(context, request)
301 self.db_repo_group = request.db_repo_group
304 self.db_repo_group = request.db_repo_group
302 self.db_repo_group_name = self.db_repo_group.group_name
305 self.db_repo_group_name = self.db_repo_group.group_name
303
306
304 def _revoke_perms_on_yourself(self, form_result):
307 def _revoke_perms_on_yourself(self, form_result):
305 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
308 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
306 form_result['perm_updates'])
309 form_result['perm_updates'])
307 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
310 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
308 form_result['perm_additions'])
311 form_result['perm_additions'])
309 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
312 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
310 form_result['perm_deletions'])
313 form_result['perm_deletions'])
311 admin_perm = 'group.admin'
314 admin_perm = 'group.admin'
312 if _updates and _updates[0][1] != admin_perm or \
315 if _updates and _updates[0][1] != admin_perm or \
313 _additions and _additions[0][1] != admin_perm or \
316 _additions and _additions[0][1] != admin_perm or \
314 _deletions and _deletions[0][1] != admin_perm:
317 _deletions and _deletions[0][1] != admin_perm:
315 return True
318 return True
316 return False
319 return False
317
320
318
321
319 class UserGroupAppView(BaseAppView):
322 class UserGroupAppView(BaseAppView):
320 def __init__(self, context, request):
323 def __init__(self, context, request):
321 super(UserGroupAppView, self).__init__(context, request)
324 super(UserGroupAppView, self).__init__(context, request)
322 self.db_user_group = request.db_user_group
325 self.db_user_group = request.db_user_group
323 self.db_user_group_name = self.db_user_group.users_group_name
326 self.db_user_group_name = self.db_user_group.users_group_name
324
327
325
328
326 class UserAppView(BaseAppView):
329 class UserAppView(BaseAppView):
327 def __init__(self, context, request):
330 def __init__(self, context, request):
328 super(UserAppView, self).__init__(context, request)
331 super(UserAppView, self).__init__(context, request)
329 self.db_user = request.db_user
332 self.db_user = request.db_user
330 self.db_user_id = self.db_user.user_id
333 self.db_user_id = self.db_user.user_id
331
334
332 _ = self.request.translate
335 _ = self.request.translate
333 if not request.db_user_supports_default:
336 if not request.db_user_supports_default:
334 if self.db_user.username == User.DEFAULT_USER:
337 if self.db_user.username == User.DEFAULT_USER:
335 h.flash(_("Editing user `{}` is disabled.".format(
338 h.flash(_("Editing user `{}` is disabled.".format(
336 User.DEFAULT_USER)), category='warning')
339 User.DEFAULT_USER)), category='warning')
337 raise HTTPFound(h.route_path('users'))
340 raise HTTPFound(h.route_path('users'))
338
341
339
342
340 class DataGridAppView(object):
343 class DataGridAppView(object):
341 """
344 """
342 Common class to have re-usable grid rendering components
345 Common class to have re-usable grid rendering components
343 """
346 """
344
347
345 def _extract_ordering(self, request, column_map=None):
348 def _extract_ordering(self, request, column_map=None):
346 column_map = column_map or {}
349 column_map = column_map or {}
347 column_index = safe_int(request.GET.get('order[0][column]'))
350 column_index = safe_int(request.GET.get('order[0][column]'))
348 order_dir = request.GET.get(
351 order_dir = request.GET.get(
349 'order[0][dir]', 'desc')
352 'order[0][dir]', 'desc')
350 order_by = request.GET.get(
353 order_by = request.GET.get(
351 'columns[%s][data][sort]' % column_index, 'name_raw')
354 'columns[%s][data][sort]' % column_index, 'name_raw')
352
355
353 # translate datatable to DB columns
356 # translate datatable to DB columns
354 order_by = column_map.get(order_by) or order_by
357 order_by = column_map.get(order_by) or order_by
355
358
356 search_q = request.GET.get('search[value]')
359 search_q = request.GET.get('search[value]')
357 return search_q, order_by, order_dir
360 return search_q, order_by, order_dir
358
361
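The method above unpacks DataTables-style ordering parameters; for example, a query string like the following (values purely illustrative) yields ('foo', 'name', 'asc'), assuming the supplied column_map does not remap 'name':

    ?order[0][column]=1&order[0][dir]=asc&columns[1][data][sort]=name&search[value]=foo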
359 def _extract_chunk(self, request):
362 def _extract_chunk(self, request):
360 start = safe_int(request.GET.get('start'), 0)
363 start = safe_int(request.GET.get('start'), 0)
361 length = safe_int(request.GET.get('length'), 25)
364 length = safe_int(request.GET.get('length'), 25)
362 draw = safe_int(request.GET.get('draw'))
365 draw = safe_int(request.GET.get('draw'))
363 return draw, start, length
366 return draw, start, length
364
367
365 def _get_order_col(self, order_by, model):
368 def _get_order_col(self, order_by, model):
366 if isinstance(order_by, basestring):
369 if isinstance(order_by, basestring):
367 try:
370 try:
368 return operator.attrgetter(order_by)(model)
371 return operator.attrgetter(order_by)(model)
369 except AttributeError:
372 except AttributeError:
370 return None
373 return None
371 else:
374 else:
372 return order_by
375 return order_by
373
376
374
377
375 class BaseReferencesView(RepoAppView):
378 class BaseReferencesView(RepoAppView):
376 """
379 """
377 Base for reference view for branches, tags and bookmarks.
380 Base for reference view for branches, tags and bookmarks.
378 """
381 """
379 def load_default_context(self):
382 def load_default_context(self):
380 c = self._get_local_tmpl_context()
383 c = self._get_local_tmpl_context()
381
384
382
385
383 return c
386 return c
384
387
385 def load_refs_context(self, ref_items, partials_template):
388 def load_refs_context(self, ref_items, partials_template):
386 _render = self.request.get_partial_renderer(partials_template)
389 _render = self.request.get_partial_renderer(partials_template)
387 pre_load = ["author", "date", "message"]
390 pre_load = ["author", "date", "message"]
388
391
389 is_svn = h.is_svn(self.rhodecode_vcs_repo)
392 is_svn = h.is_svn(self.rhodecode_vcs_repo)
390 is_hg = h.is_hg(self.rhodecode_vcs_repo)
393 is_hg = h.is_hg(self.rhodecode_vcs_repo)
391
394
392 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
395 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
393
396
394 closed_refs = {}
397 closed_refs = {}
395 if is_hg:
398 if is_hg:
396 closed_refs = self.rhodecode_vcs_repo.branches_closed
399 closed_refs = self.rhodecode_vcs_repo.branches_closed
397
400
398 data = []
401 data = []
399 for ref_name, commit_id in ref_items:
402 for ref_name, commit_id in ref_items:
400 commit = self.rhodecode_vcs_repo.get_commit(
403 commit = self.rhodecode_vcs_repo.get_commit(
401 commit_id=commit_id, pre_load=pre_load)
404 commit_id=commit_id, pre_load=pre_load)
402 closed = ref_name in closed_refs
405 closed = ref_name in closed_refs
403
406
404 # TODO: johbo: Unify generation of reference links
407 # TODO: johbo: Unify generation of reference links
405 use_commit_id = '/' in ref_name or is_svn
408 use_commit_id = '/' in ref_name or is_svn
406
409
407 if use_commit_id:
410 if use_commit_id:
408 files_url = h.route_path(
411 files_url = h.route_path(
409 'repo_files',
412 'repo_files',
410 repo_name=self.db_repo_name,
413 repo_name=self.db_repo_name,
411 f_path=ref_name if is_svn else '',
414 f_path=ref_name if is_svn else '',
412 commit_id=commit_id)
415 commit_id=commit_id)
413
416
414 else:
417 else:
415 files_url = h.route_path(
418 files_url = h.route_path(
416 'repo_files',
419 'repo_files',
417 repo_name=self.db_repo_name,
420 repo_name=self.db_repo_name,
418 f_path=ref_name if is_svn else '',
421 f_path=ref_name if is_svn else '',
419 commit_id=ref_name,
422 commit_id=ref_name,
420 _query=dict(at=ref_name))
423 _query=dict(at=ref_name))
421
424
422 data.append({
425 data.append({
423 "name": _render('name', ref_name, files_url, closed),
426 "name": _render('name', ref_name, files_url, closed),
424 "name_raw": ref_name,
427 "name_raw": ref_name,
425 "date": _render('date', commit.date),
428 "date": _render('date', commit.date),
426 "date_raw": datetime_to_time(commit.date),
429 "date_raw": datetime_to_time(commit.date),
427 "author": _render('author', commit.author),
430 "author": _render('author', commit.author),
428 "commit": _render(
431 "commit": _render(
429 'commit', commit.message, commit.raw_id, commit.idx),
432 'commit', commit.message, commit.raw_id, commit.idx),
430 "commit_raw": commit.idx,
433 "commit_raw": commit.idx,
431 "compare": _render(
434 "compare": _render(
432 'compare', format_ref_id(ref_name, commit.raw_id)),
435 'compare', format_ref_id(ref_name, commit.raw_id)),
433 })
436 })
434
437
435 return data
438 return data
436
439
437
440
438 class RepoRoutePredicate(object):
441 class RepoRoutePredicate(object):
439 def __init__(self, val, config):
442 def __init__(self, val, config):
440 self.val = val
443 self.val = val
441
444
442 def text(self):
445 def text(self):
443 return 'repo_route = %s' % self.val
446 return 'repo_route = %s' % self.val
444
447
445 phash = text
448 phash = text
446
449
447 def __call__(self, info, request):
450 def __call__(self, info, request):
448
451
449 if hasattr(request, 'vcs_call'):
452 if hasattr(request, 'vcs_call'):
450 # skip vcs calls
453 # skip vcs calls
451 return
454 return
452
455
453 repo_name = info['match']['repo_name']
456 repo_name = info['match']['repo_name']
454 repo_model = repo.RepoModel()
457 repo_model = repo.RepoModel()
455 by_name_match = repo_model.get_by_repo_name(repo_name, cache=True)
458 by_name_match = repo_model.get_by_repo_name(repo_name, cache=True)
456
459
457 def redirect_if_creating(db_repo):
460 def redirect_if_creating(db_repo):
458 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
461 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
459 raise HTTPFound(
462 raise HTTPFound(
460 request.route_path('repo_creating',
463 request.route_path('repo_creating',
461 repo_name=db_repo.repo_name))
464 repo_name=db_repo.repo_name))
462
465
463 if by_name_match:
466 if by_name_match:
464 # register this as request object we can re-use later
467 # register this as request object we can re-use later
465 request.db_repo = by_name_match
468 request.db_repo = by_name_match
466 redirect_if_creating(by_name_match)
469 redirect_if_creating(by_name_match)
467 return True
470 return True
468
471
469 by_id_match = repo_model.get_repo_by_id(repo_name)
472 by_id_match = repo_model.get_repo_by_id(repo_name)
470 if by_id_match:
473 if by_id_match:
471 request.db_repo = by_id_match
474 request.db_repo = by_id_match
472 redirect_if_creating(by_id_match)
475 redirect_if_creating(by_id_match)
473 return True
476 return True
474
477
475 return False
478 return False
476
479
477
480
478 class RepoTypeRoutePredicate(object):
481 class RepoTypeRoutePredicate(object):
479 def __init__(self, val, config):
482 def __init__(self, val, config):
480 self.val = val or ['hg', 'git', 'svn']
483 self.val = val or ['hg', 'git', 'svn']
481
484
482 def text(self):
485 def text(self):
483 return 'repo_accepted_type = %s' % self.val
486 return 'repo_accepted_type = %s' % self.val
484
487
485 phash = text
488 phash = text
486
489
487 def __call__(self, info, request):
490 def __call__(self, info, request):
488 if hasattr(request, 'vcs_call'):
491 if hasattr(request, 'vcs_call'):
489 # skip vcs calls
492 # skip vcs calls
490 return
493 return
491
494
492 rhodecode_db_repo = request.db_repo
495 rhodecode_db_repo = request.db_repo
493
496
494 log.debug(
497 log.debug(
495 '%s checking repo type for %s in %s',
498 '%s checking repo type for %s in %s',
496 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
499 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
497
500
498 if rhodecode_db_repo.repo_type in self.val:
501 if rhodecode_db_repo.repo_type in self.val:
499 return True
502 return True
500 else:
503 else:
501 log.warning('Current view is not supported for repo type:%s',
504 log.warning('Current view is not supported for repo type:%s',
502 rhodecode_db_repo.repo_type)
505 rhodecode_db_repo.repo_type)
503 #
506 #
504 # h.flash(h.literal(
507 # h.flash(h.literal(
505 # _('Action not supported for %s.' % rhodecode_repo.alias)),
508 # _('Action not supported for %s.' % rhodecode_repo.alias)),
506 # category='warning')
509 # category='warning')
507 # return redirect(
510 # return redirect(
508 # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
511 # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
509
512
510 return False
513 return False
511
514
512
515
513 class RepoGroupRoutePredicate(object):
516 class RepoGroupRoutePredicate(object):
514 def __init__(self, val, config):
517 def __init__(self, val, config):
515 self.val = val
518 self.val = val
516
519
517 def text(self):
520 def text(self):
518 return 'repo_group_route = %s' % self.val
521 return 'repo_group_route = %s' % self.val
519
522
520 phash = text
523 phash = text
521
524
522 def __call__(self, info, request):
525 def __call__(self, info, request):
523 if hasattr(request, 'vcs_call'):
526 if hasattr(request, 'vcs_call'):
524 # skip vcs calls
527 # skip vcs calls
525 return
528 return
526
529
527 repo_group_name = info['match']['repo_group_name']
530 repo_group_name = info['match']['repo_group_name']
528 repo_group_model = repo_group.RepoGroupModel()
531 repo_group_model = repo_group.RepoGroupModel()
529 by_name_match = repo_group_model.get_by_group_name(
532 by_name_match = repo_group_model.get_by_group_name(
530 repo_group_name, cache=True)
533 repo_group_name, cache=True)
531
534
532 if by_name_match:
535 if by_name_match:
533 # register this as request object we can re-use later
536 # register this as request object we can re-use later
534 request.db_repo_group = by_name_match
537 request.db_repo_group = by_name_match
535 return True
538 return True
536
539
537 return False
540 return False
538
541
539
542
540 class UserGroupRoutePredicate(object):
543 class UserGroupRoutePredicate(object):
541 def __init__(self, val, config):
544 def __init__(self, val, config):
542 self.val = val
545 self.val = val
543
546
544 def text(self):
547 def text(self):
545 return 'user_group_route = %s' % self.val
548 return 'user_group_route = %s' % self.val
546
549
547 phash = text
550 phash = text
548
551
549 def __call__(self, info, request):
552 def __call__(self, info, request):
550 if hasattr(request, 'vcs_call'):
553 if hasattr(request, 'vcs_call'):
551 # skip vcs calls
554 # skip vcs calls
552 return
555 return
553
556
554 user_group_id = info['match']['user_group_id']
557 user_group_id = info['match']['user_group_id']
555 user_group_model = user_group.UserGroup()
558 user_group_model = user_group.UserGroup()
556 by_id_match = user_group_model.get(
559 by_id_match = user_group_model.get(
557 user_group_id, cache=True)
560 user_group_id, cache=True)
558
561
559 if by_id_match:
562 if by_id_match:
560 # register this as request object we can re-use later
563 # register this as request object we can re-use later
561 request.db_user_group = by_id_match
564 request.db_user_group = by_id_match
562 return True
565 return True
563
566
564 return False
567 return False
565
568
566
569
567 class UserRoutePredicateBase(object):
570 class UserRoutePredicateBase(object):
568 supports_default = None
571 supports_default = None
569
572
570 def __init__(self, val, config):
573 def __init__(self, val, config):
571 self.val = val
574 self.val = val
572
575
573 def text(self):
576 def text(self):
574 raise NotImplementedError()
577 raise NotImplementedError()
575
578
576 def __call__(self, info, request):
579 def __call__(self, info, request):
577 if hasattr(request, 'vcs_call'):
580 if hasattr(request, 'vcs_call'):
578 # skip vcs calls
581 # skip vcs calls
579 return
582 return
580
583
581 user_id = info['match']['user_id']
584 user_id = info['match']['user_id']
582 user_model = user.User()
585 user_model = user.User()
583 by_id_match = user_model.get(
586 by_id_match = user_model.get(
584 user_id, cache=True)
587 user_id, cache=True)
585
588
586 if by_id_match:
589 if by_id_match:
587 # register this as request object we can re-use later
590 # register this as request object we can re-use later
588 request.db_user = by_id_match
591 request.db_user = by_id_match
589 request.db_user_supports_default = self.supports_default
592 request.db_user_supports_default = self.supports_default
590 return True
593 return True
591
594
592 return False
595 return False
593
596
594
597
595 class UserRoutePredicate(UserRoutePredicateBase):
598 class UserRoutePredicate(UserRoutePredicateBase):
596 supports_default = False
599 supports_default = False
597
600
598 def text(self):
601 def text(self):
599 return 'user_route = %s' % self.val
602 return 'user_route = %s' % self.val
600
603
601 phash = text
604 phash = text
602
605
603
606
604 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
607 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
605 supports_default = True
608 supports_default = True
606
609
607 def text(self):
610 def text(self):
608 return 'user_with_default_route = %s' % self.val
611 return 'user_with_default_route = %s' % self.val
609
612
610 phash = text
613 phash = text
611
614
612
615
613 def includeme(config):
616 def includeme(config):
614 config.add_route_predicate(
617 config.add_route_predicate(
615 'repo_route', RepoRoutePredicate)
618 'repo_route', RepoRoutePredicate)
616 config.add_route_predicate(
619 config.add_route_predicate(
617 'repo_accepted_types', RepoTypeRoutePredicate)
620 'repo_accepted_types', RepoTypeRoutePredicate)
618 config.add_route_predicate(
621 config.add_route_predicate(
619 'repo_group_route', RepoGroupRoutePredicate)
622 'repo_group_route', RepoGroupRoutePredicate)
620 config.add_route_predicate(
623 config.add_route_predicate(
621 'user_group_route', UserGroupRoutePredicate)
624 'user_group_route', UserGroupRoutePredicate)
622 config.add_route_predicate(
625 config.add_route_predicate(
623 'user_route_with_default', UserRouteWithDefaultPredicate)
626 'user_route_with_default', UserRouteWithDefaultPredicate)
624 config.add_route_predicate(
627 config.add_route_predicate(
625 'user_route', UserRoutePredicate)
\ No newline at end of file
628 'user_route', UserRoutePredicate)
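Once registered in includeme(), these predicates are used as extra keyword arguments to add_route; a hedged example (route name and pattern invented for illustration):

    config.add_route('repo_summary', '/{repo_name}', repo_route=None)
    # RepoRoutePredicate.__call__ then resolves {repo_name} to a repository
    # and stores it on request.db_repo before the view executes.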
@@ -1,1694 +1,1694 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode.lib.utils2 import safe_str, safe_unicode
37 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs.utils import author_name, author_email
39 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 RepositoryError)
46 RepositoryError)
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 FILEMODE_DEFAULT = 0100644
52 FILEMODE_DEFAULT = 0100644
53 FILEMODE_EXECUTABLE = 0100755
53 FILEMODE_EXECUTABLE = 0100755
54
54
55 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
55 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 MergeResponse = collections.namedtuple(
56 MergeResponse = collections.namedtuple(
57 'MergeResponse',
57 'MergeResponse',
58 ('possible', 'executed', 'merge_ref', 'failure_reason'))
58 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59
59
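For orientation, a Reference simply bundles a ref type, its name and the commit id it points to (the hash below is a placeholder):

    ref = Reference('branch', 'default', 'deadbeef' * 5)
    ref.type, ref.name, ref.commit_id  # -> ('branch', 'default', '<40-char id>')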
60
60
61 class MergeFailureReason(object):
61 class MergeFailureReason(object):
62 """
62 """
63 Enumeration with all the reasons why the server side merge could fail.
63 Enumeration with all the reasons why the server side merge could fail.
64
64
65 DO NOT change the number of the reasons, as they may be stored in the
65 DO NOT change the number of the reasons, as they may be stored in the
66 database.
66 database.
67
67
68 Changing the name of a reason is acceptable and encouraged to deprecate old
68 Changing the name of a reason is acceptable and encouraged to deprecate old
69 reasons.
69 reasons.
70 """
70 """
71
71
72 # Everything went well.
72 # Everything went well.
73 NONE = 0
73 NONE = 0
74
74
75 # An unexpected exception was raised. Check the logs for more details.
75 # An unexpected exception was raised. Check the logs for more details.
76 UNKNOWN = 1
76 UNKNOWN = 1
77
77
78 # The merge was not successful, there are conflicts.
78 # The merge was not successful, there are conflicts.
79 MERGE_FAILED = 2
79 MERGE_FAILED = 2
80
80
81 # The merge succeeded but we could not push it to the target repository.
81 # The merge succeeded but we could not push it to the target repository.
82 PUSH_FAILED = 3
82 PUSH_FAILED = 3
83
83
84 # The specified target is not a head in the target repository.
84 # The specified target is not a head in the target repository.
85 TARGET_IS_NOT_HEAD = 4
85 TARGET_IS_NOT_HEAD = 4
86
86
87 # The source repository contains more branches than the target. Pushing
87 # The source repository contains more branches than the target. Pushing
88 # the merge will create additional branches in the target.
88 # the merge will create additional branches in the target.
89 HG_SOURCE_HAS_MORE_BRANCHES = 5
89 HG_SOURCE_HAS_MORE_BRANCHES = 5
90
90
91 # The target reference has multiple heads. That does not allow to correctly
91 # The target reference has multiple heads. That does not allow to correctly
92 # identify the target location. This could only happen for mercurial
92 # identify the target location. This could only happen for mercurial
93 # branches.
93 # branches.
94 HG_TARGET_HAS_MULTIPLE_HEADS = 6
94 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95
95
96 # The target repository is locked
96 # The target repository is locked
97 TARGET_IS_LOCKED = 7
97 TARGET_IS_LOCKED = 7
98
98
99 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
99 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 # An involved commit could not be found.
100 # An involved commit could not be found.
101 _DEPRECATED_MISSING_COMMIT = 8
101 _DEPRECATED_MISSING_COMMIT = 8
102
102
103 # The target repo reference is missing.
103 # The target repo reference is missing.
104 MISSING_TARGET_REF = 9
104 MISSING_TARGET_REF = 9
105
105
106 # The source repo reference is missing.
106 # The source repo reference is missing.
107 MISSING_SOURCE_REF = 10
107 MISSING_SOURCE_REF = 10
108
108
109 # The merge was not successful, there are conflicts related to sub
109 # The merge was not successful, there are conflicts related to sub
110 # repositories.
110 # repositories.
111 SUBREPO_MERGE_FAILED = 11
111 SUBREPO_MERGE_FAILED = 11
112
112
113
113
114 class UpdateFailureReason(object):
114 class UpdateFailureReason(object):
115 """
115 """
116 Enumeration with all the reasons why the pull request update could fail.
116 Enumeration with all the reasons why the pull request update could fail.
117
117
118 DO NOT change the number of the reasons, as they may be stored in the
118 DO NOT change the number of the reasons, as they may be stored in the
119 database.
119 database.
120
120
121 Changing the name of a reason is acceptable and encouraged to deprecate old
121 Changing the name of a reason is acceptable and encouraged to deprecate old
122 reasons.
122 reasons.
123 """
123 """
124
124
125 # Everything went well.
125 # Everything went well.
126 NONE = 0
126 NONE = 0
127
127
128 # An unexpected exception was raised. Check the logs for more details.
128 # An unexpected exception was raised. Check the logs for more details.
129 UNKNOWN = 1
129 UNKNOWN = 1
130
130
131 # The pull request is up to date.
131 # The pull request is up to date.
132 NO_CHANGE = 2
132 NO_CHANGE = 2
133
133
134 # The pull request has a reference type that is not supported for update.
134 # The pull request has a reference type that is not supported for update.
135 WRONG_REF_TYPE = 3
135 WRONG_REF_TYPE = 3
136
136
137 # Update failed because the target reference is missing.
137 # Update failed because the target reference is missing.
138 MISSING_TARGET_REF = 4
138 MISSING_TARGET_REF = 4
139
139
140 # Update failed because the source reference is missing.
140 # Update failed because the source reference is missing.
141 MISSING_SOURCE_REF = 5
141 MISSING_SOURCE_REF = 5
142
142
143
143
144 class BaseRepository(object):
144 class BaseRepository(object):
145 """
145 """
146 Base Repository for final backends
146 Base Repository for final backends
147
147
148 .. attribute:: DEFAULT_BRANCH_NAME
148 .. attribute:: DEFAULT_BRANCH_NAME
149
149
150 name of default branch (i.e. "trunk" for svn, "master" for git etc.)
150 name of default branch (i.e. "trunk" for svn, "master" for git etc.)
151
151
152 .. attribute:: commit_ids
152 .. attribute:: commit_ids
153
153
154 list of all available commit ids, in ascending order
154 list of all available commit ids, in ascending order
155
155
156 .. attribute:: path
156 .. attribute:: path
157
157
158 absolute path to the repository
158 absolute path to the repository
159
159
160 .. attribute:: bookmarks
160 .. attribute:: bookmarks
161
161
162 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
162 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 there are no bookmarks or the backend implementation does not support
163 there are no bookmarks or the backend implementation does not support
164 bookmarks.
164 bookmarks.
165
165
166 .. attribute:: tags
166 .. attribute:: tags
167
167
168 Mapping from name to :term:`Commit ID` of the tag.
168 Mapping from name to :term:`Commit ID` of the tag.
169
169
170 """
170 """
171
171
172 DEFAULT_BRANCH_NAME = None
172 DEFAULT_BRANCH_NAME = None
173 DEFAULT_CONTACT = u"Unknown"
173 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_DESCRIPTION = u"unknown"
174 DEFAULT_DESCRIPTION = u"unknown"
175 EMPTY_COMMIT_ID = '0' * 40
175 EMPTY_COMMIT_ID = '0' * 40
176
176
177 path = None
177 path = None
178
178
179 def __init__(self, repo_path, config=None, create=False, **kwargs):
179 def __init__(self, repo_path, config=None, create=False, **kwargs):
180 """
180 """
181 Initializes repository. Raises RepositoryError if repository could
181 Initializes repository. Raises RepositoryError if repository could
182 not be found at the given ``repo_path`` or directory at ``repo_path``
182 not be found at the given ``repo_path`` or directory at ``repo_path``
183 exists and ``create`` is set to True.
183 exists and ``create`` is set to True.
184
184
185 :param repo_path: local path of the repository
185 :param repo_path: local path of the repository
186 :param config: repository configuration
186 :param config: repository configuration
187 :param create=False: if set to True, would try to create repository.
187 :param create=False: if set to True, would try to create repository.
188 :param src_url=None: if set, should be proper url from which repository
188 :param src_url=None: if set, should be proper url from which repository
189 would be cloned; requires ``create`` parameter to be set to True -
189 would be cloned; requires ``create`` parameter to be set to True -
190 raises RepositoryError if src_url is set and create evaluates to
190 raises RepositoryError if src_url is set and create evaluates to
191 False
191 False
192 """
192 """
193 raise NotImplementedError
193 raise NotImplementedError
194
194
195 def __repr__(self):
195 def __repr__(self):
196 return '<%s at %s>' % (self.__class__.__name__, self.path)
196 return '<%s at %s>' % (self.__class__.__name__, self.path)
197
197
198 def __len__(self):
198 def __len__(self):
199 return self.count()
199 return self.count()
200
200
201 def __eq__(self, other):
201 def __eq__(self, other):
202 same_instance = isinstance(other, self.__class__)
202 same_instance = isinstance(other, self.__class__)
203 return same_instance and other.path == self.path
203 return same_instance and other.path == self.path
204
204
205 def __ne__(self, other):
205 def __ne__(self, other):
206 return not self.__eq__(other)
206 return not self.__eq__(other)
207
207
208 @classmethod
208 @classmethod
209 def get_default_config(cls, default=None):
209 def get_default_config(cls, default=None):
210 config = Config()
210 config = Config()
211 if default and isinstance(default, list):
211 if default and isinstance(default, list):
212 for section, key, val in default:
212 for section, key, val in default:
213 config.set(section, key, val)
213 config.set(section, key, val)
214 return config
214 return config
215
215
216 @LazyProperty
216 @LazyProperty
217 def EMPTY_COMMIT(self):
217 def EMPTY_COMMIT(self):
218 return EmptyCommit(self.EMPTY_COMMIT_ID)
218 return EmptyCommit(self.EMPTY_COMMIT_ID)
219
219
220 @LazyProperty
220 @LazyProperty
221 def alias(self):
221 def alias(self):
222 for k, v in settings.BACKENDS.items():
222 for k, v in settings.BACKENDS.items():
223 if v.split('.')[-1] == str(self.__class__.__name__):
223 if v.split('.')[-1] == str(self.__class__.__name__):
224 return k
224 return k
225
225
226 @LazyProperty
226 @LazyProperty
227 def name(self):
227 def name(self):
228 return safe_unicode(os.path.basename(self.path))
228 return safe_unicode(os.path.basename(self.path))
229
229
230 @LazyProperty
230 @LazyProperty
231 def description(self):
231 def description(self):
232 raise NotImplementedError
232 raise NotImplementedError
233
233
234 def refs(self):
234 def refs(self):
235 """
235 """
236 returns a `dict` with branches, bookmarks, tags, and closed_branches
236 returns a `dict` with branches, bookmarks, tags, and closed_branches
237 for this repository
237 for this repository
238 """
238 """
239 return dict(
239 return dict(
240 branches=self.branches,
240 branches=self.branches,
241 branches_closed=self.branches_closed,
241 branches_closed=self.branches_closed,
242 tags=self.tags,
242 tags=self.tags,
243 bookmarks=self.bookmarks
243 bookmarks=self.bookmarks
244 )
244 )
245
245
246 @LazyProperty
246 @LazyProperty
247 def branches(self):
247 def branches(self):
248 """
248 """
249 A `dict` which maps branch names to commit ids.
249 A `dict` which maps branch names to commit ids.
250 """
250 """
251 raise NotImplementedError
251 raise NotImplementedError
252
252
253 @LazyProperty
253 @LazyProperty
254 def branches_closed(self):
254 def branches_closed(self):
255 """
255 """
256 A `dict` which maps closed branch names to commit ids.
256 A `dict` which maps closed branch names to commit ids.
257 """
257 """
258 raise NotImplementedError
258 raise NotImplementedError
259
259
260 @LazyProperty
260 @LazyProperty
261 def bookmarks(self):
261 def bookmarks(self):
262 """
262 """
263 A `dict` which maps bookmark names to commit ids.
263 A `dict` which maps bookmark names to commit ids.
264 """
264 """
265 raise NotImplementedError
265 raise NotImplementedError
266
266
267 @LazyProperty
267 @LazyProperty
268 def tags(self):
268 def tags(self):
269 """
269 """
270 A `dict` which maps tag names to commit ids.
270 A `dict` which maps tag names to commit ids.
271 """
271 """
272 raise NotImplementedError
272 raise NotImplementedError
273
273
274 @LazyProperty
274 @LazyProperty
275 def size(self):
275 def size(self):
276 """
276 """
277 Returns combined size in bytes for all repository files
277 Returns combined size in bytes for all repository files
278 """
278 """
279 tip = self.get_commit()
279 tip = self.get_commit()
280 return tip.size
280 return tip.size
281
281
282 def size_at_commit(self, commit_id):
282 def size_at_commit(self, commit_id):
283 commit = self.get_commit(commit_id)
283 commit = self.get_commit(commit_id)
284 return commit.size
284 return commit.size
285
285
286 def is_empty(self):
286 def is_empty(self):
287 return not bool(self.commit_ids)
287 return not bool(self.commit_ids)
288
288
289 @staticmethod
289 @staticmethod
290 def check_url(url, config):
290 def check_url(url, config):
291 """
291 """
292 Function will check given url and try to verify if it's a valid
292 Function will check given url and try to verify if it's a valid
293 link.
293 link.
294 """
294 """
295 raise NotImplementedError
295 raise NotImplementedError
296
296
297 @staticmethod
297 @staticmethod
298 def is_valid_repository(path):
298 def is_valid_repository(path):
299 """
299 """
300 Check if given `path` contains a valid repository of this backend
300 Check if given `path` contains a valid repository of this backend
301 """
301 """
302 raise NotImplementedError
302 raise NotImplementedError
303
303
304 # ==========================================================================
304 # ==========================================================================
305 # COMMITS
305 # COMMITS
306 # ==========================================================================
306 # ==========================================================================
307
307
308 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
308 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
309 """
309 """
310 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
310 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
311 are both None, most recent commit is returned.
311 are both None, most recent commit is returned.
312
312
313 :param pre_load: Optional. List of commit attributes to load.
313 :param pre_load: Optional. List of commit attributes to load.
314
314
315 :raises ``EmptyRepositoryError``: if there are no commits
315 :raises ``EmptyRepositoryError``: if there are no commits
316 """
316 """
317 raise NotImplementedError
317 raise NotImplementedError
318
318
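# A minimal usage sketch (not from the original source), assuming a concrete
# backend instance `repo`: a commit can be fetched by id or by index, and
# selected attributes can be pre-loaded, per the get_commit docstring above.
#
#     tip = repo.get_commit()  # most recent commit
#     first = repo.get_commit(commit_idx=0, pre_load=['author', 'date'])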
319 def __iter__(self):
319 def __iter__(self):
320 for commit_id in self.commit_ids:
320 for commit_id in self.commit_ids:
321 yield self.get_commit(commit_id=commit_id)
321 yield self.get_commit(commit_id=commit_id)
322
322
323 def get_commits(
323 def get_commits(
324 self, start_id=None, end_id=None, start_date=None, end_date=None,
324 self, start_id=None, end_id=None, start_date=None, end_date=None,
325 branch_name=None, show_hidden=False, pre_load=None):
325 branch_name=None, show_hidden=False, pre_load=None):
326 """
326 """
327 Returns an iterator of `BaseCommit` objects from start to end.
327 Returns an iterator of `BaseCommit` objects from start to end.
328 This behaves just like a list slice, i.e. the end commit is not
328 This behaves just like a list slice, i.e. the end commit is not
329 inclusive.
329 inclusive.
330
330
331 :param start_id: None or str, must be a valid commit id
331 :param start_id: None or str, must be a valid commit id
332 :param end_id: None or str, must be a valid commit id
332 :param end_id: None or str, must be a valid commit id
333 :param start_date:
333 :param start_date:
334 :param end_date:
334 :param end_date:
335 :param branch_name:
335 :param branch_name:
336 :param show_hidden:
336 :param show_hidden:
337 :param pre_load:
337 :param pre_load:
338 """
338 """
339 raise NotImplementedError
339 raise NotImplementedError
340
340
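# Illustrative sketch (assumed backend instance `repo` and valid commit ids):
# iterating a bounded, per-branch range via the parameters documented above.
#
#     commits = repo.get_commits(start_id=first_id, end_id=last_id,
#                                branch_name='default')
#     messages = [c.message for c in commits]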
341 def __getitem__(self, key):
341 def __getitem__(self, key):
342 """
342 """
343 Allows index based access to the commit objects of this repository.
343 Allows index based access to the commit objects of this repository.
344 """
344 """
345 pre_load = ["author", "branch", "date", "message", "parents"]
345 pre_load = ["author", "branch", "date", "message", "parents"]
346 if isinstance(key, slice):
346 if isinstance(key, slice):
347 return self._get_range(key, pre_load)
347 return self._get_range(key, pre_load)
348 return self.get_commit(commit_idx=key, pre_load=pre_load)
348 return self.get_commit(commit_idx=key, pre_load=pre_load)
349
349
350 def _get_range(self, slice_obj, pre_load):
350 def _get_range(self, slice_obj, pre_load):
351 for commit_id in self.commit_ids.__getitem__(slice_obj):
351 for commit_id in self.commit_ids.__getitem__(slice_obj):
352 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
352 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
353
353
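# Illustrative sketch (assumed backend instance `repo`): index and slice
# access mirror list semantics, as implemented by __getitem__ above.
#
#     oldest = repo[0]             # same as repo.get_commit(commit_idx=0)
#     recent = list(repo[-5:])     # generator over the five newest commits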
354 def count(self):
354 def count(self):
355 return len(self.commit_ids)
355 return len(self.commit_ids)
356
356
357 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
357 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
358 """
358 """
359 Creates and returns a tag for the given ``commit_id``.
359 Creates and returns a tag for the given ``commit_id``.
360
360
361 :param name: name for new tag
361 :param name: name for new tag
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 :param commit_id: commit id for which new tag would be created
363 :param commit_id: commit id for which new tag would be created
364 :param message: message of the tag's commit
364 :param message: message of the tag's commit
365 :param date: date of tag's commit
365 :param date: date of tag's commit
366
366
367 :raises TagAlreadyExistError: if tag with same name already exists
367 :raises TagAlreadyExistError: if tag with same name already exists
368 """
368 """
369 raise NotImplementedError
369 raise NotImplementedError
370
370
371 def remove_tag(self, name, user, message=None, date=None):
371 def remove_tag(self, name, user, message=None, date=None):
372 """
372 """
373 Removes tag with the given ``name``.
373 Removes tag with the given ``name``.
374
374
375 :param name: name of the tag to be removed
375 :param name: name of the tag to be removed
376 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
377 :param message: message of the tag's removal commit
377 :param message: message of the tag's removal commit
378 :param date: date of tag's removal commit
378 :param date: date of tag's removal commit
379
379
380 :raises TagDoesNotExistError: if tag with given name does not exist
380 :raises TagDoesNotExistError: if tag with given name does not exist
381 """
381 """
382 raise NotImplementedError
382 raise NotImplementedError
383
383
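# Illustrative sketch (assumed backend instance `repo` and existing commit):
# creating and later removing a tag with the signatures documented above.
#
#     repo.tag('v1.0.0', user='Joe Doe <joe.doe@example.com>',
#              commit_id=commit.raw_id, message='release 1.0.0')
#     repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>')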
384 def get_diff(
384 def get_diff(
385 self, commit1, commit2, path=None, ignore_whitespace=False,
385 self, commit1, commit2, path=None, ignore_whitespace=False,
386 context=3, path1=None):
386 context=3, path1=None):
387 """
387 """
388 Returns a (git-like) *diff* as plain text, showing the changes
388 Returns a (git-like) *diff* as plain text, showing the changes
389 introduced by `commit2` since `commit1`.
389 introduced by `commit2` since `commit1`.
390
390
391 :param commit1: Entry point from which diff is shown. Can be
391 :param commit1: Entry point from which diff is shown. Can be
392 ``self.EMPTY_COMMIT`` - in this case, patch showing all
392 ``self.EMPTY_COMMIT`` - in this case, patch showing all
393 the changes since empty state of the repository until `commit2`
393 the changes since empty state of the repository until `commit2`
394 :param commit2: Until which commit changes should be shown.
394 :param commit2: Until which commit changes should be shown.
395 :param path: Can be set to a path of a file to create a diff of that
395 :param path: Can be set to a path of a file to create a diff of that
396 file. If `path1` is also set, this value is only associated to
396 file. If `path1` is also set, this value is only associated to
397 `commit2`.
397 `commit2`.
398 :param ignore_whitespace: If set to ``True``, whitespace changes are
398 :param ignore_whitespace: If set to ``True``, whitespace changes are
399 not shown. Defaults to ``False``.
399 not shown. Defaults to ``False``.
400 :param context: How many lines before/after changed lines should be
400 :param context: How many lines before/after changed lines should be
401 shown. Defaults to ``3``.
401 shown. Defaults to ``3``.
402 :param path1: Can be set to a path to associate with `commit1`. This
402 :param path1: Can be set to a path to associate with `commit1`. This
403 parameter works only for backends which support diff generation for
403 parameter works only for backends which support diff generation for
404 different paths. Other backends will raise a `ValueError` if `path1`
404 different paths. Other backends will raise a `ValueError` if `path1`
405 is set and has a different value than `path`.
405 is set and has a different value than `path`.
406 :param file_path: filter this diff by given path pattern
406 :param file_path: filter this diff by given path pattern
407 """
407 """
408 raise NotImplementedError
408 raise NotImplementedError
409
409
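# Illustrative sketch (assumed backend instance `repo` and two of its
# commits): a plain-text diff limited to a single file, per the get_diff
# docstring above.
#
#     diff = repo.get_diff(old_commit, new_commit, path='setup.py',
#                          ignore_whitespace=True, context=5)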
410 def strip(self, commit_id, branch=None):
410 def strip(self, commit_id, branch=None):
411 """
411 """
412 Strip given commit_id from the repository
412 Strip given commit_id from the repository
413 """
413 """
414 raise NotImplementedError
414 raise NotImplementedError
415
415
416 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
416 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
417 """
417 """
418 Return the latest common ancestor commit if one exists for this repo
418 Return the latest common ancestor commit if one exists for this repo
419 `commit_id1` vs `commit_id2` from `repo2`.
419 `commit_id1` vs `commit_id2` from `repo2`.
420
420
421 :param commit_id1: Commit id from this repository to use as a
421 :param commit_id1: Commit id from this repository to use as a
422 target for the comparison.
422 target for the comparison.
423 :param commit_id2: Source commit id to use for comparison.
423 :param commit_id2: Source commit id to use for comparison.
424 :param repo2: Source repository to use for comparison.
424 :param repo2: Source repository to use for comparison.
425 """
425 """
426 raise NotImplementedError
426 raise NotImplementedError
427
427
428 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
428 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
429 """
429 """
430 Compare this repository's revision `commit_id1` with `commit_id2`.
430 Compare this repository's revision `commit_id1` with `commit_id2`.
431
431
432 Returns a tuple (commits, ancestor) of the commits that would be
432 Returns a tuple (commits, ancestor) of the commits that would be
433 merged from `commit_id2`. For a normal compare (``merge=False``),
433 merged from `commit_id2`. For a normal compare (``merge=False``),
434 ``None`` is returned as the ancestor.
434 ``None`` is returned as the ancestor.
435
435
436 :param commit_id1: Commit id from this repository to use as a
436 :param commit_id1: Commit id from this repository to use as a
437 target for the comparison.
437 target for the comparison.
438 :param commit_id2: Source commit id to use for comparison.
438 :param commit_id2: Source commit id to use for comparison.
439 :param repo2: Source repository to use for comparison.
439 :param repo2: Source repository to use for comparison.
440 :param merge: If set to ``True`` will do a merge compare which also
440 :param merge: If set to ``True`` will do a merge compare which also
441 returns the common ancestor.
441 returns the common ancestor.
442 :param pre_load: Optional. List of commit attributes to load.
442 :param pre_load: Optional. List of commit attributes to load.
443 """
443 """
444 raise NotImplementedError
444 raise NotImplementedError
445
445
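# Illustrative sketch (assumed repositories `repo` and `fork` plus commit
# ids): a merge-style compare returning the commits and their common ancestor.
#
#     commits, ancestor = repo.compare(
#         target_commit_id, source_commit_id, fork, merge=True)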
446 def merge(self, target_ref, source_repo, source_ref, workspace_id,
446 def merge(self, target_ref, source_repo, source_ref, workspace_id,
447 user_name='', user_email='', message='', dry_run=False,
447 user_name='', user_email='', message='', dry_run=False,
448 use_rebase=False, close_branch=False):
448 use_rebase=False, close_branch=False):
449 """
449 """
450 Merge the revisions specified in `source_ref` from `source_repo`
450 Merge the revisions specified in `source_ref` from `source_repo`
451 onto the `target_ref` of this repository.
451 onto the `target_ref` of this repository.
452
452
453 `source_ref` and `target_ref` are named tuples with the following
453 `source_ref` and `target_ref` are named tuples with the following
454 fields `type`, `name` and `commit_id`.
454 fields `type`, `name` and `commit_id`.
455
455
456 Returns a MergeResponse named tuple with the following fields
456 Returns a MergeResponse named tuple with the following fields
457 'possible', 'executed', 'source_commit', 'target_commit',
457 'possible', 'executed', 'source_commit', 'target_commit',
458 'merge_commit'.
458 'merge_commit'.
459
459
460 :param target_ref: `target_ref` points to the commit on top of which
460 :param target_ref: `target_ref` points to the commit on top of which
461 the `source_ref` should be merged.
461 the `source_ref` should be merged.
462 :param source_repo: The repository that contains the commits to be
462 :param source_repo: The repository that contains the commits to be
463 merged.
463 merged.
464 :param source_ref: `source_ref` points to the topmost commit from
464 :param source_ref: `source_ref` points to the topmost commit from
465 the `source_repo` which should be merged.
465 the `source_repo` which should be merged.
466 :param workspace_id: `workspace_id` unique identifier.
466 :param workspace_id: `workspace_id` unique identifier.
467 :param user_name: Merge commit `user_name`.
467 :param user_name: Merge commit `user_name`.
468 :param user_email: Merge commit `user_email`.
468 :param user_email: Merge commit `user_email`.
469 :param message: Merge commit `message`.
469 :param message: Merge commit `message`.
470 :param dry_run: If `True` the merge will not take place.
470 :param dry_run: If `True` the merge will not take place.
471 :param use_rebase: If `True` commits from the source will be rebased
471 :param use_rebase: If `True` commits from the source will be rebased
472 on top of the target instead of being merged.
472 on top of the target instead of being merged.
473 :param close_branch: If `True` the branch will be closed before merging it
473 :param close_branch: If `True` the branch will be closed before merging it
474 """
474 """
475 if dry_run:
475 if dry_run:
476 message = message or 'dry_run_merge_message'
476 message = message or 'dry_run_merge_message'
477 user_email = user_email or 'dry-run-merge@rhodecode.com'
477 user_email = user_email or 'dry-run-merge@rhodecode.com'
478 user_name = user_name or 'Dry-Run User'
478 user_name = user_name or 'Dry-Run User'
479 else:
479 else:
480 if not user_name:
480 if not user_name:
481 raise ValueError('user_name cannot be empty')
481 raise ValueError('user_name cannot be empty')
482 if not user_email:
482 if not user_email:
483 raise ValueError('user_email cannot be empty')
483 raise ValueError('user_email cannot be empty')
484 if not message:
484 if not message:
485 raise ValueError('message cannot be empty')
485 raise ValueError('message cannot be empty')
486
486
487 shadow_repository_path = self._maybe_prepare_merge_workspace(
487 shadow_repository_path = self._maybe_prepare_merge_workspace(
488 workspace_id, target_ref, source_ref)
488 workspace_id, target_ref, source_ref)
489
489
490 try:
490 try:
491 return self._merge_repo(
491 return self._merge_repo(
492 shadow_repository_path, target_ref, source_repo,
492 shadow_repository_path, target_ref, source_repo,
493 source_ref, message, user_name, user_email, dry_run=dry_run,
493 source_ref, message, user_name, user_email, dry_run=dry_run,
494 use_rebase=use_rebase, close_branch=close_branch)
494 use_rebase=use_rebase, close_branch=close_branch)
495 except RepositoryError:
495 except RepositoryError:
496 log.exception(
496 log.exception(
497 'Unexpected failure when running merge, dry-run=%s',
497 'Unexpected failure when running merge, dry-run=%s',
498 dry_run)
498 dry_run)
499 return MergeResponse(
499 return MergeResponse(
500 False, False, None, MergeFailureReason.UNKNOWN)
500 False, False, None, MergeFailureReason.UNKNOWN)
501
501
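# Illustrative sketch (assumed `repo`, `source_repo` and ref named tuples with
# `type`, `name` and `commit_id` fields): a dry-run merge only reports whether
# the merge would be possible, as described in the merge docstring above.
#
#     resp = repo.merge(target_ref, source_repo, source_ref,
#                       workspace_id='pr-42-workspace', dry_run=True)
#     merge_ok = resp.possible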
502 def _merge_repo(self, shadow_repository_path, target_ref,
502 def _merge_repo(self, shadow_repository_path, target_ref,
503 source_repo, source_ref, merge_message,
503 source_repo, source_ref, merge_message,
504 merger_name, merger_email, dry_run=False,
504 merger_name, merger_email, dry_run=False,
505 use_rebase=False, close_branch=False):
505 use_rebase=False, close_branch=False):
506 """Internal implementation of merge."""
506 """Internal implementation of merge."""
507 raise NotImplementedError
507 raise NotImplementedError
508
508
509 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
509 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
510 """
510 """
511 Create the merge workspace.
511 Create the merge workspace.
512
512
513 :param workspace_id: `workspace_id` unique identifier.
513 :param workspace_id: `workspace_id` unique identifier.
514 """
514 """
515 raise NotImplementedError
515 raise NotImplementedError
516
516
517 def cleanup_merge_workspace(self, workspace_id):
517 def cleanup_merge_workspace(self, workspace_id):
518 """
518 """
519 Remove merge workspace.
519 Remove merge workspace.
520
520
521 This function MUST not fail in case there is no workspace associated with
521 This function MUST not fail in case there is no workspace associated with
522 the given `workspace_id`.
522 the given `workspace_id`.
523
523
524 :param workspace_id: `workspace_id` unique identifier.
524 :param workspace_id: `workspace_id` unique identifier.
525 """
525 """
526 raise NotImplementedError
526 raise NotImplementedError
527
527
528 # ========== #
528 # ========== #
529 # COMMIT API #
529 # COMMIT API #
530 # ========== #
530 # ========== #
531
531
532 @LazyProperty
532 @LazyProperty
533 def in_memory_commit(self):
533 def in_memory_commit(self):
534 """
534 """
535 Returns :class:`InMemoryCommit` object for this repository.
535 Returns :class:`InMemoryCommit` object for this repository.
536 """
536 """
537 raise NotImplementedError
537 raise NotImplementedError
538
538
539 # ======================== #
539 # ======================== #
540 # UTILITIES FOR SUBCLASSES #
540 # UTILITIES FOR SUBCLASSES #
541 # ======================== #
541 # ======================== #
542
542
543 def _validate_diff_commits(self, commit1, commit2):
543 def _validate_diff_commits(self, commit1, commit2):
544 """
544 """
545 Validates that the given commits are related to this repository.
545 Validates that the given commits are related to this repository.
546
546
547 Intended as a utility for subclasses to have a consistent validation
547 Intended as a utility for subclasses to have a consistent validation
548 of input parameters in methods like :meth:`get_diff`.
548 of input parameters in methods like :meth:`get_diff`.
549 """
549 """
550 self._validate_commit(commit1)
550 self._validate_commit(commit1)
551 self._validate_commit(commit2)
551 self._validate_commit(commit2)
552 if (isinstance(commit1, EmptyCommit) and
552 if (isinstance(commit1, EmptyCommit) and
553 isinstance(commit2, EmptyCommit)):
553 isinstance(commit2, EmptyCommit)):
554 raise ValueError("Cannot compare two empty commits")
554 raise ValueError("Cannot compare two empty commits")
555
555
556 def _validate_commit(self, commit):
556 def _validate_commit(self, commit):
557 if not isinstance(commit, BaseCommit):
557 if not isinstance(commit, BaseCommit):
558 raise TypeError(
558 raise TypeError(
559 "%s is not of type BaseCommit" % repr(commit))
559 "%s is not of type BaseCommit" % repr(commit))
560 if commit.repository != self and not isinstance(commit, EmptyCommit):
560 if commit.repository != self and not isinstance(commit, EmptyCommit):
561 raise ValueError(
561 raise ValueError(
562 "Commit %s must be a valid commit from this repository %s, "
562 "Commit %s must be a valid commit from this repository %s, "
563 "related to this repository instead %s." %
563 "related to this repository instead %s." %
564 (commit, self, commit.repository))
564 (commit, self, commit.repository))
565
565
566 def _validate_commit_id(self, commit_id):
566 def _validate_commit_id(self, commit_id):
567 if not isinstance(commit_id, basestring):
567 if not isinstance(commit_id, basestring):
568 raise TypeError("commit_id must be a string value")
568 raise TypeError("commit_id must be a string value")
569
569
570 def _validate_commit_idx(self, commit_idx):
570 def _validate_commit_idx(self, commit_idx):
571 if not isinstance(commit_idx, (int, long)):
571 if not isinstance(commit_idx, (int, long)):
572 raise TypeError("commit_idx must be a numeric value")
572 raise TypeError("commit_idx must be a numeric value")
573
573
574 def _validate_branch_name(self, branch_name):
574 def _validate_branch_name(self, branch_name):
575 if branch_name and branch_name not in self.branches_all:
575 if branch_name and branch_name not in self.branches_all:
576 msg = ("Branch %s not found in %s" % (branch_name, self))
576 msg = ("Branch %s not found in %s" % (branch_name, self))
577 raise BranchDoesNotExistError(msg)
577 raise BranchDoesNotExistError(msg)
578
578
579 #
579 #
580 # Supporting deprecated API parts
580 # Supporting deprecated API parts
581 # TODO: johbo: consider to move this into a mixin
581 # TODO: johbo: consider to move this into a mixin
582 #
582 #
583
583
584 @property
584 @property
585 def EMPTY_CHANGESET(self):
585 def EMPTY_CHANGESET(self):
586 warnings.warn(
586 warnings.warn(
587 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
587 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
588 return self.EMPTY_COMMIT_ID
588 return self.EMPTY_COMMIT_ID
589
589
590 @property
590 @property
591 def revisions(self):
591 def revisions(self):
592 warnings.warn("Use commits attribute instead", DeprecationWarning)
592 warnings.warn("Use commits attribute instead", DeprecationWarning)
593 return self.commit_ids
593 return self.commit_ids
594
594
595 @revisions.setter
595 @revisions.setter
596 def revisions(self, value):
596 def revisions(self, value):
597 warnings.warn("Use commits attribute instead", DeprecationWarning)
597 warnings.warn("Use commits attribute instead", DeprecationWarning)
598 self.commit_ids = value
598 self.commit_ids = value
599
599
600 def get_changeset(self, revision=None, pre_load=None):
600 def get_changeset(self, revision=None, pre_load=None):
601 warnings.warn("Use get_commit instead", DeprecationWarning)
601 warnings.warn("Use get_commit instead", DeprecationWarning)
602 commit_id = None
602 commit_id = None
603 commit_idx = None
603 commit_idx = None
604 if isinstance(revision, basestring):
604 if isinstance(revision, basestring):
605 commit_id = revision
605 commit_id = revision
606 else:
606 else:
607 commit_idx = revision
607 commit_idx = revision
608 return self.get_commit(
608 return self.get_commit(
609 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
609 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
610
610
611 def get_changesets(
611 def get_changesets(
612 self, start=None, end=None, start_date=None, end_date=None,
612 self, start=None, end=None, start_date=None, end_date=None,
613 branch_name=None, pre_load=None):
613 branch_name=None, pre_load=None):
614 warnings.warn("Use get_commits instead", DeprecationWarning)
614 warnings.warn("Use get_commits instead", DeprecationWarning)
615 start_id = self._revision_to_commit(start)
615 start_id = self._revision_to_commit(start)
616 end_id = self._revision_to_commit(end)
616 end_id = self._revision_to_commit(end)
617 return self.get_commits(
617 return self.get_commits(
618 start_id=start_id, end_id=end_id, start_date=start_date,
618 start_id=start_id, end_id=end_id, start_date=start_date,
619 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
619 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
620
620
621 def _revision_to_commit(self, revision):
621 def _revision_to_commit(self, revision):
622 """
622 """
623 Translates a revision to a commit_id
623 Translates a revision to a commit_id
624
624
625 Helps to support the old changeset-based API, which allows using
625 Helps to support the old changeset-based API, which allows using
626 commit ids and commit indices interchangeably.
626 commit ids and commit indices interchangeably.
627 """
627 """
628 if revision is None:
628 if revision is None:
629 return revision
629 return revision
630
630
631 if isinstance(revision, basestring):
631 if isinstance(revision, basestring):
632 commit_id = revision
632 commit_id = revision
633 else:
633 else:
634 commit_id = self.commit_ids[revision]
634 commit_id = self.commit_ids[revision]
635 return commit_id
635 return commit_id
636
636
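# Illustrative sketch of the deprecated changeset API above (assumed backend
# instance `repo`): an index or a commit id is accepted interchangeably and
# translated by _revision_to_commit.
#
#     cs_by_idx = repo.get_changeset(0)              # resolved via commit_ids
#     cs_by_id = repo.get_changeset(some_commit_id)  # passed through as-is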
637 @property
637 @property
638 def in_memory_changeset(self):
638 def in_memory_changeset(self):
639 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
639 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
640 return self.in_memory_commit
640 return self.in_memory_commit
641
641
642 #
643 def get_path_permissions(self, username):
642 def get_path_permissions(self, username):
644 """
643 """
645
646 Returns a path permission checker or None if not supported
644 Returns a path permission checker or None if not supported
647
645
648 :param username: session user name
646 :param username: session user name
649 :return: an instance of BasePathPermissionChecker or None
647 :return: an instance of BasePathPermissionChecker or None
650 """
648 """
651 return None
649 return None
652
650
653
651
654 class BaseCommit(object):
652 class BaseCommit(object):
655 """
653 """
656 Each backend should implement its commit representation.
654 Each backend should implement its commit representation.
657
655
658 **Attributes**
656 **Attributes**
659
657
660 ``repository``
658 ``repository``
661 repository object within which commit exists
659 repository object within which commit exists
662
660
663 ``id``
661 ``id``
664 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
662 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
665 just ``tip``.
663 just ``tip``.
666
664
667 ``raw_id``
665 ``raw_id``
668 raw commit representation (e.g. the full 40-character sha for the git
666 raw commit representation (e.g. the full 40-character sha for the git
669 backend)
667 backend)
670
668
671 ``short_id``
669 ``short_id``
672 shortened (if applicable) version of ``raw_id``; it is a simple
670 shortened (if applicable) version of ``raw_id``; it is a simple
673 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
671 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
674 as ``raw_id`` for subversion
672 as ``raw_id`` for subversion
675
673
676 ``idx``
674 ``idx``
677 commit index
675 commit index
678
676
679 ``files``
677 ``files``
680 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
678 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
681
679
682 ``dirs``
680 ``dirs``
683 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
681 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
684
682
685 ``nodes``
683 ``nodes``
686 combined list of ``Node`` objects
684 combined list of ``Node`` objects
687
685
688 ``author``
686 ``author``
689 author of the commit, as unicode
687 author of the commit, as unicode
690
688
691 ``message``
689 ``message``
692 message of the commit, as unicode
690 message of the commit, as unicode
693
691
694 ``parents``
692 ``parents``
695 list of parent commits
693 list of parent commits
696
694
697 """
695 """
698
696
699 branch = None
697 branch = None
700 """
698 """
701 Depending on the backend this should be set to the branch name of the
699 Depending on the backend this should be set to the branch name of the
702 commit. Backends not supporting branches on commits should leave this
700 commit. Backends not supporting branches on commits should leave this
703 value as ``None``.
701 value as ``None``.
704 """
702 """
705
703
706 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
704 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
707 """
705 """
708 This template is used to generate a default prefix for repository archives
706 This template is used to generate a default prefix for repository archives
709 if no prefix has been specified.
707 if no prefix has been specified.
710 """
708 """
711
709
712 def __str__(self):
710 def __str__(self):
713 return '<%s at %s:%s>' % (
711 return '<%s at %s:%s>' % (
714 self.__class__.__name__, self.idx, self.short_id)
712 self.__class__.__name__, self.idx, self.short_id)
715
713
716 def __repr__(self):
714 def __repr__(self):
717 return self.__str__()
715 return self.__str__()
718
716
719 def __unicode__(self):
717 def __unicode__(self):
720 return u'%s:%s' % (self.idx, self.short_id)
718 return u'%s:%s' % (self.idx, self.short_id)
721
719
722 def __eq__(self, other):
720 def __eq__(self, other):
723 same_instance = isinstance(other, self.__class__)
721 same_instance = isinstance(other, self.__class__)
724 return same_instance and self.raw_id == other.raw_id
722 return same_instance and self.raw_id == other.raw_id
725
723
726 def __json__(self):
724 def __json__(self):
727 parents = []
725 parents = []
728 try:
726 try:
729 for parent in self.parents:
727 for parent in self.parents:
730 parents.append({'raw_id': parent.raw_id})
728 parents.append({'raw_id': parent.raw_id})
731 except NotImplementedError:
729 except NotImplementedError:
732 # empty commit doesn't have parents implemented
730 # empty commit doesn't have parents implemented
733 pass
731 pass
734
732
735 return {
733 return {
736 'short_id': self.short_id,
734 'short_id': self.short_id,
737 'raw_id': self.raw_id,
735 'raw_id': self.raw_id,
738 'revision': self.idx,
736 'revision': self.idx,
739 'message': self.message,
737 'message': self.message,
740 'date': self.date,
738 'date': self.date,
741 'author': self.author,
739 'author': self.author,
742 'parents': parents,
740 'parents': parents,
743 'branch': self.branch
741 'branch': self.branch
744 }
742 }
745
743
746 def _get_refs(self):
744 def _get_refs(self):
747 return {
745 return {
748 'branches': [self.branch],
746 'branches': [self.branch],
749 'bookmarks': getattr(self, 'bookmarks', []),
747 'bookmarks': getattr(self, 'bookmarks', []),
750 'tags': self.tags
748 'tags': self.tags
751 }
749 }
752
750
753 @LazyProperty
751 @LazyProperty
754 def last(self):
752 def last(self):
755 """
753 """
756 ``True`` if this is the last commit in the repository, ``False``
754 ``True`` if this is the last commit in the repository, ``False``
757 otherwise; trying to access this attribute while there are no
755 otherwise; trying to access this attribute while there are no
758 commits raises `EmptyRepositoryError`
756 commits raises `EmptyRepositoryError`
759 """
757 """
760 if self.repository is None:
758 if self.repository is None:
761 raise CommitError("Cannot check if it's most recent commit")
759 raise CommitError("Cannot check if it's most recent commit")
762 return self.raw_id == self.repository.commit_ids[-1]
760 return self.raw_id == self.repository.commit_ids[-1]
763
761
764 @LazyProperty
762 @LazyProperty
765 def parents(self):
763 def parents(self):
766 """
764 """
767 Returns list of parent commits.
765 Returns list of parent commits.
768 """
766 """
769 raise NotImplementedError
767 raise NotImplementedError
770
768
771 @property
769 @property
772 def merge(self):
770 def merge(self):
773 """
771 """
774 Returns a boolean indicating whether this commit is a merge.
772 Returns a boolean indicating whether this commit is a merge.
775 """
773 """
776 return len(self.parents) > 1
774 return len(self.parents) > 1
777
775
778 @LazyProperty
776 @LazyProperty
779 def children(self):
777 def children(self):
780 """
778 """
781 Returns list of child commits.
779 Returns list of child commits.
782 """
780 """
783 raise NotImplementedError
781 raise NotImplementedError
784
782
785 @LazyProperty
783 @LazyProperty
786 def id(self):
784 def id(self):
787 """
785 """
788 Returns string identifying this commit.
786 Returns string identifying this commit.
789 """
787 """
790 raise NotImplementedError
788 raise NotImplementedError
791
789
792 @LazyProperty
790 @LazyProperty
793 def raw_id(self):
791 def raw_id(self):
794 """
792 """
795 Returns raw string identifying this commit.
793 Returns raw string identifying this commit.
796 """
794 """
797 raise NotImplementedError
795 raise NotImplementedError
798
796
799 @LazyProperty
797 @LazyProperty
800 def short_id(self):
798 def short_id(self):
801 """
799 """
802 Returns shortened version of ``raw_id`` attribute, as string,
800 Returns shortened version of ``raw_id`` attribute, as string,
803 identifying this commit, useful for presentation to users.
801 identifying this commit, useful for presentation to users.
804 """
802 """
805 raise NotImplementedError
803 raise NotImplementedError
806
804
807 @LazyProperty
805 @LazyProperty
808 def idx(self):
806 def idx(self):
809 """
807 """
810 Returns integer identifying this commit.
808 Returns integer identifying this commit.
811 """
809 """
812 raise NotImplementedError
810 raise NotImplementedError
813
811
814 @LazyProperty
812 @LazyProperty
815 def committer(self):
813 def committer(self):
816 """
814 """
817 Returns committer for this commit
815 Returns committer for this commit
818 """
816 """
819 raise NotImplementedError
817 raise NotImplementedError
820
818
821 @LazyProperty
819 @LazyProperty
822 def committer_name(self):
820 def committer_name(self):
823 """
821 """
824 Returns committer name for this commit
822 Returns committer name for this commit
825 """
823 """
826
824
827 return author_name(self.committer)
825 return author_name(self.committer)
828
826
829 @LazyProperty
827 @LazyProperty
830 def committer_email(self):
828 def committer_email(self):
831 """
829 """
832 Returns committer email address for this commit
830 Returns committer email address for this commit
833 """
831 """
834
832
835 return author_email(self.committer)
833 return author_email(self.committer)
836
834
837 @LazyProperty
835 @LazyProperty
838 def author(self):
836 def author(self):
839 """
837 """
840 Returns author for this commit
838 Returns author for this commit
841 """
839 """
842
840
843 raise NotImplementedError
841 raise NotImplementedError
844
842
845 @LazyProperty
843 @LazyProperty
846 def author_name(self):
844 def author_name(self):
847 """
845 """
848 Returns author name for this commit
846 Returns author name for this commit
849 """
847 """
850
848
851 return author_name(self.author)
849 return author_name(self.author)
852
850
853 @LazyProperty
851 @LazyProperty
854 def author_email(self):
852 def author_email(self):
855 """
853 """
856 Returns author email address for this commit
854 Returns author email address for this commit
857 """
855 """
858
856
859 return author_email(self.author)
857 return author_email(self.author)
860
858
861 def get_file_mode(self, path):
859 def get_file_mode(self, path):
862 """
860 """
863 Returns stat mode of the file at `path`.
861 Returns stat mode of the file at `path`.
864 """
862 """
865 raise NotImplementedError
863 raise NotImplementedError
866
864
867 def is_link(self, path):
865 def is_link(self, path):
868 """
866 """
869 Returns ``True`` if given `path` is a symlink
867 Returns ``True`` if given `path` is a symlink
870 """
868 """
871 raise NotImplementedError
869 raise NotImplementedError
872
870
873 def get_file_content(self, path):
871 def get_file_content(self, path):
874 """
872 """
875 Returns content of the file at the given `path`.
873 Returns content of the file at the given `path`.
876 """
874 """
877 raise NotImplementedError
875 raise NotImplementedError
878
876
879 def get_file_size(self, path):
877 def get_file_size(self, path):
880 """
878 """
881 Returns size of the file at the given `path`.
879 Returns size of the file at the given `path`.
882 """
880 """
883 raise NotImplementedError
881 raise NotImplementedError
884
882
885 def get_file_commit(self, path, pre_load=None):
883 def get_file_commit(self, path, pre_load=None):
886 """
884 """
887 Returns last commit of the file at the given `path`.
885 Returns last commit of the file at the given `path`.
888
886
889 :param pre_load: Optional. List of commit attributes to load.
887 :param pre_load: Optional. List of commit attributes to load.
890 """
888 """
891 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
889 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
892 if not commits:
890 if not commits:
893 raise RepositoryError(
891 raise RepositoryError(
894 'Failed to fetch history for path {}. '
892 'Failed to fetch history for path {}. '
895 'Please check if such path exists in your repository'.format(
893 'Please check if such path exists in your repository'.format(
896 path))
894 path))
897 return commits[0]
895 return commits[0]
898
896
899 def get_file_history(self, path, limit=None, pre_load=None):
897 def get_file_history(self, path, limit=None, pre_load=None):
900 """
898 """
901 Returns history of file as reversed list of :class:`BaseCommit`
899 Returns history of file as reversed list of :class:`BaseCommit`
902 objects for which file at given `path` has been modified.
900 objects for which file at given `path` has been modified.
903
901
904 :param limit: Optional. Allows to limit the size of the returned
902 :param limit: Optional. Allows to limit the size of the returned
905 history. This is intended as a hint to the underlying backend, so
903 history. This is intended as a hint to the underlying backend, so
906 that it can apply optimizations depending on the limit.
904 that it can apply optimizations depending on the limit.
907 :param pre_load: Optional. List of commit attributes to load.
905 :param pre_load: Optional. List of commit attributes to load.
908 """
906 """
909 raise NotImplementedError
907 raise NotImplementedError
910
908
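# Illustrative sketch (assumed concrete commit instance `commit` and existing
# path): the most recent commits touching a file, per the docstrings above.
#
#     history = commit.get_file_history('README.rst', limit=5,
#                                       pre_load=['author', 'date'])
#     last_change = commit.get_file_commit('README.rst')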
911 def get_file_annotate(self, path, pre_load=None):
909 def get_file_annotate(self, path, pre_load=None):
912 """
910 """
913 Returns a generator of four-element tuples with
911 Returns a generator of four-element tuples with
914 lineno, sha, commit lazy loader and line.
912 lineno, sha, commit lazy loader and line.
915
913
916 :param pre_load: Optional. List of commit attributes to load.
914 :param pre_load: Optional. List of commit attributes to load.
917 """
915 """
918 raise NotImplementedError
916 raise NotImplementedError
919
917
920 def get_nodes(self, path):
918 def get_nodes(self, path):
921 """
919 """
922 Returns combined ``DirNode`` and ``FileNode`` objects list representing
920 Returns combined ``DirNode`` and ``FileNode`` objects list representing
923 state of commit at the given ``path``.
921 state of commit at the given ``path``.
924
922
925 :raises ``CommitError``: if node at the given ``path`` is not
923 :raises ``CommitError``: if node at the given ``path`` is not
926 instance of ``DirNode``
924 instance of ``DirNode``
927 """
925 """
928 raise NotImplementedError
926 raise NotImplementedError
929
927
930 def get_node(self, path):
928 def get_node(self, path):
931 """
929 """
932 Returns ``Node`` object from the given ``path``.
930 Returns ``Node`` object from the given ``path``.
933
931
934 :raises ``NodeDoesNotExistError``: if there is no node at the given
932 :raises ``NodeDoesNotExistError``: if there is no node at the given
935 ``path``
933 ``path``
936 """
934 """
937 raise NotImplementedError
935 raise NotImplementedError
938
936
939 def get_largefile_node(self, path):
937 def get_largefile_node(self, path):
940 """
938 """
941 Returns the path to the largefile from Mercurial/Git-lfs storage,
939 Returns the path to the largefile from Mercurial/Git-lfs storage,
942 or None if it's not a largefile node.
940 or None if it's not a largefile node.
943 """
941 """
944 return None
942 return None
945
943
946 def archive_repo(self, file_path, kind='tgz', subrepos=None,
944 def archive_repo(self, file_path, kind='tgz', subrepos=None,
947 prefix=None, write_metadata=False, mtime=None):
945 prefix=None, write_metadata=False, mtime=None):
948 """
946 """
949 Creates an archive containing the contents of the repository.
947 Creates an archive containing the contents of the repository.
950
948
951 :param file_path: path to the file in which to create the archive.
949 :param file_path: path to the file in which to create the archive.
952 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
950 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
953 :param prefix: name of root directory in archive.
951 :param prefix: name of root directory in archive.
954 Default is repository name and commit's short_id joined with dash:
952 Default is repository name and commit's short_id joined with dash:
955 ``"{repo_name}-{short_id}"``.
953 ``"{repo_name}-{short_id}"``.
956 :param write_metadata: write a metadata file into archive.
954 :param write_metadata: write a metadata file into archive.
957 :param mtime: custom modification time for archive creation, defaults
955 :param mtime: custom modification time for archive creation, defaults
958 to the commit date if not given.
956 to the commit date if not given.
959
957
960 :raise VCSError: If prefix has a problem.
958 :raise VCSError: If prefix has a problem.
961 """
959 """
962 allowed_kinds = settings.ARCHIVE_SPECS.keys()
960 allowed_kinds = settings.ARCHIVE_SPECS.keys()
963 if kind not in allowed_kinds:
961 if kind not in allowed_kinds:
964 raise ImproperArchiveTypeError(
962 raise ImproperArchiveTypeError(
965 'Archive kind (%s) not supported, use one of %s' %
963 'Archive kind (%s) not supported, use one of %s' %
966 (kind, allowed_kinds))
964 (kind, allowed_kinds))
967
965
968 prefix = self._validate_archive_prefix(prefix)
966 prefix = self._validate_archive_prefix(prefix)
969
967
970 mtime = mtime or time.mktime(self.date.timetuple())
968 mtime = mtime or time.mktime(self.date.timetuple())
971
969
972 file_info = []
970 file_info = []
973 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
971 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
974 for _r, _d, files in cur_rev.walk('/'):
972 for _r, _d, files in cur_rev.walk('/'):
975 for f in files:
973 for f in files:
976 f_path = os.path.join(prefix, f.path)
974 f_path = os.path.join(prefix, f.path)
977 file_info.append(
975 file_info.append(
978 (f_path, f.mode, f.is_link(), f.raw_bytes))
976 (f_path, f.mode, f.is_link(), f.raw_bytes))
979
977
980 if write_metadata:
978 if write_metadata:
981 metadata = [
979 metadata = [
982 ('repo_name', self.repository.name),
980 ('repo_name', self.repository.name),
983 ('rev', self.raw_id),
981 ('rev', self.raw_id),
984 ('create_time', mtime),
982 ('create_time', mtime),
985 ('branch', self.branch),
983 ('branch', self.branch),
986 ('tags', ','.join(self.tags)),
984 ('tags', ','.join(self.tags)),
987 ]
985 ]
988 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
986 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
989 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
987 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
990
988
991 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
989 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
992
990
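# Illustrative sketch (assumed commit instance `commit` and a writable target
# path): a gzipped tarball with the default prefix and a metadata file,
# per the archive_repo docstring above.
#
#     commit.archive_repo('/tmp/myrepo.tgz', kind='tgz', write_metadata=True)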
993 def _validate_archive_prefix(self, prefix):
991 def _validate_archive_prefix(self, prefix):
994 if prefix is None:
992 if prefix is None:
995 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
993 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
996 repo_name=safe_str(self.repository.name),
994 repo_name=safe_str(self.repository.name),
997 short_id=self.short_id)
995 short_id=self.short_id)
998 elif not isinstance(prefix, str):
996 elif not isinstance(prefix, str):
999 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
997 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1000 elif prefix.startswith('/'):
998 elif prefix.startswith('/'):
1001 raise VCSError("Prefix cannot start with leading slash")
999 raise VCSError("Prefix cannot start with leading slash")
1002 elif prefix.strip() == '':
1000 elif prefix.strip() == '':
1003 raise VCSError("Prefix cannot be empty")
1001 raise VCSError("Prefix cannot be empty")
1004 return prefix
1002 return prefix
1005
1003
1006 @LazyProperty
1004 @LazyProperty
1007 def root(self):
1005 def root(self):
1008 """
1006 """
1009 Returns ``RootNode`` object for this commit.
1007 Returns ``RootNode`` object for this commit.
1010 """
1008 """
1011 return self.get_node('')
1009 return self.get_node('')
1012
1010
1013 def next(self, branch=None):
1011 def next(self, branch=None):
1014 """
1012 """
1015 Returns the next commit from the current one; if branch is given it
1013 Returns the next commit from the current one; if branch is given it
1016 will return the next commit belonging to this branch
1014 will return the next commit belonging to this branch
1017
1015
1018 :param branch: show commits within the given named branch
1016 :param branch: show commits within the given named branch
1019 """
1017 """
1020 indexes = xrange(self.idx + 1, self.repository.count())
1018 indexes = xrange(self.idx + 1, self.repository.count())
1021 return self._find_next(indexes, branch)
1019 return self._find_next(indexes, branch)
1022
1020
1023 def prev(self, branch=None):
1021 def prev(self, branch=None):
1024 """
1022 """
1025 Returns the previous commit from the current one; if branch is given
1023 Returns the previous commit from the current one; if branch is given
1026 it will return the previous commit belonging to this branch
1024 it will return the previous commit belonging to this branch
1027
1025
1028 :param branch: show commit within the given named branch
1026 :param branch: show commit within the given named branch
1029 """
1027 """
1030 indexes = xrange(self.idx - 1, -1, -1)
1028 indexes = xrange(self.idx - 1, -1, -1)
1031 return self._find_next(indexes, branch)
1029 return self._find_next(indexes, branch)
1032
1030
1033 def _find_next(self, indexes, branch=None):
1031 def _find_next(self, indexes, branch=None):
1034 if branch and self.branch != branch:
1032 if branch and self.branch != branch:
1035 raise VCSError('Branch option used on commit not belonging '
1033 raise VCSError('Branch option used on commit not belonging '
1036 'to that branch')
1034 'to that branch')
1037
1035
1038 for next_idx in indexes:
1036 for next_idx in indexes:
1039 commit = self.repository.get_commit(commit_idx=next_idx)
1037 commit = self.repository.get_commit(commit_idx=next_idx)
1040 if branch and branch != commit.branch:
1038 if branch and branch != commit.branch:
1041 continue
1039 continue
1042 return commit
1040 return commit
1043 raise CommitDoesNotExistError
1041 raise CommitDoesNotExistError
1044
1042
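# Illustrative sketch (assumed commit instance `commit`): walking neighbouring
# commits, optionally constrained to the commit's own branch via _find_next.
#
#     newer = commit.next(branch=commit.branch)
#     older = commit.prev()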
1045 def diff(self, ignore_whitespace=True, context=3):
1043 def diff(self, ignore_whitespace=True, context=3):
1046 """
1044 """
1047 Returns a `Diff` object representing the change made by this commit.
1045 Returns a `Diff` object representing the change made by this commit.
1048 """
1046 """
1049 parent = (
1047 parent = (
1050 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1048 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1051 diff = self.repository.get_diff(
1049 diff = self.repository.get_diff(
1052 parent, self,
1050 parent, self,
1053 ignore_whitespace=ignore_whitespace,
1051 ignore_whitespace=ignore_whitespace,
1054 context=context)
1052 context=context)
1055 return diff
1053 return diff
1056
1054
1057 @LazyProperty
1055 @LazyProperty
1058 def added(self):
1056 def added(self):
1059 """
1057 """
1060 Returns list of added ``FileNode`` objects.
1058 Returns list of added ``FileNode`` objects.
1061 """
1059 """
1062 raise NotImplementedError
1060 raise NotImplementedError
1063
1061
1064 @LazyProperty
1062 @LazyProperty
1065 def changed(self):
1063 def changed(self):
1066 """
1064 """
1067 Returns list of modified ``FileNode`` objects.
1065 Returns list of modified ``FileNode`` objects.
1068 """
1066 """
1069 raise NotImplementedError
1067 raise NotImplementedError
1070
1068
1071 @LazyProperty
1069 @LazyProperty
1072 def removed(self):
1070 def removed(self):
1073 """
1071 """
1074 Returns list of removed ``FileNode`` objects.
1072 Returns list of removed ``FileNode`` objects.
1075 """
1073 """
1076 raise NotImplementedError
1074 raise NotImplementedError
1077
1075
1078 @LazyProperty
1076 @LazyProperty
1079 def size(self):
1077 def size(self):
1080 """
1078 """
1081 Returns total number of bytes from contents of all filenodes.
1079 Returns total number of bytes from contents of all filenodes.
1082 """
1080 """
1083 return sum((node.size for node in self.get_filenodes_generator()))
1081 return sum((node.size for node in self.get_filenodes_generator()))
1084
1082
1085 def walk(self, topurl=''):
1083 def walk(self, topurl=''):
1086 """
1084 """
1087 Similar to the os.walk method. Instead of a filesystem it walks
1085 Similar to the os.walk method. Instead of a filesystem it walks
1088 through the commit starting at given ``topurl``. Returns a generator of tuples
1086 through the commit starting at given ``topurl``. Returns a generator of tuples
1089 (topnode, dirnodes, filenodes).
1087 (topnode, dirnodes, filenodes).
1090 """
1088 """
1091 topnode = self.get_node(topurl)
1089 topnode = self.get_node(topurl)
1092 if not topnode.is_dir():
1090 if not topnode.is_dir():
1093 return
1091 return
1094 yield (topnode, topnode.dirs, topnode.files)
1092 yield (topnode, topnode.dirs, topnode.files)
1095 for dirnode in topnode.dirs:
1093 for dirnode in topnode.dirs:
1096 for tup in self.walk(dirnode.path):
1094 for tup in self.walk(dirnode.path):
1097 yield tup
1095 yield tup
1098
1096
1099 def get_filenodes_generator(self):
1097 def get_filenodes_generator(self):
1100 """
1098 """
1101 Returns generator that yields *all* file nodes.
1099 Returns generator that yields *all* file nodes.
1102 """
1100 """
1103 for topnode, dirs, files in self.walk():
1101 for topnode, dirs, files in self.walk():
1104 for node in files:
1102 for node in files:
1105 yield node
1103 yield node
1106
1104
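# Illustrative sketch (assumed commit instance `commit` and a 'docs'
# directory): collecting every file path reachable from a sub-directory using
# walk() described above.
#
#     doc_paths = [f.path
#                  for _top, _dirs, files in commit.walk('docs')
#                  for f in files]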
1107 #
1105 #
1108 # Utilities for sub classes to support consistent behavior
1106 # Utilities for sub classes to support consistent behavior
1109 #
1107 #
1110
1108
1111 def no_node_at_path(self, path):
1109 def no_node_at_path(self, path):
1112 return NodeDoesNotExistError(
1110 return NodeDoesNotExistError(
1113 u"There is no file nor directory at the given path: "
1111 u"There is no file nor directory at the given path: "
1114 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1112 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1115
1113
1116 def _fix_path(self, path):
1114 def _fix_path(self, path):
1117 """
1115 """
1118 Paths are stored without a trailing slash so we need to get rid of it if
1116 Paths are stored without a trailing slash so we need to get rid of it if
1119 needed.
1117 needed.
1120 """
1118 """
1121 return path.rstrip('/')
1119 return path.rstrip('/')
1122
1120
1123 #
1121 #
1124 # Deprecated API based on changesets
1122 # Deprecated API based on changesets
1125 #
1123 #
1126
1124
1127 @property
1125 @property
1128 def revision(self):
1126 def revision(self):
1129 warnings.warn("Use idx instead", DeprecationWarning)
1127 warnings.warn("Use idx instead", DeprecationWarning)
1130 return self.idx
1128 return self.idx
1131
1129
1132 @revision.setter
1130 @revision.setter
1133 def revision(self, value):
1131 def revision(self, value):
1134 warnings.warn("Use idx instead", DeprecationWarning)
1132 warnings.warn("Use idx instead", DeprecationWarning)
1135 self.idx = value
1133 self.idx = value
1136
1134
1137 def get_file_changeset(self, path):
1135 def get_file_changeset(self, path):
1138 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1136 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1139 return self.get_file_commit(path)
1137 return self.get_file_commit(path)
1140
1138
1141
1139
1142 class BaseChangesetClass(type):
1140 class BaseChangesetClass(type):
1143
1141
1144 def __instancecheck__(self, instance):
1142 def __instancecheck__(self, instance):
1145 return isinstance(instance, BaseCommit)
1143 return isinstance(instance, BaseCommit)
1146
1144
1147
1145
1148 class BaseChangeset(BaseCommit):
1146 class BaseChangeset(BaseCommit):
1149
1147
1150 __metaclass__ = BaseChangesetClass
1148 __metaclass__ = BaseChangesetClass
1151
1149
1152 def __new__(cls, *args, **kwargs):
1150 def __new__(cls, *args, **kwargs):
1153 warnings.warn(
1151 warnings.warn(
1154 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1152 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1155 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1153 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1156
1154
1157
1155
1158 class BaseInMemoryCommit(object):
1156 class BaseInMemoryCommit(object):
1159 """
1157 """
1160 Represents differences between repository's state (most recent head) and
1158 Represents differences between repository's state (most recent head) and
1161 changes made *in place*.
1159 changes made *in place*.
1162
1160
1163 **Attributes**
1161 **Attributes**
1164
1162
1165 ``repository``
1163 ``repository``
1166 repository object for this in-memory-commit
1164 repository object for this in-memory-commit
1167
1165
1168 ``added``
1166 ``added``
1169 list of ``FileNode`` objects marked as *added*
1167 list of ``FileNode`` objects marked as *added*
1170
1168
1171 ``changed``
1169 ``changed``
1172 list of ``FileNode`` objects marked as *changed*
1170 list of ``FileNode`` objects marked as *changed*
1173
1171
1174 ``removed``
1172 ``removed``
1175 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1173 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1176 *removed*
1174 *removed*
1177
1175
1178 ``parents``
1176 ``parents``
1179 list of :class:`BaseCommit` instances representing parents of
1177 list of :class:`BaseCommit` instances representing parents of
1180 in-memory commit. Should always be 2-element sequence.
1178 in-memory commit. Should always be 2-element sequence.
1181
1179
1182 """
1180 """
1183
1181
1184 def __init__(self, repository):
1182 def __init__(self, repository):
1185 self.repository = repository
1183 self.repository = repository
1186 self.added = []
1184 self.added = []
1187 self.changed = []
1185 self.changed = []
1188 self.removed = []
1186 self.removed = []
1189 self.parents = []
1187 self.parents = []
1190
1188
1191 def add(self, *filenodes):
1189 def add(self, *filenodes):
1192 """
1190 """
1193 Marks given ``FileNode`` objects as *to be committed*.
1191 Marks given ``FileNode`` objects as *to be committed*.
1194
1192
1195 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1193 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1196 latest commit
1194 latest commit
1197 :raises ``NodeAlreadyAddedError``: if node with same path is already
1195 :raises ``NodeAlreadyAddedError``: if node with same path is already
1198 marked as *added*
1196 marked as *added*
1199 """
1197 """
1200 # Check if not already marked as *added* first
1198 # Check if not already marked as *added* first
1201 for node in filenodes:
1199 for node in filenodes:
1202 if node.path in (n.path for n in self.added):
1200 if node.path in (n.path for n in self.added):
1203 raise NodeAlreadyAddedError(
1201 raise NodeAlreadyAddedError(
1204 "Such FileNode %s is already marked for addition"
1202 "Such FileNode %s is already marked for addition"
1205 % node.path)
1203 % node.path)
1206 for node in filenodes:
1204 for node in filenodes:
1207 self.added.append(node)
1205 self.added.append(node)
1208
1206
1209 def change(self, *filenodes):
1207 def change(self, *filenodes):
1210 """
1208 """
1211 Marks given ``FileNode`` objects to be *changed* in next commit.
1209 Marks given ``FileNode`` objects to be *changed* in next commit.
1212
1210
1213 :raises ``EmptyRepositoryError``: if there are no commits yet
1211 :raises ``EmptyRepositoryError``: if there are no commits yet
1214 :raises ``NodeAlreadyExistsError``: if node with same path is already
1212 :raises ``NodeAlreadyExistsError``: if node with same path is already
1215 marked to be *changed*
1213 marked to be *changed*
1216 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1214 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1217 marked to be *removed*
1215 marked to be *removed*
1218 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1216 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1219 commit
1217 commit
1220 :raises ``NodeNotChangedError``: if node hasn't really been changed
1218 :raises ``NodeNotChangedError``: if node hasn't really been changed
1221 """
1219 """
1222 for node in filenodes:
1220 for node in filenodes:
1223 if node.path in (n.path for n in self.removed):
1221 if node.path in (n.path for n in self.removed):
1224 raise NodeAlreadyRemovedError(
1222 raise NodeAlreadyRemovedError(
1225 "Node at %s is already marked as removed" % node.path)
1223 "Node at %s is already marked as removed" % node.path)
1226 try:
1224 try:
1227 self.repository.get_commit()
1225 self.repository.get_commit()
1228 except EmptyRepositoryError:
1226 except EmptyRepositoryError:
1229 raise EmptyRepositoryError(
1227 raise EmptyRepositoryError(
1230 "Nothing to change - try to *add* new nodes rather than "
1228 "Nothing to change - try to *add* new nodes rather than "
1231 "changing them")
1229 "changing them")
1232 for node in filenodes:
1230 for node in filenodes:
1233 if node.path in (n.path for n in self.changed):
1231 if node.path in (n.path for n in self.changed):
1234 raise NodeAlreadyChangedError(
1232 raise NodeAlreadyChangedError(
1235 "Node at '%s' is already marked as changed" % node.path)
1233 "Node at '%s' is already marked as changed" % node.path)
1236 self.changed.append(node)
1234 self.changed.append(node)
1237
1235
1238 def remove(self, *filenodes):
1236 def remove(self, *filenodes):
1239 """
1237 """
1240 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1238 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1241 *removed* in next commit.
1239 *removed* in next commit.
1242
1240
1243 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1241 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1244 be *removed*
1242 be *removed*
1245 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1243 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1246 be *changed*
1244 be *changed*
1247 """
1245 """
1248 for node in filenodes:
1246 for node in filenodes:
1249 if node.path in (n.path for n in self.removed):
1247 if node.path in (n.path for n in self.removed):
1250 raise NodeAlreadyRemovedError(
1248 raise NodeAlreadyRemovedError(
1251 "Node is already marked to for removal at %s" % node.path)
1249 "Node is already marked to for removal at %s" % node.path)
1252 if node.path in (n.path for n in self.changed):
1250 if node.path in (n.path for n in self.changed):
1253 raise NodeAlreadyChangedError(
1251 raise NodeAlreadyChangedError(
1254 "Node is already marked to be changed at %s" % node.path)
1252 "Node is already marked to be changed at %s" % node.path)
1255 # We only mark node as *removed* - real removal is done by
1253 # We only mark node as *removed* - real removal is done by
1256 # commit method
1254 # commit method
1257 self.removed.append(node)
1255 self.removed.append(node)
1258
1256
1259 def reset(self):
1257 def reset(self):
1260 """
1258 """
1261 Resets this instance to initial state (cleans ``added``, ``changed``
1259 Resets this instance to initial state (cleans ``added``, ``changed``
1262 and ``removed`` lists).
1260 and ``removed`` lists).
1263 """
1261 """
1264 self.added = []
1262 self.added = []
1265 self.changed = []
1263 self.changed = []
1266 self.removed = []
1264 self.removed = []
1267 self.parents = []
1265 self.parents = []
1268
1266
1269 def get_ipaths(self):
1267 def get_ipaths(self):
1270 """
1268 """
1271 Returns generator of paths from nodes marked as added, changed or
1269 Returns generator of paths from nodes marked as added, changed or
1272 removed.
1270 removed.
1273 """
1271 """
1274 for node in itertools.chain(self.added, self.changed, self.removed):
1272 for node in itertools.chain(self.added, self.changed, self.removed):
1275 yield node.path
1273 yield node.path
1276
1274
1277 def get_paths(self):
1275 def get_paths(self):
1278 """
1276 """
1279 Returns list of paths from nodes marked as added, changed or removed.
1277 Returns list of paths from nodes marked as added, changed or removed.
1280 """
1278 """
1281 return list(self.get_ipaths())
1279 return list(self.get_ipaths())
1282
1280
1283 def check_integrity(self, parents=None):
1281 def check_integrity(self, parents=None):
1284 """
1282 """
1285 Checks in-memory commit's integrity. Also, sets parents if not
1283 Checks in-memory commit's integrity. Also, sets parents if not
1286 already set.
1284 already set.
1287
1285
1288 :raises CommitError: if any error occurs (e.g.
1286 :raises CommitError: if any error occurs (e.g.
1289 ``NodeDoesNotExistError``).
1287 ``NodeDoesNotExistError``).
1290 """
1288 """
1291 if not self.parents:
1289 if not self.parents:
1292 parents = parents or []
1290 parents = parents or []
1293 if len(parents) == 0:
1291 if len(parents) == 0:
1294 try:
1292 try:
1295 parents = [self.repository.get_commit(), None]
1293 parents = [self.repository.get_commit(), None]
1296 except EmptyRepositoryError:
1294 except EmptyRepositoryError:
1297 parents = [None, None]
1295 parents = [None, None]
1298 elif len(parents) == 1:
1296 elif len(parents) == 1:
1299 parents += [None]
1297 parents += [None]
1300 self.parents = parents
1298 self.parents = parents
1301
1299
1302 # Local parents, only if not None
1300 # Local parents, only if not None
1303 parents = [p for p in self.parents if p]
1301 parents = [p for p in self.parents if p]
1304
1302
1305 # Check nodes marked as added
1303 # Check nodes marked as added
1306 for p in parents:
1304 for p in parents:
1307 for node in self.added:
1305 for node in self.added:
1308 try:
1306 try:
1309 p.get_node(node.path)
1307 p.get_node(node.path)
1310 except NodeDoesNotExistError:
1308 except NodeDoesNotExistError:
1311 pass
1309 pass
1312 else:
1310 else:
1313 raise NodeAlreadyExistsError(
1311 raise NodeAlreadyExistsError(
1314 "Node `%s` already exists at %s" % (node.path, p))
1312 "Node `%s` already exists at %s" % (node.path, p))
1315
1313
1316 # Check nodes marked as changed
1314 # Check nodes marked as changed
1317 missing = set(self.changed)
1315 missing = set(self.changed)
1318 not_changed = set(self.changed)
1316 not_changed = set(self.changed)
1319 if self.changed and not parents:
1317 if self.changed and not parents:
1320 raise NodeDoesNotExistError(str(self.changed[0].path))
1318 raise NodeDoesNotExistError(str(self.changed[0].path))
1321 for p in parents:
1319 for p in parents:
1322 for node in self.changed:
1320 for node in self.changed:
1323 try:
1321 try:
1324 old = p.get_node(node.path)
1322 old = p.get_node(node.path)
1325 missing.remove(node)
1323 missing.remove(node)
1326 # if content actually changed, remove node from not_changed
1324 # if content actually changed, remove node from not_changed
1327 if old.content != node.content:
1325 if old.content != node.content:
1328 not_changed.remove(node)
1326 not_changed.remove(node)
1329 except NodeDoesNotExistError:
1327 except NodeDoesNotExistError:
1330 pass
1328 pass
1331 if self.changed and missing:
1329 if self.changed and missing:
1332 raise NodeDoesNotExistError(
1330 raise NodeDoesNotExistError(
1333 "Node `%s` marked as modified but missing in parents: %s"
1331 "Node `%s` marked as modified but missing in parents: %s"
1334 % (node.path, parents))
1332 % (node.path, parents))
1335
1333
1336 if self.changed and not_changed:
1334 if self.changed and not_changed:
1337 raise NodeNotChangedError(
1335 raise NodeNotChangedError(
1338 "Node `%s` wasn't actually changed (parents: %s)"
1336 "Node `%s` wasn't actually changed (parents: %s)"
1339 % (not_changed.pop().path, parents))
1337 % (not_changed.pop().path, parents))
1340
1338
1341 # Check nodes marked as removed
1339 # Check nodes marked as removed
1342 if self.removed and not parents:
1340 if self.removed and not parents:
1343 raise NodeDoesNotExistError(
1341 raise NodeDoesNotExistError(
1344 "Cannot remove node at %s as there "
1342 "Cannot remove node at %s as there "
1345 "were no parents specified" % self.removed[0].path)
1343 "were no parents specified" % self.removed[0].path)
1346 really_removed = set()
1344 really_removed = set()
1347 for p in parents:
1345 for p in parents:
1348 for node in self.removed:
1346 for node in self.removed:
1349 try:
1347 try:
1350 p.get_node(node.path)
1348 p.get_node(node.path)
1351 really_removed.add(node)
1349 really_removed.add(node)
1352 except CommitError:
1350 except CommitError:
1353 pass
1351 pass
1354 not_removed = set(self.removed) - really_removed
1352 not_removed = set(self.removed) - really_removed
1355 if not_removed:
1353 if not_removed:
1356 # TODO: johbo: This code branch does not seem to be covered
1354 # TODO: johbo: This code branch does not seem to be covered
1357 raise NodeDoesNotExistError(
1355 raise NodeDoesNotExistError(
1358 "Cannot remove node at %s from "
1356 "Cannot remove node at %s from "
1359 "following parents: %s" % (not_removed, parents))
1357 "following parents: %s" % (not_removed, parents))
1360
1358
1361 def commit(
1359 def commit(
1362 self, message, author, parents=None, branch=None, date=None,
1360 self, message, author, parents=None, branch=None, date=None,
1363 **kwargs):
1361 **kwargs):
1364 """
1362 """
1365 Performs in-memory commit (doesn't check workdir in any way) and
1363 Performs in-memory commit (doesn't check workdir in any way) and
1366 returns newly created :class:`BaseCommit`. Updates repository's
1364 returns newly created :class:`BaseCommit`. Updates repository's
1367 attribute `commits`.
1365 attribute `commits`.
1368
1366
1369 .. note::
1367 .. note::
1370
1368
1371 When overriding this method, each backend should call
1369 When overriding this method, each backend should call
1372 ``self.check_integrity(parents)`` first.
1370 ``self.check_integrity(parents)`` first.
1373
1371
1374 :param message: message of the commit
1372 :param message: message of the commit
1375 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1373 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1376 :param parents: single parent or sequence of parents from which commit
1374 :param parents: single parent or sequence of parents from which commit
1377 would be derived
1375 would be derived
1378 :param date: ``datetime.datetime`` instance. Defaults to
1376 :param date: ``datetime.datetime`` instance. Defaults to
1379 ``datetime.datetime.now()``.
1377 ``datetime.datetime.now()``.
1380 :param branch: branch name, as string. If none given, the backend's
1378 :param branch: branch name, as string. If none given, the backend's
1381 default branch is used.
1379 default branch is used.
1382
1380
1383 :raises ``CommitError``: if any error occurs while committing
1381 :raises ``CommitError``: if any error occurs while committing
1384 """
1382 """
1385 raise NotImplementedError
1383 raise NotImplementedError
1386
1384
1387
1385
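A minimal usage sketch of the in-memory commit workflow described above, assuming a concrete Git backend; the repository path, file name and author are illustrative only, and ``in_memory_commit`` is the backend-specific subclass of the class above:

from rhodecode.lib.vcs.backends.git import GitRepository
from rhodecode.lib.vcs.nodes import FileNode

repo = GitRepository('/path/to/some/repo')   # illustrative path
imc = repo.in_memory_commit                  # backend-specific BaseInMemoryCommit

# add() only stages the node; nothing is written until commit()
imc.add(FileNode('docs/README.rst', content='hello\n'))

# commit() runs check_integrity() first and then creates the new commit
new_commit = imc.commit(
    message=u'Add README',
    author=u'Joe Doe <joe.doe@example.com>')

# clear the added/changed/removed/parents lists before staging the next commit
imc.reset()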
1388 class BaseInMemoryChangesetClass(type):
1386 class BaseInMemoryChangesetClass(type):
1389
1387
1390 def __instancecheck__(self, instance):
1388 def __instancecheck__(self, instance):
1391 return isinstance(instance, BaseInMemoryCommit)
1389 return isinstance(instance, BaseInMemoryCommit)
1392
1390
1393
1391
1394 class BaseInMemoryChangeset(BaseInMemoryCommit):
1392 class BaseInMemoryChangeset(BaseInMemoryCommit):
1395
1393
1396 __metaclass__ = BaseInMemoryChangesetClass
1394 __metaclass__ = BaseInMemoryChangesetClass
1397
1395
1398 def __new__(cls, *args, **kwargs):
1396 def __new__(cls, *args, **kwargs):
1399 warnings.warn(
1397 warnings.warn(
1400 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1398 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1401 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1399 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1402
1400
1403
1401
1404 class EmptyCommit(BaseCommit):
1402 class EmptyCommit(BaseCommit):
1405 """
1403 """
1406 A dummy empty commit. It's possible to pass a hash when creating
1404 A dummy empty commit. It's possible to pass a hash when creating
1407 an EmptyCommit.
1405 an EmptyCommit.
1408 """
1406 """
1409
1407
1410 def __init__(
1408 def __init__(
1411 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1409 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1412 message='', author='', date=None):
1410 message='', author='', date=None):
1413 self._empty_commit_id = commit_id
1411 self._empty_commit_id = commit_id
1414 # TODO: johbo: Solve idx parameter, default value does not make
1412 # TODO: johbo: Solve idx parameter, default value does not make
1415 # too much sense
1413 # too much sense
1416 self.idx = idx
1414 self.idx = idx
1417 self.message = message
1415 self.message = message
1418 self.author = author
1416 self.author = author
1419 self.date = date or datetime.datetime.fromtimestamp(0)
1417 self.date = date or datetime.datetime.fromtimestamp(0)
1420 self.repository = repo
1418 self.repository = repo
1421 self.alias = alias
1419 self.alias = alias
1422
1420
1423 @LazyProperty
1421 @LazyProperty
1424 def raw_id(self):
1422 def raw_id(self):
1425 """
1423 """
1426 Returns raw string identifying this commit, useful for web
1424 Returns raw string identifying this commit, useful for web
1427 representation.
1425 representation.
1428 """
1426 """
1429
1427
1430 return self._empty_commit_id
1428 return self._empty_commit_id
1431
1429
1432 @LazyProperty
1430 @LazyProperty
1433 def branch(self):
1431 def branch(self):
1434 if self.alias:
1432 if self.alias:
1435 from rhodecode.lib.vcs.backends import get_backend
1433 from rhodecode.lib.vcs.backends import get_backend
1436 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1434 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1437
1435
1438 @LazyProperty
1436 @LazyProperty
1439 def short_id(self):
1437 def short_id(self):
1440 return self.raw_id[:12]
1438 return self.raw_id[:12]
1441
1439
1442 @LazyProperty
1440 @LazyProperty
1443 def id(self):
1441 def id(self):
1444 return self.raw_id
1442 return self.raw_id
1445
1443
1446 def get_file_commit(self, path):
1444 def get_file_commit(self, path):
1447 return self
1445 return self
1448
1446
1449 def get_file_content(self, path):
1447 def get_file_content(self, path):
1450 return u''
1448 return u''
1451
1449
1452 def get_file_size(self, path):
1450 def get_file_size(self, path):
1453 return 0
1451 return 0
1454
1452
1455
1453
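EmptyCommit acts as a null-object stand-in wherever a commit-like value is needed before any real commit exists; a short sketch of the behaviour defined above (the alias is illustrative):

from rhodecode.lib.vcs.backends.base import EmptyCommit

empty = EmptyCommit(alias='git')
assert empty.raw_id == '0' * 40               # default placeholder hash
assert empty.short_id == '0' * 12             # first 12 characters of raw_id
assert empty.get_file_content('any/path') == u''
assert empty.get_file_size('any/path') == 0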
1456 class EmptyChangesetClass(type):
1454 class EmptyChangesetClass(type):
1457
1455
1458 def __instancecheck__(self, instance):
1456 def __instancecheck__(self, instance):
1459 return isinstance(instance, EmptyCommit)
1457 return isinstance(instance, EmptyCommit)
1460
1458
1461
1459
1462 class EmptyChangeset(EmptyCommit):
1460 class EmptyChangeset(EmptyCommit):
1463
1461
1464 __metaclass__ = EmptyChangesetClass
1462 __metaclass__ = EmptyChangesetClass
1465
1463
1466 def __new__(cls, *args, **kwargs):
1464 def __new__(cls, *args, **kwargs):
1467 warnings.warn(
1465 warnings.warn(
1468 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1466 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1469 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1467 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1470
1468
1471 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1469 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1472 alias=None, revision=-1, message='', author='', date=None):
1470 alias=None, revision=-1, message='', author='', date=None):
1473 if requested_revision is not None:
1471 if requested_revision is not None:
1474 warnings.warn(
1472 warnings.warn(
1475 "Parameter requested_revision not supported anymore",
1473 "Parameter requested_revision not supported anymore",
1476 DeprecationWarning)
1474 DeprecationWarning)
1477 super(EmptyChangeset, self).__init__(
1475 super(EmptyChangeset, self).__init__(
1478 commit_id=cs, repo=repo, alias=alias, idx=revision,
1476 commit_id=cs, repo=repo, alias=alias, idx=revision,
1479 message=message, author=author, date=date)
1477 message=message, author=author, date=date)
1480
1478
1481 @property
1479 @property
1482 def revision(self):
1480 def revision(self):
1483 warnings.warn("Use idx instead", DeprecationWarning)
1481 warnings.warn("Use idx instead", DeprecationWarning)
1484 return self.idx
1482 return self.idx
1485
1483
1486 @revision.setter
1484 @revision.setter
1487 def revision(self, value):
1485 def revision(self, value):
1488 warnings.warn("Use idx instead", DeprecationWarning)
1486 warnings.warn("Use idx instead", DeprecationWarning)
1489 self.idx = value
1487 self.idx = value
1490
1488
1491
1489
1492 class EmptyRepository(BaseRepository):
1490 class EmptyRepository(BaseRepository):
1493 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1491 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1494 pass
1492 pass
1495
1493
1496 def get_diff(self, *args, **kwargs):
1494 def get_diff(self, *args, **kwargs):
1497 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1495 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1498 return GitDiff('')
1496 return GitDiff('')
1499
1497
1500
1498
1501 class CollectionGenerator(object):
1499 class CollectionGenerator(object):
1502
1500
1503 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1501 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1504 self.repo = repo
1502 self.repo = repo
1505 self.commit_ids = commit_ids
1503 self.commit_ids = commit_ids
1506 # TODO: (oliver) this isn't currently hooked up
1504 # TODO: (oliver) this isn't currently hooked up
1507 self.collection_size = None
1505 self.collection_size = None
1508 self.pre_load = pre_load
1506 self.pre_load = pre_load
1509
1507
1510 def __len__(self):
1508 def __len__(self):
1511 if self.collection_size is not None:
1509 if self.collection_size is not None:
1512 return self.collection_size
1510 return self.collection_size
1513 return self.commit_ids.__len__()
1511 return self.commit_ids.__len__()
1514
1512
1515 def __iter__(self):
1513 def __iter__(self):
1516 for commit_id in self.commit_ids:
1514 for commit_id in self.commit_ids:
1517 # TODO: johbo: Mercurial passes in commit indices or commit ids
1515 # TODO: johbo: Mercurial passes in commit indices or commit ids
1518 yield self._commit_factory(commit_id)
1516 yield self._commit_factory(commit_id)
1519
1517
1520 def _commit_factory(self, commit_id):
1518 def _commit_factory(self, commit_id):
1521 """
1519 """
1522 Allows backends to override the way commits are generated.
1520 Allows backends to override the way commits are generated.
1523 """
1521 """
1524 return self.repo.get_commit(commit_id=commit_id,
1522 return self.repo.get_commit(commit_id=commit_id,
1525 pre_load=self.pre_load)
1523 pre_load=self.pre_load)
1526
1524
1527 def __getslice__(self, i, j):
1525 def __getslice__(self, i, j):
1528 """
1526 """
1529 Returns an iterator over the sliced repository
1527 Returns an iterator over the sliced repository
1530 """
1528 """
1531 commit_ids = self.commit_ids[i:j]
1529 commit_ids = self.commit_ids[i:j]
1532 return self.__class__(
1530 return self.__class__(
1533 self.repo, commit_ids, pre_load=self.pre_load)
1531 self.repo, commit_ids, pre_load=self.pre_load)
1534
1532
1535 def __repr__(self):
1533 def __repr__(self):
1536 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1534 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1537
1535
1538
1536
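A short sketch of consuming the collection above; ``get_commits()`` returning a CollectionGenerator is an assumption about the surrounding repository API, and slicing stays lazy because ``__getslice__`` builds another generator over the sliced commit ids:

from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/path/to/some/repo')   # illustrative path
commits = repo.get_commits()                 # assumed CollectionGenerator

print('total commits: %s' % len(commits))    # cheap: len() of the id list

latest_five = commits[:5]                    # still lazy, same pre_load
for commit in latest_five:                   # commits materialised one by one
    print('%s %s' % (commit.short_id, commit.author))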
1539 class Config(object):
1537 class Config(object):
1540 """
1538 """
1541 Represents the configuration for a repository.
1539 Represents the configuration for a repository.
1542
1540
1543 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1541 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1544 standard library. It implements only the needed subset.
1542 standard library. It implements only the needed subset.
1545 """
1543 """
1546
1544
1547 def __init__(self):
1545 def __init__(self):
1548 self._values = {}
1546 self._values = {}
1549
1547
1550 def copy(self):
1548 def copy(self):
1551 clone = Config()
1549 clone = Config()
1552 for section, values in self._values.items():
1550 for section, values in self._values.items():
1553 clone._values[section] = values.copy()
1551 clone._values[section] = values.copy()
1554 return clone
1552 return clone
1555
1553
1556 def __repr__(self):
1554 def __repr__(self):
1557 return '<Config(%s sections) at %s>' % (
1555 return '<Config(%s sections) at %s>' % (
1558 len(self._values), hex(id(self)))
1556 len(self._values), hex(id(self)))
1559
1557
1560 def items(self, section):
1558 def items(self, section):
1561 return self._values.get(section, {}).iteritems()
1559 return self._values.get(section, {}).iteritems()
1562
1560
1563 def get(self, section, option):
1561 def get(self, section, option):
1564 return self._values.get(section, {}).get(option)
1562 return self._values.get(section, {}).get(option)
1565
1563
1566 def set(self, section, option, value):
1564 def set(self, section, option, value):
1567 section_values = self._values.setdefault(section, {})
1565 section_values = self._values.setdefault(section, {})
1568 section_values[option] = value
1566 section_values[option] = value
1569
1567
1570 def clear_section(self, section):
1568 def clear_section(self, section):
1571 self._values[section] = {}
1569 self._values[section] = {}
1572
1570
1573 def serialize(self):
1571 def serialize(self):
1574 """
1572 """
1575 Creates a list of three-element tuples (section, key, value) representing
1573 Creates a list of three-element tuples (section, key, value) representing
1576 this config object.
1574 this config object.
1577 """
1575 """
1578 items = []
1576 items = []
1579 for section in self._values:
1577 for section in self._values:
1580 for option, value in self._values[section].items():
1578 for option, value in self._values[section].items():
1581 items.append(
1579 items.append(
1582 (safe_str(section), safe_str(option), safe_str(value)))
1580 (safe_str(section), safe_str(option), safe_str(value)))
1583 return items
1581 return items
1584
1582
1585
1583
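A minimal sketch of the Config API above; the section and option names are illustrative:

from rhodecode.lib.vcs.backends.base import Config

config = Config()
config.set('phases', 'publish', 'False')
config.set('extensions', 'largefiles', '')

assert config.get('phases', 'publish') == 'False'
assert config.get('missing', 'option') is None   # unknown values are None

# serialize() flattens the sections into (section, option, value) tuples
for section, option, value in config.serialize():
    print('%s.%s = %s' % (section, option, value))

clone = config.copy()                 # per-section dicts are copied
config.clear_section('phases')
assert clone.get('phases', 'publish') == 'False'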
1586 class Diff(object):
1584 class Diff(object):
1587 """
1585 """
1588 Represents a diff result from a repository backend.
1586 Represents a diff result from a repository backend.
1589
1587
1590 Subclasses have to provide a backend specific value for
1588 Subclasses have to provide a backend specific value for
1591 :attr:`_header_re` and :attr:`_meta_re`.
1589 :attr:`_header_re` and :attr:`_meta_re`.
1592 """
1590 """
1593 _meta_re = None
1591 _meta_re = None
1594 _header_re = None
1592 _header_re = None
1595
1593
1596 def __init__(self, raw_diff):
1594 def __init__(self, raw_diff):
1597 self.raw = raw_diff
1595 self.raw = raw_diff
1598
1596
1599 def chunks(self):
1597 def chunks(self):
1600 """
1598 """
1601 Splits the diff into separate ``diff --git a/file b/file`` chunks.
1599 Splits the diff into separate ``diff --git a/file b/file`` chunks.
1602 To keep diffs consistent we must prepend ``\n``, and we must be able
1600 To keep diffs consistent we must prepend ``\n``, and we must be able
1603 to detect the last chunk, because it is handled by a special rule
1601 to detect the last chunk, because it is handled by a special rule
1604 """
1602 """
1605
1603
1606 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1604 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1607 header = diff_parts[0]
1605 header = diff_parts[0]
1608
1606
1609 if self._meta_re:
1607 if self._meta_re:
1610 match = self._meta_re.match(header)
1608 match = self._meta_re.match(header)
1611
1609
1612 chunks = diff_parts[1:]
1610 chunks = diff_parts[1:]
1613 total_chunks = len(chunks)
1611 total_chunks = len(chunks)
1614
1612
1615 return (
1613 return (
1616 DiffChunk(chunk, self, cur_chunk == total_chunks)
1614 DiffChunk(chunk, self, cur_chunk == total_chunks)
1617 for cur_chunk, chunk in enumerate(chunks, start=1))
1615 for cur_chunk, chunk in enumerate(chunks, start=1))
1618
1616
1619
1617
1620 class DiffChunk(object):
1618 class DiffChunk(object):
1621
1619
1622 def __init__(self, chunk, diff, last_chunk):
1620 def __init__(self, chunk, diff, last_chunk):
1623 self._diff = diff
1621 self._diff = diff
1624
1622
1625 # splitting on \ndiff --git consumes the trailing newline of each chunk;
1623 # splitting on \ndiff --git consumes the trailing newline of each chunk;
1626 # we need to re-append it, except for the last chunk
1624 # we need to re-append it, except for the last chunk
1627 if not last_chunk:
1625 if not last_chunk:
1628 chunk += '\n'
1626 chunk += '\n'
1629
1627
1630 match = self._diff._header_re.match(chunk)
1628 match = self._diff._header_re.match(chunk)
1631 self.header = match.groupdict()
1629 self.header = match.groupdict()
1632 self.diff = chunk[match.end():]
1630 self.diff = chunk[match.end():]
1633 self.raw = chunk
1631 self.raw = chunk
1634
1632
1635
1633
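A sketch of wiring the two classes above together; the subclass and its header regex are hypothetical, simplified stand-ins for the richer backend-specific patterns:

import re

from rhodecode.lib.vcs.backends.base import Diff


class SimpleDiff(Diff):
    # hypothetical header pattern: only captures the two file names
    _header_re = re.compile(r' a/(?P<a_path>\S+) b/(?P<b_path>\S+)\n')


raw = ('diff --git a/setup.py b/setup.py\n'
       '--- a/setup.py\n'
       '+++ b/setup.py\n'
       '@@ -1 +1 @@\n'
       '-old\n'
       '+new\n')

for chunk in SimpleDiff(raw).chunks():
    # header comes from the named groups, diff is everything after the header
    print(chunk.header['b_path'])        # setup.py
    print(chunk.diff.splitlines()[0])    # --- a/setup.py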
1636 class BasePathPermissionChecker(object):
1634 class BasePathPermissionChecker(object):
1637
1635
1638 @staticmethod
1636 @staticmethod
1639 def create_from_patterns(includes, excludes):
1637 def create_from_patterns(includes, excludes):
1640 if includes and '*' in includes and not excludes:
1638 if includes and '*' in includes and not excludes:
1641 return AllPathPermissionChecker()
1639 return AllPathPermissionChecker()
1642 elif excludes and '*' in excludes:
1640 elif excludes and '*' in excludes:
1643 return NonePathPermissionChecker()
1641 return NonePathPermissionChecker()
1644 else:
1642 else:
1645 return PatternPathPermissionChecker(includes, excludes)
1643 return PatternPathPermissionChecker(includes, excludes)
1646
1644
1647 @property
1645 @property
1648 def has_full_access(self):
1646 def has_full_access(self):
1649 raise NotImplementedError()
1647 raise NotImplementedError()
1650
1648
1651 def has_access(self, path):
1649 def has_access(self, path):
1652 raise NotImplementedError()
1650 raise NotImplementedError()
1653
1651
1654
1652
1655 class AllPathPermissionChecker(BasePathPermissionChecker):
1653 class AllPathPermissionChecker(BasePathPermissionChecker):
1656
1654
1657 @property
1655 @property
1658 def has_full_access(self):
1656 def has_full_access(self):
1659 return True
1657 return True
1660
1658
1661 def has_access(self, path):
1659 def has_access(self, path):
1662 return True
1660 return True
1663
1661
1664
1662
1665 class NonePathPermissionChecker(BasePathPermissionChecker):
1663 class NonePathPermissionChecker(BasePathPermissionChecker):
1666
1664
1667 @property
1665 @property
1668 def has_full_access(self):
1666 def has_full_access(self):
1669 return False
1667 return False
1670
1668
1671 def has_access(self, path):
1669 def has_access(self, path):
1672 return False
1670 return False
1673
1671
1674
1672
1675 class PatternPathPermissionChecker(BasePathPermissionChecker):
1673 class PatternPathPermissionChecker(BasePathPermissionChecker):
1676
1674
1677 def __init__(self, includes, excludes):
1675 def __init__(self, includes, excludes):
1678 self.includes = includes
1676 self.includes = includes
1679 self.excludes = excludes
1677 self.excludes = excludes
1680 self.includes_re = [] if not includes else [re.compile(fnmatch.translate(pattern)) for pattern in includes]
1678 self.includes_re = [] if not includes else [
1681 self.excludes_re = [] if not excludes else [re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1679 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1680 self.excludes_re = [] if not excludes else [
1681 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1682
1682
1683 @property
1683 @property
1684 def has_full_access(self):
1684 def has_full_access(self):
1685 return '*' in self.includes and not self.excludes
1685 return '*' in self.includes and not self.excludes
1686
1686
1687 def has_access(self, path):
1687 def has_access(self, path):
1688 for re in self.excludes_re:
1688 for regex in self.excludes_re:
1689 if re.match(path):
1689 if regex.match(path):
1690 return False
1690 return False
1691 for re in self.includes_re:
1691 for regex in self.includes_re:
1692 if re.match(path):
1692 if regex.match(path):
1693 return True
1693 return True
1694 return False No newline at end of file
1694 return False
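A short sketch of how the three checkers above are selected from the fnmatch-style patterns; the paths and patterns are illustrative:

from rhodecode.lib.vcs.backends.base import BasePathPermissionChecker

# a lone '*' include with no excludes short-circuits to full access
assert BasePathPermissionChecker.create_from_patterns(['*'], []).has_full_access

# otherwise globs are translated via fnmatch and tried per path:
# excludes are checked first and win over includes
checker = BasePathPermissionChecker.create_from_patterns(
    ['docs/*', '*.rst'], ['docs/internal/*'])
assert checker.has_access('docs/index.rst')
assert not checker.has_access('docs/internal/notes.rst')
assert not checker.has_access('src/app.py')     # matches no include pattern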
@@ -1,545 +1,545 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from pyramid.compat import configparser
32
31
33 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
36 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
37 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents state of the repository at single commit id.
47 Represents state of the repository at single commit id.
48 """
48 """
49 _author_property = 'author'
49 _author_property = 'author'
50 _committer_property = 'committer'
50 _committer_property = 'committer'
51 _date_property = 'commit_time'
51 _date_property = 'commit_time'
52 _date_tz_property = 'commit_timezone'
52 _date_tz_property = 'commit_timezone'
53 _message_property = 'message'
53 _message_property = 'message'
54 _parents_property = 'parents'
54 _parents_property = 'parents'
55
55
56 _filter_pre_load = [
56 _filter_pre_load = [
57 # done through a more complex tree walk on parents
57 # done through a more complex tree walk on parents
58 "affected_files",
58 "affected_files",
59 # based on repository cached property
59 # based on repository cached property
60 "branch",
60 "branch",
61 # done through subprocess not remote call
61 # done through subprocess not remote call
62 "children",
62 "children",
63 # done through a more complex tree walk on parents
63 # done through a more complex tree walk on parents
64 "status",
64 "status",
65 # mercurial specific property not supported here
65 # mercurial specific property not supported here
66 "_file_paths",
66 "_file_paths",
67 # mercurial specific property not supported here
67 # mercurial specific property not supported here
68 'obsolete',
68 'obsolete',
69 # mercurial specific property not supported here
69 # mercurial specific property not supported here
70 'phase',
70 'phase',
71 # mercurial specific property not supported here
71 # mercurial specific property not supported here
72 'hidden'
72 'hidden'
73 ]
73 ]
74
74
75 def __init__(self, repository, raw_id, idx, pre_load=None):
75 def __init__(self, repository, raw_id, idx, pre_load=None):
76 self.repository = repository
76 self.repository = repository
77 self._remote = repository._remote
77 self._remote = repository._remote
78 # TODO: johbo: Tweak of raw_id should not be necessary
78 # TODO: johbo: Tweak of raw_id should not be necessary
79 self.raw_id = safe_str(raw_id)
79 self.raw_id = safe_str(raw_id)
80 self.idx = idx
80 self.idx = idx
81
81
82 self._set_bulk_properties(pre_load)
82 self._set_bulk_properties(pre_load)
83
83
84 # caches
84 # caches
85 self._stat_modes = {} # stat info for paths
85 self._stat_modes = {} # stat info for paths
86 self._paths = {} # path processed with parse_tree
86 self._paths = {} # path processed with parse_tree
87 self.nodes = {}
87 self.nodes = {}
88 self._submodules = None
88 self._submodules = None
89
89
90 def _set_bulk_properties(self, pre_load):
90 def _set_bulk_properties(self, pre_load):
91 if not pre_load:
91 if not pre_load:
92 return
92 return
93 pre_load = [entry for entry in pre_load
93 pre_load = [entry for entry in pre_load
94 if entry not in self._filter_pre_load]
94 if entry not in self._filter_pre_load]
95 if not pre_load:
95 if not pre_load:
96 return
96 return
97
97
98 result = self._remote.bulk_request(self.raw_id, pre_load)
98 result = self._remote.bulk_request(self.raw_id, pre_load)
99 for attr, value in result.items():
99 for attr, value in result.items():
100 if attr in ["author", "message"]:
100 if attr in ["author", "message"]:
101 if value:
101 if value:
102 value = safe_unicode(value)
102 value = safe_unicode(value)
103 elif attr == "date":
103 elif attr == "date":
104 value = utcdate_fromtimestamp(*value)
104 value = utcdate_fromtimestamp(*value)
105 elif attr == "parents":
105 elif attr == "parents":
106 value = self._make_commits(value)
106 value = self._make_commits(value)
107 self.__dict__[attr] = value
107 self.__dict__[attr] = value
108
108
109 @LazyProperty
109 @LazyProperty
110 def _commit(self):
110 def _commit(self):
111 return self._remote[self.raw_id]
111 return self._remote[self.raw_id]
112
112
113 @LazyProperty
113 @LazyProperty
114 def _tree_id(self):
114 def _tree_id(self):
115 return self._remote[self._commit['tree']]['id']
115 return self._remote[self._commit['tree']]['id']
116
116
117 @LazyProperty
117 @LazyProperty
118 def id(self):
118 def id(self):
119 return self.raw_id
119 return self.raw_id
120
120
121 @LazyProperty
121 @LazyProperty
122 def short_id(self):
122 def short_id(self):
123 return self.raw_id[:12]
123 return self.raw_id[:12]
124
124
125 @LazyProperty
125 @LazyProperty
126 def message(self):
126 def message(self):
127 return safe_unicode(
127 return safe_unicode(
128 self._remote.commit_attribute(self.id, self._message_property))
128 self._remote.commit_attribute(self.id, self._message_property))
129
129
130 @LazyProperty
130 @LazyProperty
131 def committer(self):
131 def committer(self):
132 return safe_unicode(
132 return safe_unicode(
133 self._remote.commit_attribute(self.id, self._committer_property))
133 self._remote.commit_attribute(self.id, self._committer_property))
134
134
135 @LazyProperty
135 @LazyProperty
136 def author(self):
136 def author(self):
137 return safe_unicode(
137 return safe_unicode(
138 self._remote.commit_attribute(self.id, self._author_property))
138 self._remote.commit_attribute(self.id, self._author_property))
139
139
140 @LazyProperty
140 @LazyProperty
141 def date(self):
141 def date(self):
142 unix_ts, tz = self._remote.get_object_attrs(
142 unix_ts, tz = self._remote.get_object_attrs(
143 self.raw_id, self._date_property, self._date_tz_property)
143 self.raw_id, self._date_property, self._date_tz_property)
144 return utcdate_fromtimestamp(unix_ts, tz)
144 return utcdate_fromtimestamp(unix_ts, tz)
145
145
146 @LazyProperty
146 @LazyProperty
147 def status(self):
147 def status(self):
148 """
148 """
149 Returns modified, added, removed, deleted files for current commit
149 Returns modified, added, removed, deleted files for current commit
150 """
150 """
151 return self.changed, self.added, self.removed
151 return self.changed, self.added, self.removed
152
152
153 @LazyProperty
153 @LazyProperty
154 def tags(self):
154 def tags(self):
155 tags = [safe_unicode(name) for name,
155 tags = [safe_unicode(name) for name,
156 commit_id in self.repository.tags.iteritems()
156 commit_id in self.repository.tags.iteritems()
157 if commit_id == self.raw_id]
157 if commit_id == self.raw_id]
158 return tags
158 return tags
159
159
160 @LazyProperty
160 @LazyProperty
161 def branch(self):
161 def branch(self):
162 for name, commit_id in self.repository.branches.iteritems():
162 for name, commit_id in self.repository.branches.iteritems():
163 if commit_id == self.raw_id:
163 if commit_id == self.raw_id:
164 return safe_unicode(name)
164 return safe_unicode(name)
165 return None
165 return None
166
166
167 def _get_id_for_path(self, path):
167 def _get_id_for_path(self, path):
168 path = safe_str(path)
168 path = safe_str(path)
169 if path in self._paths:
169 if path in self._paths:
170 return self._paths[path]
170 return self._paths[path]
171
171
172 tree_id = self._tree_id
172 tree_id = self._tree_id
173
173
174 path = path.strip('/')
174 path = path.strip('/')
175 if path == '':
175 if path == '':
176 data = [tree_id, "tree"]
176 data = [tree_id, "tree"]
177 self._paths[''] = data
177 self._paths[''] = data
178 return data
178 return data
179
179
180 parts = path.split('/')
180 parts = path.split('/')
181 dirs, name = parts[:-1], parts[-1]
181 dirs, name = parts[:-1], parts[-1]
182 cur_dir = ''
182 cur_dir = ''
183
183
184 # initially extract things from root dir
184 # initially extract things from root dir
185 tree_items = self._remote.tree_items(tree_id)
185 tree_items = self._remote.tree_items(tree_id)
186 self._process_tree_items(tree_items, cur_dir)
186 self._process_tree_items(tree_items, cur_dir)
187
187
188 for dir in dirs:
188 for dir in dirs:
189 if cur_dir:
189 if cur_dir:
190 cur_dir = '/'.join((cur_dir, dir))
190 cur_dir = '/'.join((cur_dir, dir))
191 else:
191 else:
192 cur_dir = dir
192 cur_dir = dir
193 dir_id = None
193 dir_id = None
194 for item, stat_, id_, type_ in tree_items:
194 for item, stat_, id_, type_ in tree_items:
195 if item == dir:
195 if item == dir:
196 dir_id = id_
196 dir_id = id_
197 break
197 break
198 if dir_id:
198 if dir_id:
199 if type_ != "tree":
199 if type_ != "tree":
200 raise CommitError('%s is not a directory' % cur_dir)
200 raise CommitError('%s is not a directory' % cur_dir)
201 # update tree
201 # update tree
202 tree_items = self._remote.tree_items(dir_id)
202 tree_items = self._remote.tree_items(dir_id)
203 else:
203 else:
204 raise CommitError('%s has not been found' % cur_dir)
204 raise CommitError('%s has not been found' % cur_dir)
205
205
206 # cache all items from the given traversed tree
206 # cache all items from the given traversed tree
207 self._process_tree_items(tree_items, cur_dir)
207 self._process_tree_items(tree_items, cur_dir)
208
208
209 if path not in self._paths:
209 if path not in self._paths:
210 raise self.no_node_at_path(path)
210 raise self.no_node_at_path(path)
211
211
212 return self._paths[path]
212 return self._paths[path]
213
213
214 def _process_tree_items(self, items, cur_dir):
214 def _process_tree_items(self, items, cur_dir):
215 for item, stat_, id_, type_ in items:
215 for item, stat_, id_, type_ in items:
216 if cur_dir:
216 if cur_dir:
217 name = '/'.join((cur_dir, item))
217 name = '/'.join((cur_dir, item))
218 else:
218 else:
219 name = item
219 name = item
220 self._paths[name] = [id_, type_]
220 self._paths[name] = [id_, type_]
221 self._stat_modes[name] = stat_
221 self._stat_modes[name] = stat_
222
222
223 def _get_kind(self, path):
223 def _get_kind(self, path):
224 path_id, type_ = self._get_id_for_path(path)
224 path_id, type_ = self._get_id_for_path(path)
225 if type_ == 'blob':
225 if type_ == 'blob':
226 return NodeKind.FILE
226 return NodeKind.FILE
227 elif type_ == 'tree':
227 elif type_ == 'tree':
228 return NodeKind.DIR
228 return NodeKind.DIR
229 elif type_ == 'link':
229 elif type_ == 'link':
230 return NodeKind.SUBMODULE
230 return NodeKind.SUBMODULE
231 return None
231 return None
232
232
233 def _get_filectx(self, path):
233 def _get_filectx(self, path):
234 path = self._fix_path(path)
234 path = self._fix_path(path)
235 if self._get_kind(path) != NodeKind.FILE:
235 if self._get_kind(path) != NodeKind.FILE:
236 raise CommitError(
236 raise CommitError(
237 "File does not exist for commit %s at '%s'" %
237 "File does not exist for commit %s at '%s'" %
238 (self.raw_id, path))
238 (self.raw_id, path))
239 return path
239 return path
240
240
241 def _get_file_nodes(self):
241 def _get_file_nodes(self):
242 return chain(*(t[2] for t in self.walk()))
242 return chain(*(t[2] for t in self.walk()))
243
243
244 @LazyProperty
244 @LazyProperty
245 def parents(self):
245 def parents(self):
246 """
246 """
247 Returns list of parent commits.
247 Returns list of parent commits.
248 """
248 """
249 parent_ids = self._remote.commit_attribute(
249 parent_ids = self._remote.commit_attribute(
250 self.id, self._parents_property)
250 self.id, self._parents_property)
251 return self._make_commits(parent_ids)
251 return self._make_commits(parent_ids)
252
252
253 @LazyProperty
253 @LazyProperty
254 def children(self):
254 def children(self):
255 """
255 """
256 Returns list of child commits.
256 Returns list of child commits.
257 """
257 """
258 rev_filter = settings.GIT_REV_FILTER
258 rev_filter = settings.GIT_REV_FILTER
259 output, __ = self.repository.run_git_command(
259 output, __ = self.repository.run_git_command(
260 ['rev-list', '--children'] + rev_filter)
260 ['rev-list', '--children'] + rev_filter)
261
261
262 child_ids = []
262 child_ids = []
263 pat = re.compile(r'^%s' % self.raw_id)
263 pat = re.compile(r'^%s' % self.raw_id)
264 for l in output.splitlines():
264 for l in output.splitlines():
265 if pat.match(l):
265 if pat.match(l):
266 found_ids = l.split(' ')[1:]
266 found_ids = l.split(' ')[1:]
267 child_ids.extend(found_ids)
267 child_ids.extend(found_ids)
268 return self._make_commits(child_ids)
268 return self._make_commits(child_ids)
269
269
270 def _make_commits(self, commit_ids, pre_load=None):
270 def _make_commits(self, commit_ids, pre_load=None):
271 return [
271 return [
272 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
272 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
273 for commit_id in commit_ids]
273 for commit_id in commit_ids]
274
274
275 def get_file_mode(self, path):
275 def get_file_mode(self, path):
276 """
276 """
277 Returns stat mode of the file at the given `path`.
277 Returns stat mode of the file at the given `path`.
278 """
278 """
279 path = safe_str(path)
279 path = safe_str(path)
280 # ensure path is traversed
280 # ensure path is traversed
281 self._get_id_for_path(path)
281 self._get_id_for_path(path)
282 return self._stat_modes[path]
282 return self._stat_modes[path]
283
283
284 def is_link(self, path):
284 def is_link(self, path):
285 return stat.S_ISLNK(self.get_file_mode(path))
285 return stat.S_ISLNK(self.get_file_mode(path))
286
286
287 def get_file_content(self, path):
287 def get_file_content(self, path):
288 """
288 """
289 Returns content of the file at given `path`.
289 Returns content of the file at given `path`.
290 """
290 """
291 id_, _ = self._get_id_for_path(path)
291 id_, _ = self._get_id_for_path(path)
292 return self._remote.blob_as_pretty_string(id_)
292 return self._remote.blob_as_pretty_string(id_)
293
293
294 def get_file_size(self, path):
294 def get_file_size(self, path):
295 """
295 """
296 Returns size of the file at given `path`.
296 Returns size of the file at given `path`.
297 """
297 """
298 id_, _ = self._get_id_for_path(path)
298 id_, _ = self._get_id_for_path(path)
299 return self._remote.blob_raw_length(id_)
299 return self._remote.blob_raw_length(id_)
300
300
301 def get_file_history(self, path, limit=None, pre_load=None):
301 def get_file_history(self, path, limit=None, pre_load=None):
302 """
302 """
303 Returns history of file as reversed list of `GitCommit` objects for
303 Returns history of file as reversed list of `GitCommit` objects for
304 which file at given `path` has been modified.
304 which file at given `path` has been modified.
305
305
306 TODO: This function now uses an underlying 'git' command which works
306 TODO: This function now uses an underlying 'git' command which works
307 quickly, but ideally we should replace it with an algorithm.
307 quickly, but ideally we should replace it with an algorithm.
308 """
308 """
309 self._get_filectx(path)
309 self._get_filectx(path)
310 f_path = safe_str(path)
310 f_path = safe_str(path)
311
311
312 cmd = ['log']
312 cmd = ['log']
313 if limit:
313 if limit:
314 cmd.extend(['-n', str(safe_int(limit, 0))])
314 cmd.extend(['-n', str(safe_int(limit, 0))])
315 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
315 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
316
316
317 output, __ = self.repository.run_git_command(cmd)
317 output, __ = self.repository.run_git_command(cmd)
318 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
318 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
319
319
320 return [
320 return [
321 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
321 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
322 for commit_id in commit_ids]
322 for commit_id in commit_ids]
323
323
324 # TODO: unused for now potential replacement for subprocess
324 # TODO: unused for now potential replacement for subprocess
325 def get_file_history_2(self, path, limit=None, pre_load=None):
325 def get_file_history_2(self, path, limit=None, pre_load=None):
326 """
326 """
327 Returns history of file as reversed list of `Commit` objects for
327 Returns history of file as reversed list of `Commit` objects for
328 which file at given `path` has been modified.
328 which file at given `path` has been modified.
329 """
329 """
330 self._get_filectx(path)
330 self._get_filectx(path)
331 f_path = safe_str(path)
331 f_path = safe_str(path)
332
332
333 commit_ids = self._remote.get_file_history(f_path, self.id, limit)
333 commit_ids = self._remote.get_file_history(f_path, self.id, limit)
334
334
335 return [
335 return [
336 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
336 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
337 for commit_id in commit_ids]
337 for commit_id in commit_ids]
338
338
339 def get_file_annotate(self, path, pre_load=None):
339 def get_file_annotate(self, path, pre_load=None):
340 """
340 """
341 Returns a generator of four element tuples with
341 Returns a generator of four element tuples with
342 lineno, commit_id, commit lazy loader and line
342 lineno, commit_id, commit lazy loader and line
343
343
344 TODO: This function now uses the underlying 'git' command, which is
344 TODO: This function now uses the underlying 'git' command, which is
345 generally not good. It should be replaced with an algorithm iterating
345 generally not good. It should be replaced with an algorithm iterating
346 over commits.
346 over commits.
347 """
347 """
348 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
348 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
349 # -l ==> outputs long shas (and we need all 40 characters)
349 # -l ==> outputs long shas (and we need all 40 characters)
350 # --root ==> doesn't put '^' character for boundaries
350 # --root ==> doesn't put '^' character for boundaries
351 # -r commit_id ==> blames for the given commit
351 # -r commit_id ==> blames for the given commit
352 output, __ = self.repository.run_git_command(cmd)
352 output, __ = self.repository.run_git_command(cmd)
353
353
354 for i, blame_line in enumerate(output.split('\n')[:-1]):
354 for i, blame_line in enumerate(output.split('\n')[:-1]):
355 line_no = i + 1
355 line_no = i + 1
356 commit_id, line = re.split(r' ', blame_line, 1)
356 commit_id, line = re.split(r' ', blame_line, 1)
357 yield (
357 yield (
358 line_no, commit_id,
358 line_no, commit_id,
359 lambda: self.repository.get_commit(commit_id=commit_id,
359 lambda: self.repository.get_commit(commit_id=commit_id,
360 pre_load=pre_load),
360 pre_load=pre_load),
361 line)
361 line)
362
362
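A small helper sketch showing how the annotate generator above is typically consumed; note that the commit loader is a closure over the loop variable, so it should be called before advancing the generator (the path and the 'import' filter are illustrative):

def print_import_blame(commit, path='setup.py'):
    """Illustrative helper: blame only the lines containing 'import'."""
    for line_no, commit_id, loader, line in commit.get_file_annotate(path):
        if 'import' in line:
            blame_commit = loader()   # resolve the commit lazily, right away
            print('%4d %s %s' % (line_no, commit_id[:12], blame_commit.author))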
363 def get_nodes(self, path):
363 def get_nodes(self, path):
364 if self._get_kind(path) != NodeKind.DIR:
364 if self._get_kind(path) != NodeKind.DIR:
365 raise CommitError(
365 raise CommitError(
366 "Directory does not exist for commit %s at "
366 "Directory does not exist for commit %s at "
367 " '%s'" % (self.raw_id, path))
367 " '%s'" % (self.raw_id, path))
368 path = self._fix_path(path)
368 path = self._fix_path(path)
369 id_, _ = self._get_id_for_path(path)
369 id_, _ = self._get_id_for_path(path)
370 tree_id = self._remote[id_]['id']
370 tree_id = self._remote[id_]['id']
371 dirnodes = []
371 dirnodes = []
372 filenodes = []
372 filenodes = []
373 alias = self.repository.alias
373 alias = self.repository.alias
374 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
374 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
375 if type_ == 'link':
375 if type_ == 'link':
376 url = self._get_submodule_url('/'.join((path, name)))
376 url = self._get_submodule_url('/'.join((path, name)))
377 dirnodes.append(SubModuleNode(
377 dirnodes.append(SubModuleNode(
378 name, url=url, commit=id_, alias=alias))
378 name, url=url, commit=id_, alias=alias))
379 continue
379 continue
380
380
381 if path != '':
381 if path != '':
382 obj_path = '/'.join((path, name))
382 obj_path = '/'.join((path, name))
383 else:
383 else:
384 obj_path = name
384 obj_path = name
385 if obj_path not in self._stat_modes:
385 if obj_path not in self._stat_modes:
386 self._stat_modes[obj_path] = stat_
386 self._stat_modes[obj_path] = stat_
387
387
388 if type_ == 'tree':
388 if type_ == 'tree':
389 dirnodes.append(DirNode(obj_path, commit=self))
389 dirnodes.append(DirNode(obj_path, commit=self))
390 elif type_ == 'blob':
390 elif type_ == 'blob':
391 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
391 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
392 else:
392 else:
393 raise CommitError(
393 raise CommitError(
394 "Requested object should be Tree or Blob, is %s", type_)
394 "Requested object should be Tree or Blob, is %s", type_)
395
395
396 nodes = dirnodes + filenodes
396 nodes = dirnodes + filenodes
397 for node in nodes:
397 for node in nodes:
398 if node.path not in self.nodes:
398 if node.path not in self.nodes:
399 self.nodes[node.path] = node
399 self.nodes[node.path] = node
400 nodes.sort()
400 nodes.sort()
401 return nodes
401 return nodes
402
402
403 def get_node(self, path, pre_load=None):
403 def get_node(self, path, pre_load=None):
404 if isinstance(path, unicode):
404 if isinstance(path, unicode):
405 path = path.encode('utf-8')
405 path = path.encode('utf-8')
406 path = self._fix_path(path)
406 path = self._fix_path(path)
407 if path not in self.nodes:
407 if path not in self.nodes:
408 try:
408 try:
409 id_, type_ = self._get_id_for_path(path)
409 id_, type_ = self._get_id_for_path(path)
410 except CommitError:
410 except CommitError:
411 raise NodeDoesNotExistError(
411 raise NodeDoesNotExistError(
412 "Cannot find one of parents' directories for a given "
412 "Cannot find one of parents' directories for a given "
413 "path: %s" % path)
413 "path: %s" % path)
414
414
415 if type_ == 'link':
415 if type_ == 'link':
416 url = self._get_submodule_url(path)
416 url = self._get_submodule_url(path)
417 node = SubModuleNode(path, url=url, commit=id_,
417 node = SubModuleNode(path, url=url, commit=id_,
418 alias=self.repository.alias)
418 alias=self.repository.alias)
419 elif type_ == 'tree':
419 elif type_ == 'tree':
420 if path == '':
420 if path == '':
421 node = RootNode(commit=self)
421 node = RootNode(commit=self)
422 else:
422 else:
423 node = DirNode(path, commit=self)
423 node = DirNode(path, commit=self)
424 elif type_ == 'blob':
424 elif type_ == 'blob':
425 node = FileNode(path, commit=self, pre_load=pre_load)
425 node = FileNode(path, commit=self, pre_load=pre_load)
426 else:
426 else:
427 raise self.no_node_at_path(path)
427 raise self.no_node_at_path(path)
428
428
429 # cache node
429 # cache node
430 self.nodes[path] = node
430 self.nodes[path] = node
431 return self.nodes[path]
431 return self.nodes[path]
432
432
433 def get_largefile_node(self, path):
433 def get_largefile_node(self, path):
434 id_, _ = self._get_id_for_path(path)
434 id_, _ = self._get_id_for_path(path)
435 pointer_spec = self._remote.is_large_file(id_)
435 pointer_spec = self._remote.is_large_file(id_)
436
436
437 if pointer_spec:
437 if pointer_spec:
438 # content of that file regular FileNode is the hash of largefile
438 # the content of that regular FileNode is the hash of the largefile
438 # the content of that regular FileNode is the hash of the largefile
439 file_id = pointer_spec.get('oid_hash')
440 if self._remote.in_largefiles_store(file_id):
440 if self._remote.in_largefiles_store(file_id):
441 lf_path = self._remote.store_path(file_id)
441 lf_path = self._remote.store_path(file_id)
442 return LargeFileNode(lf_path, commit=self, org_path=path)
442 return LargeFileNode(lf_path, commit=self, org_path=path)
443
443
444 @LazyProperty
444 @LazyProperty
445 def affected_files(self):
445 def affected_files(self):
446 """
446 """
447 Gets a fast-accessible list of file changes for the given commit
447 Gets a fast-accessible list of file changes for the given commit
448 """
448 """
449 added, modified, deleted = self._changes_cache
449 added, modified, deleted = self._changes_cache
450 return list(added.union(modified).union(deleted))
450 return list(added.union(modified).union(deleted))
451
451
452 @LazyProperty
452 @LazyProperty
453 def _changes_cache(self):
453 def _changes_cache(self):
454 added = set()
454 added = set()
455 modified = set()
455 modified = set()
456 deleted = set()
456 deleted = set()
457 _r = self._remote
457 _r = self._remote
458
458
459 parents = self.parents
459 parents = self.parents
460 if not self.parents:
460 if not self.parents:
461 parents = [base.EmptyCommit()]
461 parents = [base.EmptyCommit()]
462 for parent in parents:
462 for parent in parents:
463 if isinstance(parent, base.EmptyCommit):
463 if isinstance(parent, base.EmptyCommit):
464 oid = None
464 oid = None
465 else:
465 else:
466 oid = parent.raw_id
466 oid = parent.raw_id
467 changes = _r.tree_changes(oid, self.raw_id)
467 changes = _r.tree_changes(oid, self.raw_id)
468 for (oldpath, newpath), (_, _), (_, _) in changes:
468 for (oldpath, newpath), (_, _), (_, _) in changes:
469 if newpath and oldpath:
469 if newpath and oldpath:
470 modified.add(newpath)
470 modified.add(newpath)
471 elif newpath and not oldpath:
471 elif newpath and not oldpath:
472 added.add(newpath)
472 added.add(newpath)
473 elif not newpath and oldpath:
473 elif not newpath and oldpath:
474 deleted.add(oldpath)
474 deleted.add(oldpath)
475 return added, modified, deleted
475 return added, modified, deleted
476
476
477 def _get_paths_for_status(self, status):
477 def _get_paths_for_status(self, status):
478 """
478 """
479 Returns sorted list of paths for given ``status``.
479 Returns sorted list of paths for given ``status``.
480
480
481 :param status: one of: *added*, *modified* or *deleted*
481 :param status: one of: *added*, *modified* or *deleted*
482 """
482 """
483 added, modified, deleted = self._changes_cache
483 added, modified, deleted = self._changes_cache
484 return sorted({
484 return sorted({
485 'added': list(added),
485 'added': list(added),
486 'modified': list(modified),
486 'modified': list(modified),
487 'deleted': list(deleted)}[status]
487 'deleted': list(deleted)}[status]
488 )
488 )
489
489
490 @LazyProperty
490 @LazyProperty
491 def added(self):
491 def added(self):
492 """
492 """
493 Returns list of added ``FileNode`` objects.
493 Returns list of added ``FileNode`` objects.
494 """
494 """
495 if not self.parents:
495 if not self.parents:
496 return list(self._get_file_nodes())
496 return list(self._get_file_nodes())
497 return AddedFileNodesGenerator(
497 return AddedFileNodesGenerator(
498 [n for n in self._get_paths_for_status('added')], self)
498 [n for n in self._get_paths_for_status('added')], self)
499
499
500 @LazyProperty
500 @LazyProperty
501 def changed(self):
501 def changed(self):
502 """
502 """
503 Returns list of modified ``FileNode`` objects.
503 Returns list of modified ``FileNode`` objects.
504 """
504 """
505 if not self.parents:
505 if not self.parents:
506 return []
506 return []
507 return ChangedFileNodesGenerator(
507 return ChangedFileNodesGenerator(
508 [n for n in self._get_paths_for_status('modified')], self)
508 [n for n in self._get_paths_for_status('modified')], self)
509
509
510 @LazyProperty
510 @LazyProperty
511 def removed(self):
511 def removed(self):
512 """
512 """
513 Returns list of removed ``FileNode`` objects.
513 Returns list of removed ``FileNode`` objects.
514 """
514 """
515 if not self.parents:
515 if not self.parents:
516 return []
516 return []
517 return RemovedFileNodesGenerator(
517 return RemovedFileNodesGenerator(
518 [n for n in self._get_paths_for_status('deleted')], self)
518 [n for n in self._get_paths_for_status('deleted')], self)
519
519
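The three properties above expose the per-status paths computed by ``_get_paths_for_status``. An editorial usage sketch follows (not part of the changeset); it assumes ``commit`` is an instance of this commit class and that each generated node exposes a ``path`` attribute:

# print a git-status-like summary for one commit (illustrative only)
for node in commit.added:
    print('A %s' % node.path)
for node in commit.changed:
    print('M %s' % node.path)
for node in commit.removed:
    print('D %s' % node.path)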
520 def _get_submodule_url(self, submodule_path):
520 def _get_submodule_url(self, submodule_path):
521 git_modules_path = '.gitmodules'
521 git_modules_path = '.gitmodules'
522
522
523 if self._submodules is None:
523 if self._submodules is None:
524 self._submodules = {}
524 self._submodules = {}
525
525
526 try:
526 try:
527 submodules_node = self.get_node(git_modules_path)
527 submodules_node = self.get_node(git_modules_path)
528 except NodeDoesNotExistError:
528 except NodeDoesNotExistError:
529 return None
529 return None
530
530
531 content = submodules_node.content
531 content = submodules_node.content
532
532
533 # ConfigParser fails if there is leading whitespace
533 # ConfigParser fails if there is leading whitespace
534 content = '\n'.join(l.strip() for l in content.split('\n'))
534 content = '\n'.join(l.strip() for l in content.split('\n'))
535
535
536 parser = configparser.ConfigParser()
536 parser = configparser.ConfigParser()
537 parser.readfp(StringIO(content))
537 parser.readfp(StringIO(content))
538
538
539 for section in parser.sections():
539 for section in parser.sections():
540 path = parser.get(section, 'path')
540 path = parser.get(section, 'path')
541 url = parser.get(section, 'url')
541 url = parser.get(section, 'url')
542 if path and url:
542 if path and url:
543 self._submodules[path.strip('/')] = url
543 self._submodules[path.strip('/')] = url
544
544
545 return self._submodules.get(submodule_path.strip('/'))
545 return self._submodules.get(submodule_path.strip('/'))
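For illustration only (an editorial sketch, not part of the diff): the ``.gitmodules`` parsing approach used above, reproduced standalone with the Python 2 stdlib parser; the submodule name and url are hypothetical.

from StringIO import StringIO
from ConfigParser import ConfigParser

content = (
    '[submodule "libs/vendored"]\n'
    '    path = libs/vendored\n'
    '    url = https://example.com/vendored.git\n')
# strip per-line indentation, mirroring the whitespace note above
content = '\n'.join(l.strip() for l in content.split('\n'))
parser = ConfigParser()
parser.readfp(StringIO(content))
submodules = {}
for section in parser.sections():
    path = parser.get(section, 'path')
    url = parser.get(section, 'url')
    if path and url:
        submodules[path.strip('/')] = url
print(submodules.get('libs/vendored'))  # -> https://example.com/vendored.git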
@@ -1,929 +1,936 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import ConfigParser
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import os
28 import shutil
27 import shutil
29 import urllib
28 import urllib
30
29
31 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
32
31
33 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
36 date_astimestamp)
37 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils import safe_unicode, safe_str
38 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
39 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 from rhodecode.lib.vcs.compat import configparser
48
47
49 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
50 nullid = "\0" * 20
49 nullid = "\0" * 20
51
50
52 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
53
52
54
53
55 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
56 """
55 """
57 Mercurial repository backend
56 Mercurial repository backend
58 """
57 """
59 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
60
59
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 update_after_clone=False, with_wire=None):
61 update_after_clone=False, with_wire=None):
63 """
62 """
64 Raises RepositoryError if repository could not be found at the given
63 Raises RepositoryError if repository could not be found at the given
65 ``repo_path``.
64 ``repo_path``.
66
65
67 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
69 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
70 it does not exist rather than raising exception
69 it does not exist rather than raising exception
71 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
72 :param update_after_clone=False: sets update of working copy after
71 :param update_after_clone=False: sets update of working copy after
73 making a clone
72 making a clone
74 """
73 """
75
74
76 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
79 # special requirements
78 # special requirements
80 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
82
81
83 self._remote = connection.Hg(
82 self._remote = connection.Hg(
84 self.path, self.config, with_wire=with_wire)
83 self.path, self.config, with_wire=with_wire)
85
84
86 self._init_repo(create, src_url, update_after_clone)
85 self._init_repo(create, src_url, update_after_clone)
87
86
88 # caches
87 # caches
89 self._commit_ids = {}
88 self._commit_ids = {}
90
89
91 @LazyProperty
90 @LazyProperty
92 def commit_ids(self):
91 def commit_ids(self):
93 """
92 """
94 Returns list of commit ids, in ascending order. Being a lazy
93 Returns list of commit ids, in ascending order. Being a lazy
95 attribute allows external tools to inject shas from cache.
94 attribute allows external tools to inject shas from cache.
96 """
95 """
97 commit_ids = self._get_all_commit_ids()
96 commit_ids = self._get_all_commit_ids()
98 self._rebuild_cache(commit_ids)
97 self._rebuild_cache(commit_ids)
99 return commit_ids
98 return commit_ids
100
99
101 def _rebuild_cache(self, commit_ids):
100 def _rebuild_cache(self, commit_ids):
102 self._commit_ids = dict((commit_id, index)
101 self._commit_ids = dict((commit_id, index)
103 for index, commit_id in enumerate(commit_ids))
102 for index, commit_id in enumerate(commit_ids))
104
103
105 @LazyProperty
104 @LazyProperty
106 def branches(self):
105 def branches(self):
107 return self._get_branches()
106 return self._get_branches()
108
107
109 @LazyProperty
108 @LazyProperty
110 def branches_closed(self):
109 def branches_closed(self):
111 return self._get_branches(active=False, closed=True)
110 return self._get_branches(active=False, closed=True)
112
111
113 @LazyProperty
112 @LazyProperty
114 def branches_all(self):
113 def branches_all(self):
115 all_branches = {}
114 all_branches = {}
116 all_branches.update(self.branches)
115 all_branches.update(self.branches)
117 all_branches.update(self.branches_closed)
116 all_branches.update(self.branches_closed)
118 return all_branches
117 return all_branches
119
118
120 def _get_branches(self, active=True, closed=False):
119 def _get_branches(self, active=True, closed=False):
121 """
120 """
122 Gets branches for this repository
121 Gets branches for this repository
123 Returns only active, non-closed branches by default
122 Returns only active, non-closed branches by default
124
123
125 :param active: return also active branches
124 :param active: return also active branches
126 :param closed: return also closed branches
125 :param closed: return also closed branches
127
126
128 """
127 """
129 if self.is_empty():
128 if self.is_empty():
130 return {}
129 return {}
131
130
132 def get_name(ctx):
131 def get_name(ctx):
133 return ctx[0]
132 return ctx[0]
134
133
135 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 self._remote.branches(active, closed).items()]
135 self._remote.branches(active, closed).items()]
137
136
138 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139
138
140 @LazyProperty
139 @LazyProperty
141 def tags(self):
140 def tags(self):
142 """
141 """
143 Gets tags for this repository
142 Gets tags for this repository
144 """
143 """
145 return self._get_tags()
144 return self._get_tags()
146
145
147 def _get_tags(self):
146 def _get_tags(self):
148 if self.is_empty():
147 if self.is_empty():
149 return {}
148 return {}
150
149
151 def get_name(ctx):
150 def get_name(ctx):
152 return ctx[0]
151 return ctx[0]
153
152
154 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 self._remote.tags().items()]
154 self._remote.tags().items()]
156
155
157 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158
157
159 def tag(self, name, user, commit_id=None, message=None, date=None,
158 def tag(self, name, user, commit_id=None, message=None, date=None,
160 **kwargs):
159 **kwargs):
161 """
160 """
162 Creates and returns a tag for the given ``commit_id``.
161 Creates and returns a tag for the given ``commit_id``.
163
162
164 :param name: name for new tag
163 :param name: name for new tag
165 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param commit_id: commit id for which new tag would be created
165 :param commit_id: commit id for which new tag would be created
167 :param message: message of the tag's commit
166 :param message: message of the tag's commit
168 :param date: date of tag's commit
167 :param date: date of tag's commit
169
168
170 :raises TagAlreadyExistError: if tag with same name already exists
169 :raises TagAlreadyExistError: if tag with same name already exists
171 """
170 """
172 if name in self.tags:
171 if name in self.tags:
173 raise TagAlreadyExistError("Tag %s already exists" % name)
172 raise TagAlreadyExistError("Tag %s already exists" % name)
174 commit = self.get_commit(commit_id=commit_id)
173 commit = self.get_commit(commit_id=commit_id)
175 local = kwargs.setdefault('local', False)
174 local = kwargs.setdefault('local', False)
176
175
177 if message is None:
176 if message is None:
178 message = "Added tag %s for commit %s" % (name, commit.short_id)
177 message = "Added tag %s for commit %s" % (name, commit.short_id)
179
178
180 date, tz = date_to_timestamp_plus_offset(date)
179 date, tz = date_to_timestamp_plus_offset(date)
181
180
182 self._remote.tag(
181 self._remote.tag(
183 name, commit.raw_id, message, local, user, date, tz)
182 name, commit.raw_id, message, local, user, date, tz)
184 self._remote.invalidate_vcs_cache()
183 self._remote.invalidate_vcs_cache()
185
184
186 # Reinitialize tags
185 # Reinitialize tags
187 self.tags = self._get_tags()
186 self.tags = self._get_tags()
188 tag_id = self.tags[name]
187 tag_id = self.tags[name]
189
188
190 return self.get_commit(commit_id=tag_id)
189 return self.get_commit(commit_id=tag_id)
191
190
192 def remove_tag(self, name, user, message=None, date=None):
191 def remove_tag(self, name, user, message=None, date=None):
193 """
192 """
194 Removes tag with the given `name`.
193 Removes tag with the given `name`.
195
194
196 :param name: name of the tag to be removed
195 :param name: name of the tag to be removed
197 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param message: message of the tag's removal commit
197 :param message: message of the tag's removal commit
199 :param date: date of tag's removal commit
198 :param date: date of tag's removal commit
200
199
201 :raises TagDoesNotExistError: if tag with given name does not exist
200 :raises TagDoesNotExistError: if tag with given name does not exist
202 """
201 """
203 if name not in self.tags:
202 if name not in self.tags:
204 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 if message is None:
204 if message is None:
206 message = "Removed tag %s" % name
205 message = "Removed tag %s" % name
207 local = False
206 local = False
208
207
209 date, tz = date_to_timestamp_plus_offset(date)
208 date, tz = date_to_timestamp_plus_offset(date)
210
209
211 self._remote.tag(name, nullid, message, local, user, date, tz)
210 self._remote.tag(name, nullid, message, local, user, date, tz)
212 self._remote.invalidate_vcs_cache()
211 self._remote.invalidate_vcs_cache()
213 self.tags = self._get_tags()
212 self.tags = self._get_tags()
214
213
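An editorial usage sketch of the tag API described above (values are placeholders; ``repo`` is assumed to be a ``MercurialRepository`` instance):

# create a tag on the tip commit, then remove it again
tagged = repo.tag(
    name='v1.0.0',
    user='Joe Doe <joe.doe@example.com>',
    message='Added tag v1.0.0')
repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>')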
215 @LazyProperty
214 @LazyProperty
216 def bookmarks(self):
215 def bookmarks(self):
217 """
216 """
218 Gets bookmarks for this repository
217 Gets bookmarks for this repository
219 """
218 """
220 return self._get_bookmarks()
219 return self._get_bookmarks()
221
220
222 def _get_bookmarks(self):
221 def _get_bookmarks(self):
223 if self.is_empty():
222 if self.is_empty():
224 return {}
223 return {}
225
224
226 def get_name(ctx):
225 def get_name(ctx):
227 return ctx[0]
226 return ctx[0]
228
227
229 _bookmarks = [
228 _bookmarks = [
230 (safe_unicode(n), hexlify(h)) for n, h in
229 (safe_unicode(n), hexlify(h)) for n, h in
231 self._remote.bookmarks().items()]
230 self._remote.bookmarks().items()]
232
231
233 return OrderedDict(sorted(_bookmarks, key=get_name))
232 return OrderedDict(sorted(_bookmarks, key=get_name))
234
233
235 def _get_all_commit_ids(self):
234 def _get_all_commit_ids(self):
236 return self._remote.get_all_commit_ids('visible')
235 return self._remote.get_all_commit_ids('visible')
237
236
238 def get_diff(
237 def get_diff(
239 self, commit1, commit2, path='', ignore_whitespace=False,
238 self, commit1, commit2, path='', ignore_whitespace=False,
240 context=3, path1=None):
239 context=3, path1=None):
241 """
240 """
242 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 `commit2` since `commit1`.
242 `commit2` since `commit1`.
244
243
245 :param commit1: Entry point from which diff is shown. Can be
244 :param commit1: Entry point from which diff is shown. Can be
246 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 the changes since empty state of the repository until `commit2`
246 the changes since empty state of the repository until `commit2`
248 :param commit2: Until which commit changes should be shown.
247 :param commit2: Until which commit changes should be shown.
249 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 changes. Defaults to ``False``.
249 changes. Defaults to ``False``.
251 :param context: How many lines before/after changed lines should be
250 :param context: How many lines before/after changed lines should be
252 shown. Defaults to ``3``.
251 shown. Defaults to ``3``.
253 """
252 """
254 self._validate_diff_commits(commit1, commit2)
253 self._validate_diff_commits(commit1, commit2)
255 if path1 is not None and path1 != path:
254 if path1 is not None and path1 != path:
256 raise ValueError("Diff of two different paths not supported.")
255 raise ValueError("Diff of two different paths not supported.")
257
256
258 if path:
257 if path:
259 file_filter = [self.path, path]
258 file_filter = [self.path, path]
260 else:
259 else:
261 file_filter = None
260 file_filter = None
262
261
263 diff = self._remote.diff(
262 diff = self._remote.diff(
264 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 opt_git=True, opt_ignorews=ignore_whitespace,
264 opt_git=True, opt_ignorews=ignore_whitespace,
266 context=context)
265 context=context)
267 return MercurialDiff(diff)
266 return MercurialDiff(diff)
268
267
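A minimal editorial sketch of calling ``get_diff`` (commit selection is illustrative; the attribute used to read the diff text is an assumption about ``MercurialDiff``):

old = repo.get_commit(commit_idx=0)
new = repo.get_commit()  # defaults to tip
diff = repo.get_diff(old, new, ignore_whitespace=True, context=5)
print(diff.raw)  # assumed attribute holding the raw, git-like diff text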
269 def strip(self, commit_id, branch=None):
268 def strip(self, commit_id, branch=None):
270 self._remote.strip(commit_id, update=False, backup="none")
269 self._remote.strip(commit_id, update=False, backup="none")
271
270
272 self._remote.invalidate_vcs_cache()
271 self._remote.invalidate_vcs_cache()
273 self.commit_ids = self._get_all_commit_ids()
272 self.commit_ids = self._get_all_commit_ids()
274 self._rebuild_cache(self.commit_ids)
273 self._rebuild_cache(self.commit_ids)
275
274
276 def verify(self):
275 def verify(self):
277 verify = self._remote.verify()
276 verify = self._remote.verify()
278
277
279 self._remote.invalidate_vcs_cache()
278 self._remote.invalidate_vcs_cache()
280 return verify
279 return verify
281
280
282 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 if commit_id1 == commit_id2:
282 if commit_id1 == commit_id2:
284 return commit_id1
283 return commit_id1
285
284
286 ancestors = self._remote.revs_from_revspec(
285 ancestors = self._remote.revs_from_revspec(
287 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 other_path=repo2.path)
287 other_path=repo2.path)
289 return repo2[ancestors[0]].raw_id if ancestors else None
288 return repo2[ancestors[0]].raw_id if ancestors else None
290
289
291 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 if commit_id1 == commit_id2:
291 if commit_id1 == commit_id2:
293 commits = []
292 commits = []
294 else:
293 else:
295 if merge:
294 if merge:
296 indexes = self._remote.revs_from_revspec(
295 indexes = self._remote.revs_from_revspec(
297 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 else:
298 else:
300 indexes = self._remote.revs_from_revspec(
299 indexes = self._remote.revs_from_revspec(
301 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 commit_id1, other_path=repo2.path)
301 commit_id1, other_path=repo2.path)
303
302
304 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 for idx in indexes]
304 for idx in indexes]
306
305
307 return commits
306 return commits
308
307
309 @staticmethod
308 @staticmethod
310 def check_url(url, config):
309 def check_url(url, config):
311 """
310 """
312 Function will check given url and try to verify if it's a valid
311 Function will check given url and try to verify if it's a valid
313 link. Sometimes it may happen that mercurial will issue a basic
312 link. Sometimes it may happen that mercurial will issue a basic
314 auth request that can cause the whole API to hang when used from python
313 auth request that can cause the whole API to hang when used from python
315 or other external calls.
314 or other external calls.
316
315
317 On failure it'll raise urllib2.HTTPError; the exception is also thrown
316 On failure it'll raise urllib2.HTTPError; the exception is also thrown
318 when the return code is not 200
317 when the return code is not 200
319 """
318 """
320 # check first if it's not a local url
319 # check first if it's not a local url
321 if os.path.isdir(url) or url.startswith('file:'):
320 if os.path.isdir(url) or url.startswith('file:'):
322 return True
321 return True
323
322
324 # Request the _remote to verify the url
323 # Request the _remote to verify the url
325 return connection.Hg.check_url(url, config.serialize())
324 return connection.Hg.check_url(url, config.serialize())
326
325
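Editorial illustration of ``check_url``: an existing local path or ``file:`` url short-circuits to ``True``, anything else is verified through the remote backend (urls below are hypothetical):

MercurialRepository.check_url('/srv/repos/project', repo.config)   # existing dir -> True
MercurialRepository.check_url('https://hg.example.com/project', repo.config)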
327 @staticmethod
326 @staticmethod
328 def is_valid_repository(path):
327 def is_valid_repository(path):
329 return os.path.isdir(os.path.join(path, '.hg'))
328 return os.path.isdir(os.path.join(path, '.hg'))
330
329
331 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 def _init_repo(self, create, src_url=None, update_after_clone=False):
332 """
331 """
333 Function will check for mercurial repository in given path. If there
332 Function will check for mercurial repository in given path. If there
334 is no repository in that path it will raise an exception unless
333 is no repository in that path it will raise an exception unless
335 `create` parameter is set to True - in that case repository would
334 `create` parameter is set to True - in that case repository would
336 be created.
335 be created.
337
336
338 If `src_url` is given, would try to clone repository from the
337 If `src_url` is given, would try to clone repository from the
339 location at given clone_point. Additionally it'll update the
338 location at given clone_point. Additionally it'll update the
340 working copy according to the `update_after_clone` flag.
339 working copy according to the `update_after_clone` flag.
341 """
340 """
342 if create and os.path.exists(self.path):
341 if create and os.path.exists(self.path):
343 raise RepositoryError(
342 raise RepositoryError(
344 "Cannot create repository at %s, location already exist"
343 "Cannot create repository at %s, location already exist"
345 % self.path)
344 % self.path)
346
345
347 if src_url:
346 if src_url:
348 url = str(self._get_url(src_url))
347 url = str(self._get_url(src_url))
349 MercurialRepository.check_url(url, self.config)
348 MercurialRepository.check_url(url, self.config)
350
349
351 self._remote.clone(url, self.path, update_after_clone)
350 self._remote.clone(url, self.path, update_after_clone)
352
351
353 # Don't try to create if we've already cloned repo
352 # Don't try to create if we've already cloned repo
354 create = False
353 create = False
355
354
356 if create:
355 if create:
357 os.makedirs(self.path, mode=0755)
356 os.makedirs(self.path, mode=0755)
358
357
359 self._remote.localrepository(create)
358 self._remote.localrepository(create)
360
359
361 @LazyProperty
360 @LazyProperty
362 def in_memory_commit(self):
361 def in_memory_commit(self):
363 return MercurialInMemoryCommit(self)
362 return MercurialInMemoryCommit(self)
364
363
365 @LazyProperty
364 @LazyProperty
366 def description(self):
365 def description(self):
367 description = self._remote.get_config_value(
366 description = self._remote.get_config_value(
368 'web', 'description', untrusted=True)
367 'web', 'description', untrusted=True)
369 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370
369
371 @LazyProperty
370 @LazyProperty
372 def contact(self):
371 def contact(self):
373 contact = (
372 contact = (
374 self._remote.get_config_value("web", "contact") or
373 self._remote.get_config_value("web", "contact") or
375 self._remote.get_config_value("ui", "username"))
374 self._remote.get_config_value("ui", "username"))
376 return safe_unicode(contact or self.DEFAULT_CONTACT)
375 return safe_unicode(contact or self.DEFAULT_CONTACT)
377
376
378 @LazyProperty
377 @LazyProperty
379 def last_change(self):
378 def last_change(self):
380 """
379 """
381 Returns last change made on this repository as
380 Returns last change made on this repository as
382 `datetime.datetime` object.
381 `datetime.datetime` object.
383 """
382 """
384 try:
383 try:
385 return self.get_commit().date
384 return self.get_commit().date
386 except RepositoryError:
385 except RepositoryError:
387 tzoffset = makedate()[1]
386 tzoffset = makedate()[1]
388 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389
388
390 def _get_fs_mtime(self):
389 def _get_fs_mtime(self):
391 # fallback to filesystem
390 # fallback to filesystem
392 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 st_path = os.path.join(self.path, '.hg', "store")
392 st_path = os.path.join(self.path, '.hg', "store")
394 if os.path.exists(cl_path):
393 if os.path.exists(cl_path):
395 return os.stat(cl_path).st_mtime
394 return os.stat(cl_path).st_mtime
396 else:
395 else:
397 return os.stat(st_path).st_mtime
396 return os.stat(st_path).st_mtime
398
397
399 def _sanitize_commit_idx(self, idx):
398 def _sanitize_commit_idx(self, idx):
400 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
399 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
401 # number. A `long` is treated in the correct way though. So we convert
400 # number. A `long` is treated in the correct way though. So we convert
402 # `int` to `long` here to make sure it is handled correctly.
401 # `int` to `long` here to make sure it is handled correctly.
403 if isinstance(idx, int):
402 if isinstance(idx, int):
404 return long(idx)
403 return long(idx)
405 return idx
404 return idx
406
405
407 def _get_url(self, url):
406 def _get_url(self, url):
408 """
407 """
409 Returns normalized url. If scheme is not given, would fall back
408 Returns normalized url. If scheme is not given, would fall back
410 to filesystem
409 to filesystem
411 (``file:///``) scheme.
410 (``file:///``) scheme.
412 """
411 """
413 url = url.encode('utf8')
412 url = url.encode('utf8')
414 if url != 'default' and '://' not in url:
413 if url != 'default' and '://' not in url:
415 url = "file:" + urllib.pathname2url(url)
414 url = "file:" + urllib.pathname2url(url)
416 return url
415 return url
417
416
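Editorial illustration of the normalization performed by ``_get_url`` (paths are hypothetical):

repo._get_url('/srv/repos/project')        # -> 'file:///srv/repos/project'
repo._get_url('https://hg.example.com/x')  # already has a scheme, returned unchanged
repo._get_url('default')                   # the 'default' alias is kept as-is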
418 def get_hook_location(self):
417 def get_hook_location(self):
419 """
418 """
420 returns absolute path to location where hooks are stored
419 returns absolute path to location where hooks are stored
421 """
420 """
422 return os.path.join(self.path, '.hg', '.hgrc')
421 return os.path.join(self.path, '.hg', '.hgrc')
423
422
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
425 """
424 """
426 Returns ``MercurialCommit`` object representing repository's
425 Returns ``MercurialCommit`` object representing repository's
427 commit at the given `commit_id` or `commit_idx`.
426 commit at the given `commit_id` or `commit_idx`.
428 """
427 """
429 if self.is_empty():
428 if self.is_empty():
430 raise EmptyRepositoryError("There are no commits yet")
429 raise EmptyRepositoryError("There are no commits yet")
431
430
432 if commit_id is not None:
431 if commit_id is not None:
433 self._validate_commit_id(commit_id)
432 self._validate_commit_id(commit_id)
434 try:
433 try:
435 idx = self._commit_ids[commit_id]
434 idx = self._commit_ids[commit_id]
436 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
435 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
437 except KeyError:
436 except KeyError:
438 pass
437 pass
439 elif commit_idx is not None:
438 elif commit_idx is not None:
440 self._validate_commit_idx(commit_idx)
439 self._validate_commit_idx(commit_idx)
441 commit_idx = self._sanitize_commit_idx(commit_idx)
440 commit_idx = self._sanitize_commit_idx(commit_idx)
442 try:
441 try:
443 id_ = self.commit_ids[commit_idx]
442 id_ = self.commit_ids[commit_idx]
444 if commit_idx < 0:
443 if commit_idx < 0:
445 commit_idx += len(self.commit_ids)
444 commit_idx += len(self.commit_ids)
446 return MercurialCommit(
445 return MercurialCommit(
447 self, id_, commit_idx, pre_load=pre_load)
446 self, id_, commit_idx, pre_load=pre_load)
448 except IndexError:
447 except IndexError:
449 commit_id = commit_idx
448 commit_id = commit_idx
450 else:
449 else:
451 commit_id = "tip"
450 commit_id = "tip"
452
451
453 # TODO Paris: Ugly hack to "serialize" long for msgpack
452 # TODO Paris: Ugly hack to "serialize" long for msgpack
454 if isinstance(commit_id, long):
453 if isinstance(commit_id, long):
455 commit_id = float(commit_id)
454 commit_id = float(commit_id)
456
455
457 if isinstance(commit_id, unicode):
456 if isinstance(commit_id, unicode):
458 commit_id = safe_str(commit_id)
457 commit_id = safe_str(commit_id)
459
458
460 try:
459 try:
461 raw_id, idx = self._remote.lookup(commit_id, both=True)
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
462 except CommitDoesNotExistError:
461 except CommitDoesNotExistError:
463 msg = "Commit %s does not exist for %s" % (
462 msg = "Commit %s does not exist for %s" % (
464 commit_id, self)
463 commit_id, self)
465 raise CommitDoesNotExistError(msg)
464 raise CommitDoesNotExistError(msg)
466
465
467 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
468
467
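An editorial sketch of the two lookup modes handled by ``get_commit`` (ids are whatever the repository contains):

tip = repo.get_commit()                          # no arguments -> "tip"
first = repo.get_commit(commit_idx=0)            # lookup by index
again = repo.get_commit(commit_id=first.raw_id)  # lookup by full commit id
assert first.raw_id == again.raw_id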
469 def get_commits(
468 def get_commits(
470 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
471 branch_name=None, show_hidden=False, pre_load=None):
470 branch_name=None, show_hidden=False, pre_load=None):
472 """
471 """
473 Returns generator of ``MercurialCommit`` objects from start to end
472 Returns generator of ``MercurialCommit`` objects from start to end
474 (both are inclusive)
473 (both are inclusive)
475
474
476 :param start_id: None, str(commit_id)
475 :param start_id: None, str(commit_id)
477 :param end_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
478 :param start_date: if specified, commits with commit date less than
477 :param start_date: if specified, commits with commit date less than
479 ``start_date`` would be filtered out from returned set
478 ``start_date`` would be filtered out from returned set
480 :param end_date: if specified, commits with commit date greater than
479 :param end_date: if specified, commits with commit date greater than
481 ``end_date`` would be filtered out from returned set
480 ``end_date`` would be filtered out from returned set
482 :param branch_name: if specified, commits not reachable from given
481 :param branch_name: if specified, commits not reachable from given
483 branch would be filtered out from returned set
482 branch would be filtered out from returned set
484 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
485 Mercurial evolve
484 Mercurial evolve
486 :raise BranchDoesNotExistError: If given ``branch_name`` does not
485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
487 exist.
486 exist.
488 :raise CommitDoesNotExistError: If commit for given ``start`` or
487 :raise CommitDoesNotExistError: If commit for given ``start`` or
489 ``end`` could not be found.
488 ``end`` could not be found.
490 """
489 """
491 # actually we should check now if it's not an empty repo
490 # actually we should check now if it's not an empty repo
492 branch_ancestors = False
491 branch_ancestors = False
493 if self.is_empty():
492 if self.is_empty():
494 raise EmptyRepositoryError("There are no commits yet")
493 raise EmptyRepositoryError("There are no commits yet")
495 self._validate_branch_name(branch_name)
494 self._validate_branch_name(branch_name)
496
495
497 if start_id is not None:
496 if start_id is not None:
498 self._validate_commit_id(start_id)
497 self._validate_commit_id(start_id)
499 c_start = self.get_commit(commit_id=start_id)
498 c_start = self.get_commit(commit_id=start_id)
500 start_pos = self._commit_ids[c_start.raw_id]
499 start_pos = self._commit_ids[c_start.raw_id]
501 else:
500 else:
502 start_pos = None
501 start_pos = None
503
502
504 if end_id is not None:
503 if end_id is not None:
505 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
506 c_end = self.get_commit(commit_id=end_id)
505 c_end = self.get_commit(commit_id=end_id)
507 end_pos = max(0, self._commit_ids[c_end.raw_id])
506 end_pos = max(0, self._commit_ids[c_end.raw_id])
508 else:
507 else:
509 end_pos = None
508 end_pos = None
510
509
511 if None not in [start_id, end_id] and start_pos > end_pos:
510 if None not in [start_id, end_id] and start_pos > end_pos:
512 raise RepositoryError(
511 raise RepositoryError(
513 "Start commit '%s' cannot be after end commit '%s'" %
512 "Start commit '%s' cannot be after end commit '%s'" %
514 (start_id, end_id))
513 (start_id, end_id))
515
514
516 if end_pos is not None:
515 if end_pos is not None:
517 end_pos += 1
516 end_pos += 1
518
517
519 commit_filter = []
518 commit_filter = []
520
519
521 if branch_name and not branch_ancestors:
520 if branch_name and not branch_ancestors:
522 commit_filter.append('branch("%s")' % (branch_name,))
521 commit_filter.append('branch("%s")' % (branch_name,))
523 elif branch_name and branch_ancestors:
522 elif branch_name and branch_ancestors:
524 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
525
524
526 if start_date and not end_date:
525 if start_date and not end_date:
527 commit_filter.append('date(">%s")' % (start_date,))
526 commit_filter.append('date(">%s")' % (start_date,))
528 if end_date and not start_date:
527 if end_date and not start_date:
529 commit_filter.append('date("<%s")' % (end_date,))
528 commit_filter.append('date("<%s")' % (end_date,))
530 if start_date and end_date:
529 if start_date and end_date:
531 commit_filter.append(
530 commit_filter.append(
532 'date(">%s") and date("<%s")' % (start_date, end_date))
531 'date(">%s") and date("<%s")' % (start_date, end_date))
533
532
534 if not show_hidden:
533 if not show_hidden:
535 commit_filter.append('not obsolete()')
534 commit_filter.append('not obsolete()')
536 commit_filter.append('not hidden()')
535 commit_filter.append('not hidden()')
537
536
538 # TODO: johbo: Figure out a simpler way for this solution
537 # TODO: johbo: Figure out a simpler way for this solution
539 collection_generator = CollectionGenerator
538 collection_generator = CollectionGenerator
540 if commit_filter:
539 if commit_filter:
541 commit_filter = ' and '.join(map(safe_str, commit_filter))
540 commit_filter = ' and '.join(map(safe_str, commit_filter))
542 revisions = self._remote.rev_range([commit_filter])
541 revisions = self._remote.rev_range([commit_filter])
543 collection_generator = MercurialIndexBasedCollectionGenerator
542 collection_generator = MercurialIndexBasedCollectionGenerator
544 else:
543 else:
545 revisions = self.commit_ids
544 revisions = self.commit_ids
546
545
547 if start_pos or end_pos:
546 if start_pos or end_pos:
548 revisions = revisions[start_pos:end_pos]
547 revisions = revisions[start_pos:end_pos]
549
548
550 return collection_generator(self, revisions, pre_load=pre_load)
549 return collection_generator(self, revisions, pre_load=pre_load)
551
550
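An editorial sketch of ``get_commits`` using the branch and date filters described above (branch name and dates are placeholders; the ``pre_load`` attribute names are an assumption):

import datetime

commits = repo.get_commits(
    branch_name='default',
    start_date=datetime.datetime(2018, 1, 1),
    end_date=datetime.datetime(2018, 2, 1),
    pre_load=['author', 'date', 'message'])
for commit in commits:
    print('%s %s' % (commit.short_id, commit.message))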
552 def pull(self, url, commit_ids=None):
551 def pull(self, url, commit_ids=None):
553 """
552 """
554 Tries to pull changes from external location.
553 Tries to pull changes from external location.
555
554
556 :param commit_ids: Optional. Can be set to a list of commit ids
555 :param commit_ids: Optional. Can be set to a list of commit ids
557 which shall be pulled from the other repository.
556 which shall be pulled from the other repository.
558 """
557 """
559 url = self._get_url(url)
558 url = self._get_url(url)
560 self._remote.pull(url, commit_ids=commit_ids)
559 self._remote.pull(url, commit_ids=commit_ids)
561 self._remote.invalidate_vcs_cache()
560 self._remote.invalidate_vcs_cache()
562
561
563 def push(self, url):
562 def push(self, url):
564 url = self._get_url(url)
563 url = self._get_url(url)
565 self._remote.sync_push(url)
564 self._remote.sync_push(url)
566
565
567 def _local_clone(self, clone_path):
566 def _local_clone(self, clone_path):
568 """
567 """
569 Create a local clone of the current repo.
568 Create a local clone of the current repo.
570 """
569 """
571 self._remote.clone(self.path, clone_path, update_after_clone=True,
570 self._remote.clone(self.path, clone_path, update_after_clone=True,
572 hooks=False)
571 hooks=False)
573
572
574 def _update(self, revision, clean=False):
573 def _update(self, revision, clean=False):
575 """
574 """
576 Update the working copy to the specified revision.
575 Update the working copy to the specified revision.
577 """
576 """
578 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
577 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
579 self._remote.update(revision, clean=clean)
578 self._remote.update(revision, clean=clean)
580
579
581 def _identify(self):
580 def _identify(self):
582 """
581 """
583 Return the current state of the working directory.
582 Return the current state of the working directory.
584 """
583 """
585 return self._remote.identify().strip().rstrip('+')
584 return self._remote.identify().strip().rstrip('+')
586
585
587 def _heads(self, branch=None):
586 def _heads(self, branch=None):
588 """
587 """
589 Return the commit ids of the repository heads.
588 Return the commit ids of the repository heads.
590 """
589 """
591 return self._remote.heads(branch=branch).strip().split(' ')
590 return self._remote.heads(branch=branch).strip().split(' ')
592
591
593 def _ancestor(self, revision1, revision2):
592 def _ancestor(self, revision1, revision2):
594 """
593 """
595 Return the common ancestor of the two revisions.
594 Return the common ancestor of the two revisions.
596 """
595 """
597 return self._remote.ancestor(revision1, revision2)
596 return self._remote.ancestor(revision1, revision2)
598
597
599 def _local_push(
598 def _local_push(
600 self, revision, repository_path, push_branches=False,
599 self, revision, repository_path, push_branches=False,
601 enable_hooks=False):
600 enable_hooks=False):
602 """
601 """
603 Push the given revision to the specified repository.
602 Push the given revision to the specified repository.
604
603
605 :param push_branches: allow to create branches in the target repo.
604 :param push_branches: allow to create branches in the target repo.
606 """
605 """
607 self._remote.push(
606 self._remote.push(
608 [revision], repository_path, hooks=enable_hooks,
607 [revision], repository_path, hooks=enable_hooks,
609 push_branches=push_branches)
608 push_branches=push_branches)
610
609
611 def _local_merge(self, target_ref, merge_message, user_name, user_email,
610 def _local_merge(self, target_ref, merge_message, user_name, user_email,
612 source_ref, use_rebase=False, dry_run=False):
611 source_ref, use_rebase=False, dry_run=False):
613 """
612 """
614 Merge the given source_revision into the checked out revision.
613 Merge the given source_revision into the checked out revision.
615
614
616 Returns the commit id of the merge and a boolean indicating if the
615 Returns the commit id of the merge and a boolean indicating if the
617 commit needs to be pushed.
616 commit needs to be pushed.
618 """
617 """
619 self._update(target_ref.commit_id)
618 self._update(target_ref.commit_id)
620
619
621 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
620 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
622 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
621 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
623
622
624 if ancestor == source_ref.commit_id:
623 if ancestor == source_ref.commit_id:
625 # Nothing to do, the changes were already integrated
624 # Nothing to do, the changes were already integrated
626 return target_ref.commit_id, False
625 return target_ref.commit_id, False
627
626
628 elif ancestor == target_ref.commit_id and is_the_same_branch:
627 elif ancestor == target_ref.commit_id and is_the_same_branch:
629 # In this case we should force a commit message
628 # In this case we should force a commit message
630 return source_ref.commit_id, True
629 return source_ref.commit_id, True
631
630
632 if use_rebase:
631 if use_rebase:
633 try:
632 try:
634 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
633 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
635 target_ref.commit_id)
634 target_ref.commit_id)
636 self.bookmark(bookmark_name, revision=source_ref.commit_id)
635 self.bookmark(bookmark_name, revision=source_ref.commit_id)
637 self._remote.rebase(
636 self._remote.rebase(
638 source=source_ref.commit_id, dest=target_ref.commit_id)
637 source=source_ref.commit_id, dest=target_ref.commit_id)
639 self._remote.invalidate_vcs_cache()
638 self._remote.invalidate_vcs_cache()
640 self._update(bookmark_name)
639 self._update(bookmark_name)
641 return self._identify(), True
640 return self._identify(), True
642 except RepositoryError:
641 except RepositoryError:
643 # The rebase-abort may raise another exception which 'hides'
642 # The rebase-abort may raise another exception which 'hides'
644 # the original one, therefore we log it here.
643 # the original one, therefore we log it here.
645 log.exception('Error while rebasing shadow repo during merge.')
644 log.exception('Error while rebasing shadow repo during merge.')
646
645
647 # Cleanup any rebase leftovers
646 # Cleanup any rebase leftovers
648 self._remote.invalidate_vcs_cache()
647 self._remote.invalidate_vcs_cache()
649 self._remote.rebase(abort=True)
648 self._remote.rebase(abort=True)
650 self._remote.invalidate_vcs_cache()
649 self._remote.invalidate_vcs_cache()
651 self._remote.update(clean=True)
650 self._remote.update(clean=True)
652 raise
651 raise
653 else:
652 else:
654 try:
653 try:
655 self._remote.merge(source_ref.commit_id)
654 self._remote.merge(source_ref.commit_id)
656 self._remote.invalidate_vcs_cache()
655 self._remote.invalidate_vcs_cache()
657 self._remote.commit(
656 self._remote.commit(
658 message=safe_str(merge_message),
657 message=safe_str(merge_message),
659 username=safe_str('%s <%s>' % (user_name, user_email)))
658 username=safe_str('%s <%s>' % (user_name, user_email)))
660 self._remote.invalidate_vcs_cache()
659 self._remote.invalidate_vcs_cache()
661 return self._identify(), True
660 return self._identify(), True
662 except RepositoryError:
661 except RepositoryError:
663 # Cleanup any merge leftovers
662 # Cleanup any merge leftovers
664 self._remote.update(clean=True)
663 self._remote.update(clean=True)
665 raise
664 raise
666
665
667 def _local_close(self, target_ref, user_name, user_email,
666 def _local_close(self, target_ref, user_name, user_email,
668 source_ref, close_message=''):
667 source_ref, close_message=''):
669 """
668 """
670 Close the branch of the given source_revision
669 Close the branch of the given source_revision
671
670
672 Returns the commit id of the close and a boolean indicating if the
671 Returns the commit id of the close and a boolean indicating if the
673 commit needs to be pushed.
672 commit needs to be pushed.
674 """
673 """
675 self._update(source_ref.commit_id)
674 self._update(source_ref.commit_id)
676 message = close_message or "Closing branch: `{}`".format(source_ref.name)
675 message = close_message or "Closing branch: `{}`".format(source_ref.name)
677 try:
676 try:
678 self._remote.commit(
677 self._remote.commit(
679 message=safe_str(message),
678 message=safe_str(message),
680 username=safe_str('%s <%s>' % (user_name, user_email)),
679 username=safe_str('%s <%s>' % (user_name, user_email)),
681 close_branch=True)
680 close_branch=True)
682 self._remote.invalidate_vcs_cache()
681 self._remote.invalidate_vcs_cache()
683 return self._identify(), True
682 return self._identify(), True
684 except RepositoryError:
683 except RepositoryError:
685 # Cleanup any commit leftovers
684 # Cleanup any commit leftovers
686 self._remote.update(clean=True)
685 self._remote.update(clean=True)
687 raise
686 raise
688
687
689 def _is_the_same_branch(self, target_ref, source_ref):
688 def _is_the_same_branch(self, target_ref, source_ref):
690 return (
689 return (
691 self._get_branch_name(target_ref) ==
690 self._get_branch_name(target_ref) ==
692 self._get_branch_name(source_ref))
691 self._get_branch_name(source_ref))
693
692
694 def _get_branch_name(self, ref):
693 def _get_branch_name(self, ref):
695 if ref.type == 'branch':
694 if ref.type == 'branch':
696 return ref.name
695 return ref.name
697 return self._remote.ctx_branch(ref.commit_id)
696 return self._remote.ctx_branch(ref.commit_id)
698
697
699 def _get_shadow_repository_path(self, workspace_id):
698 def _get_shadow_repository_path(self, workspace_id):
700 # The name of the shadow repository must start with '.', so it is
699 # The name of the shadow repository must start with '.', so it is
701 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
700 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
702 return os.path.join(
701 return os.path.join(
703 os.path.dirname(self.path),
702 os.path.dirname(self.path),
704 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
703 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
705
704
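Editorial note: with the naming scheme above, a repository at ``/srv/repos/project`` and workspace id ``pr-42`` (hypothetical values) would resolve to a shadow path like this:

import os

repo_path = '/srv/repos/project'
workspace_id = 'pr-42'
shadow = os.path.join(
    os.path.dirname(repo_path),
    '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
# shadow == '/srv/repos/.__shadow_project_pr-42'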
706 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
705 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
707 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
706 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
708 if not os.path.exists(shadow_repository_path):
707 if not os.path.exists(shadow_repository_path):
709 self._local_clone(shadow_repository_path)
708 self._local_clone(shadow_repository_path)
710 log.debug(
709 log.debug(
711 'Prepared shadow repository in %s', shadow_repository_path)
710 'Prepared shadow repository in %s', shadow_repository_path)
712
711
713 return shadow_repository_path
712 return shadow_repository_path
714
713
715 def cleanup_merge_workspace(self, workspace_id):
714 def cleanup_merge_workspace(self, workspace_id):
716 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
715 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
717 shutil.rmtree(shadow_repository_path, ignore_errors=True)
716 shutil.rmtree(shadow_repository_path, ignore_errors=True)
718
717
719 def _merge_repo(self, shadow_repository_path, target_ref,
718 def _merge_repo(self, shadow_repository_path, target_ref,
720 source_repo, source_ref, merge_message,
719 source_repo, source_ref, merge_message,
721 merger_name, merger_email, dry_run=False,
720 merger_name, merger_email, dry_run=False,
722 use_rebase=False, close_branch=False):
721 use_rebase=False, close_branch=False):
723
722
724 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
723 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
725 'rebase' if use_rebase else 'merge', dry_run)
724 'rebase' if use_rebase else 'merge', dry_run)
726 if target_ref.commit_id not in self._heads():
725 if target_ref.commit_id not in self._heads():
727 return MergeResponse(
726 return MergeResponse(
728 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
727 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
729
728
730 try:
729 try:
731 if (target_ref.type == 'branch' and
730 if (target_ref.type == 'branch' and
732 len(self._heads(target_ref.name)) != 1):
731 len(self._heads(target_ref.name)) != 1):
733 return MergeResponse(
732 return MergeResponse(
734 False, False, None,
733 False, False, None,
735 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
734 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
736 except CommitDoesNotExistError:
735 except CommitDoesNotExistError:
737 log.exception('Failure when looking up branch heads on hg target')
736 log.exception('Failure when looking up branch heads on hg target')
738 return MergeResponse(
737 return MergeResponse(
739 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
738 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
740
739
741 shadow_repo = self._get_shadow_instance(shadow_repository_path)
740 shadow_repo = self._get_shadow_instance(shadow_repository_path)
742
741
743 log.debug('Pulling in target reference %s', target_ref)
742 log.debug('Pulling in target reference %s', target_ref)
744 self._validate_pull_reference(target_ref)
743 self._validate_pull_reference(target_ref)
745 shadow_repo._local_pull(self.path, target_ref)
744 shadow_repo._local_pull(self.path, target_ref)
746 try:
745 try:
747 log.debug('Pulling in source reference %s', source_ref)
746 log.debug('Pulling in source reference %s', source_ref)
748 source_repo._validate_pull_reference(source_ref)
747 source_repo._validate_pull_reference(source_ref)
749 shadow_repo._local_pull(source_repo.path, source_ref)
748 shadow_repo._local_pull(source_repo.path, source_ref)
750 except CommitDoesNotExistError:
749 except CommitDoesNotExistError:
751 log.exception('Failure when doing local pull on hg shadow repo')
750 log.exception('Failure when doing local pull on hg shadow repo')
752 return MergeResponse(
751 return MergeResponse(
753 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
752 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
754
753
755 merge_ref = None
754 merge_ref = None
756 merge_commit_id = None
755 merge_commit_id = None
757 close_commit_id = None
756 close_commit_id = None
758 merge_failure_reason = MergeFailureReason.NONE
757 merge_failure_reason = MergeFailureReason.NONE
759
758
760 # enforce that close branch should be used only in case we source from
759 # enforce that close branch should be used only in case we source from
761 # an actual Branch
760 # an actual Branch
762 close_branch = close_branch and source_ref.type == 'branch'
761 close_branch = close_branch and source_ref.type == 'branch'
763
762
764 # don't allow to close branch if source and target are the same
763 # don't allow to close branch if source and target are the same
765 close_branch = close_branch and source_ref.name != target_ref.name
764 close_branch = close_branch and source_ref.name != target_ref.name
766
765
767 needs_push_on_close = False
766 needs_push_on_close = False
768 if close_branch and not use_rebase and not dry_run:
767 if close_branch and not use_rebase and not dry_run:
769 try:
768 try:
770 close_commit_id, needs_push_on_close = shadow_repo._local_close(
769 close_commit_id, needs_push_on_close = shadow_repo._local_close(
771 target_ref, merger_name, merger_email, source_ref)
770 target_ref, merger_name, merger_email, source_ref)
772 merge_possible = True
771 merge_possible = True
773 except RepositoryError:
772 except RepositoryError:
774 log.exception(
773 log.exception(
775 'Failure when doing close branch on hg shadow repo')
774 'Failure when doing close branch on hg shadow repo')
776 merge_possible = False
775 merge_possible = False
777 merge_failure_reason = MergeFailureReason.MERGE_FAILED
776 merge_failure_reason = MergeFailureReason.MERGE_FAILED
778 else:
777 else:
779 merge_possible = True
778 merge_possible = True
780
779
780 needs_push = False
781 if merge_possible:
781 if merge_possible:
782 try:
782 try:
783 merge_commit_id, needs_push = shadow_repo._local_merge(
783 merge_commit_id, needs_push = shadow_repo._local_merge(
784 target_ref, merge_message, merger_name, merger_email,
784 target_ref, merge_message, merger_name, merger_email,
785 source_ref, use_rebase=use_rebase, dry_run=dry_run)
785 source_ref, use_rebase=use_rebase, dry_run=dry_run)
786 merge_possible = True
786 merge_possible = True
787
787
788 # read the state of the close action, which
788 # read the state of the close action, which
789 # may have required a push
789 # may have required a push
790 needs_push = needs_push or needs_push_on_close
790 needs_push = needs_push or needs_push_on_close
791
791
792 # Set a bookmark pointing to the merge commit. This bookmark
792 # Set a bookmark pointing to the merge commit. This bookmark
793 # may be used to easily identify the last successful merge
793 # may be used to easily identify the last successful merge
794 # commit in the shadow repository.
794 # commit in the shadow repository.
795 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
795 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
796 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
796 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
797 except SubrepoMergeError:
797 except SubrepoMergeError:
798 log.exception(
798 log.exception(
799 'Subrepo merge error during local merge on hg shadow repo.')
799 'Subrepo merge error during local merge on hg shadow repo.')
800 merge_possible = False
800 merge_possible = False
801 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
801 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
802 needs_push = False
802 needs_push = False
803 except RepositoryError:
803 except RepositoryError:
804 log.exception('Failure when doing local merge on hg shadow repo')
804 log.exception('Failure when doing local merge on hg shadow repo')
805 merge_possible = False
805 merge_possible = False
806 merge_failure_reason = MergeFailureReason.MERGE_FAILED
806 merge_failure_reason = MergeFailureReason.MERGE_FAILED
807 needs_push = False
807 needs_push = False
808
808
809 if merge_possible and not dry_run:
809 if merge_possible and not dry_run:
810 if needs_push:
810 if needs_push:
811 # In case the target is a bookmark, update it, so after pushing
811 # In case the target is a bookmark, update it, so after pushing
812 # the bookmarks is also updated in the target.
812 # the bookmark is also updated in the target.
812 # the bookmark is also updated in the target.
813 if target_ref.type == 'book':
814 shadow_repo.bookmark(
814 shadow_repo.bookmark(
815 target_ref.name, revision=merge_commit_id)
815 target_ref.name, revision=merge_commit_id)
816 try:
816 try:
817 shadow_repo_with_hooks = self._get_shadow_instance(
817 shadow_repo_with_hooks = self._get_shadow_instance(
818 shadow_repository_path,
818 shadow_repository_path,
819 enable_hooks=True)
819 enable_hooks=True)
820 # This is the actual merge action, we push from shadow
820 # This is the actual merge action, we push from shadow
821 # into origin.
821 # into origin.
822 # Note: the push_branches option will push any new branch
822 # Note: the push_branches option will push any new branch
823 # defined in the source repository to the target. This may
823 # defined in the source repository to the target. This may
824 # be dangerous as branches are permanent in Mercurial.
824 # be dangerous as branches are permanent in Mercurial.
825 # This feature was requested in issue #441.
825 # This feature was requested in issue #441.
826 shadow_repo_with_hooks._local_push(
826 shadow_repo_with_hooks._local_push(
827 merge_commit_id, self.path, push_branches=True,
827 merge_commit_id, self.path, push_branches=True,
828 enable_hooks=True)
828 enable_hooks=True)
829
829
830 # maybe we also need to push the close_commit_id
830 # maybe we also need to push the close_commit_id
831 if close_commit_id:
831 if close_commit_id:
832 shadow_repo_with_hooks._local_push(
832 shadow_repo_with_hooks._local_push(
833 close_commit_id, self.path, push_branches=True,
833 close_commit_id, self.path, push_branches=True,
834 enable_hooks=True)
834 enable_hooks=True)
835 merge_succeeded = True
835 merge_succeeded = True
836 except RepositoryError:
836 except RepositoryError:
837 log.exception(
837 log.exception(
838 'Failure when doing local push from the shadow '
838 'Failure when doing local push from the shadow '
839 'repository to the target repository.')
839 'repository to the target repository.')
840 merge_succeeded = False
840 merge_succeeded = False
841 merge_failure_reason = MergeFailureReason.PUSH_FAILED
841 merge_failure_reason = MergeFailureReason.PUSH_FAILED
842 else:
842 else:
843 merge_succeeded = True
843 merge_succeeded = True
844 else:
844 else:
845 merge_succeeded = False
845 merge_succeeded = False
846
846
847 return MergeResponse(
847 return MergeResponse(
848 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
848 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
849
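The method above reports its outcome solely through the four positional values handed to MergeResponse: whether a merge is possible, whether it was actually executed, the reference of the merge commit, and a failure reason. A minimal caller-side sketch of reading that outcome, assuming only those four fields; ShadowMergeOutcome and describe_outcome are illustrative names, not part of this changeset:

from collections import namedtuple

# Hypothetical mirror of the four positional arguments passed to
# MergeResponse above; the real class may expose different attribute names.
ShadowMergeOutcome = namedtuple(
    'ShadowMergeOutcome',
    ['merge_possible', 'merge_succeeded', 'merge_ref', 'merge_failure_reason'])

def describe_outcome(outcome):
    # A dry run (or a failed push) leaves merge_succeeded False even when
    # the local merge in the shadow repository worked.
    if not outcome.merge_possible:
        return 'merge not possible, reason: {}'.format(
            outcome.merge_failure_reason)
    if not outcome.merge_succeeded:
        return 'merge possible, but not pushed to the target'
    return 'merged, shadow bookmark {} at {}'.format(
        outcome.merge_ref.name, outcome.merge_ref.commit_id)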
849
850 def _get_shadow_instance(
850 def _get_shadow_instance(
851 self, shadow_repository_path, enable_hooks=False):
851 self, shadow_repository_path, enable_hooks=False):
852 config = self.config.copy()
852 config = self.config.copy()
853 if not enable_hooks:
853 if not enable_hooks:
854 config.clear_section('hooks')
854 config.clear_section('hooks')
855 return MercurialRepository(shadow_repository_path, config)
855 return MercurialRepository(shadow_repository_path, config)
856
856
857 def _validate_pull_reference(self, reference):
857 def _validate_pull_reference(self, reference):
858 if not (reference.name in self.bookmarks or
858 if not (reference.name in self.bookmarks or
859 reference.name in self.branches or
859 reference.name in self.branches or
860 self.get_commit(reference.commit_id)):
860 self.get_commit(reference.commit_id)):
861 raise CommitDoesNotExistError(
861 raise CommitDoesNotExistError(
862 'Unknown branch, bookmark or commit id')
862 'Unknown branch, bookmark or commit id')
863
863
864 def _local_pull(self, repository_path, reference):
864 def _local_pull(self, repository_path, reference):
865 """
865 """
866 Fetch a branch, bookmark or commit from a local repository.
866 Fetch a branch, bookmark or commit from a local repository.
867 """
867 """
868 repository_path = os.path.abspath(repository_path)
868 repository_path = os.path.abspath(repository_path)
869 if repository_path == self.path:
869 if repository_path == self.path:
870 raise ValueError('Cannot pull from the same repository')
870 raise ValueError('Cannot pull from the same repository')
871
871
872 reference_type_to_option_name = {
872 reference_type_to_option_name = {
873 'book': 'bookmark',
873 'book': 'bookmark',
874 'branch': 'branch',
874 'branch': 'branch',
875 }
875 }
876 option_name = reference_type_to_option_name.get(
876 option_name = reference_type_to_option_name.get(
877 reference.type, 'revision')
877 reference.type, 'revision')
878
878
879 if option_name == 'revision':
879 if option_name == 'revision':
880 ref = reference.commit_id
880 ref = reference.commit_id
881 else:
881 else:
882 ref = reference.name
882 ref = reference.name
883
883
884 options = {option_name: [ref]}
884 options = {option_name: [ref]}
885 self._remote.pull_cmd(repository_path, hooks=False, **options)
885 self._remote.pull_cmd(repository_path, hooks=False, **options)
886 self._remote.invalidate_vcs_cache()
886 self._remote.invalidate_vcs_cache()
887
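For reference, the mapping above means a bookmark or branch reference is pulled by name, while anything else falls back to the exact commit id. A standalone sketch of that selection, following the (type, name, commit_id) layout the Reference tuples use in this file; build_pull_options is an illustrative name:

def build_pull_options(ref_type, ref_name, commit_id):
    # Mirrors reference_type_to_option_name in _local_pull: bookmarks and
    # branches are pulled by name, everything else by revision.
    option_name = {'book': 'bookmark', 'branch': 'branch'}.get(
        ref_type, 'revision')
    ref = commit_id if option_name == 'revision' else ref_name
    return {option_name: [ref]}

# build_pull_options('book', 'pr-merge', 'cafe01')  -> {'bookmark': ['pr-merge']}
# build_pull_options('tag', 'v1.0', 'cafe01')       -> {'revision': ['cafe01']}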
887
888 def bookmark(self, bookmark, revision=None):
888 def bookmark(self, bookmark, revision=None):
889 if isinstance(bookmark, unicode):
889 if isinstance(bookmark, unicode):
890 bookmark = safe_str(bookmark)
890 bookmark = safe_str(bookmark)
891 self._remote.bookmark(bookmark, revision=revision)
891 self._remote.bookmark(bookmark, revision=revision)
892 self._remote.invalidate_vcs_cache()
892 self._remote.invalidate_vcs_cache()
893
893
894 def get_path_permissions(self, username):
894 def get_path_permissions(self, username):
895 hgacl_file = self.path + '/.hg/hgacl'
895 hgacl_file = os.path.join(self.path, '.hg/hgacl')
896
897 def read_patterns(suffix):
898 svalue = None
899 try:
900 svalue = hgacl.get('narrowhgacl', username + suffix)
901 except configparser.NoOptionError:
902 try:
903 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
904 except configparser.NoOptionError:
905 pass
906 if not svalue:
907 return None
908 result = ['/']
909 for pattern in svalue.split():
910 result.append(pattern)
911 if '*' not in pattern and '?' not in pattern:
912 result.append(pattern + '/*')
913 return result
914
896 if os.path.exists(hgacl_file):
915 if os.path.exists(hgacl_file):
897 try:
916 try:
898 hgacl = ConfigParser.RawConfigParser()
917 hgacl = configparser.RawConfigParser()
899 hgacl.read(hgacl_file)
918 hgacl.read(hgacl_file)
900 def read_patterns(suffix):
919
901 svalue = None
902 try:
903 svalue = hgacl.get('narrowhgacl', username + suffix)
904 except ConfigParser.NoOptionError:
905 try:
906 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
907 except ConfigParser.NoOptionError:
908 pass
909 if not svalue:
910 return None
911 result = ['/']
912 for pattern in svalue.split():
913 result.append(pattern)
914 if '*' not in pattern and '?' not in pattern:
915 result.append(pattern + '/*')
916 return result
917 includes = read_patterns('.includes')
920 includes = read_patterns('.includes')
918 excludes = read_patterns('.excludes')
921 excludes = read_patterns('.excludes')
919 return BasePathPermissionChecker.create_from_patterns(includes, excludes)
922 return BasePathPermissionChecker.create_from_patterns(
923 includes, excludes)
920 except BaseException as e:
924 except BaseException as e:
921 raise exceptions.RepositoryRequirementError('Cannot read ACL settings for {}: {}'.format(self.name, e))
925 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
926 hgacl_file, self.name, e)
927 raise exceptions.RepositoryRequirementError(msg)
922 else:
928 else:
923 return None
929 return None
924
930
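The read_patterns helper above looks for whitespace-separated patterns under <username>.includes / <username>.excludes in a [narrowhgacl] section, falling back to default.includes / default.excludes, and expands them so that the root is always allowed and a pattern without wildcards also covers everything beneath it. A self-contained sketch of that expansion; the sample section in the comment is illustrative, not taken from this changeset:

def expand_patterns(svalue):
    # Same expansion as read_patterns above: always include '/', keep each
    # pattern, and let a wildcard-free pattern also match its subtree.
    if not svalue:
        return None
    result = ['/']
    for pattern in svalue.split():
        result.append(pattern)
        if '*' not in pattern and '?' not in pattern:
            result.append(pattern + '/*')
    return result

# A hypothetical .hg/hgacl the code above could parse:
#
#   [narrowhgacl]
#   alice.includes = docs/* src/app
#   default.excludes = secrets
#
# expand_patterns('docs/* src/app') -> ['/', 'docs/*', 'src/app', 'src/app/*']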
931
925 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
932 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
926
933
927 def _commit_factory(self, commit_id):
934 def _commit_factory(self, commit_id):
928 return self.repo.get_commit(
935 return self.repo.get_commit(
929 commit_idx=commit_id, pre_load=self.pre_load)
936 commit_idx=commit_id, pre_load=self.pre_load)
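MercurialIndexBasedCollectionGenerator resolves collection members by repository index (Mercurial's local revision number) rather than by hash, as the commit_idx keyword above shows. A minimal sketch of the two lookup spellings, assuming only the get_commit keywords used in this file; load_commit and its arguments are illustrative:

def load_commit(repo, idx=None, commit_hash=None):
    # commit_idx addresses a commit by its position in the repository,
    # commit_id by its identifier; the generator above uses the former.
    if idx is not None:
        return repo.get_commit(commit_idx=idx)
    return repo.get_commit(commit_id=commit_hash)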