code: code fixes and small adjustments
marcink -
r2623:0cd3ef36 default
@@ -0,0 +1,1 b''
1 from pyramid.compat import configparser
\ No newline at end of file
@@ -1,625 +1,628 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 26
27 27 from rhodecode.lib import helpers as h, diffs
28 28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
29 29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 30 from rhodecode.model import repo
31 31 from rhodecode.model import repo_group
32 32 from rhodecode.model import user_group
33 33 from rhodecode.model import user
34 34 from rhodecode.model.db import User
35 35 from rhodecode.model.scm import ScmModel
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 ADMIN_PREFIX = '/_admin'
41 41 STATIC_FILE_PREFIX = '/_static'
42 42
43 43 URL_NAME_REQUIREMENTS = {
44 44 # group names can have slashes in them, but they must not end with a slash
45 45 'group_name': r'.*?[^/]',
46 46 'repo_group_name': r'.*?[^/]',
47 47 # repo names can have a slash in them, but they must not end with a slash
48 48 'repo_name': r'.*?[^/]',
49 49 # file path eats up everything at the end
50 50 'f_path': r'.*',
51 51 # reference types
52 52 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
53 53 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
54 54 }
55 55
56 56
57 57 def add_route_with_slash(config, name, pattern, **kw):
58 58 config.add_route(name, pattern, **kw)
59 59 if not pattern.endswith('/'):
60 60 config.add_route(name + '_slash', pattern + '/', **kw)
61 61
62 62
63 63 def add_route_requirements(route_path, requirements=URL_NAME_REQUIREMENTS):
64 64 """
65 65 Adds regex requirements to pyramid routes using a mapping dict
66 66 e.g::
67 67 add_route_requirements('{repo_name}/settings')
68 68 """
69 69 for key, regex in requirements.items():
70 70 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
71 71 return route_path
72 72
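For illustration, a minimal standalone sketch of the substitution that ``add_route_requirements`` performs on a Pyramid route pattern; the helper name and the example pattern below are illustrative only, e.g.::

    requirements = {
        'repo_name': r'.*?[^/]',
        'f_path': r'.*',
    }

    def add_requirements(route_path, requirements):
        # same replace loop as add_route_requirements above
        for key, regex in requirements.items():
            route_path = route_path.replace(
                '{%s}' % key, '{%s:%s}' % (key, regex))
        return route_path

    print(add_requirements('{repo_name}/files/{f_path}', requirements))
    # -> {repo_name:.*?[^/]}/files/{f_path:.*}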
73 73
74 74 def get_format_ref_id(repo):
75 75 """Returns a `repo` specific reference formatter function"""
76 76 if h.is_svn(repo):
77 77 return _format_ref_id_svn
78 78 else:
79 79 return _format_ref_id
80 80
81 81
82 82 def _format_ref_id(name, raw_id):
83 83 """Default formatting of a given reference `name`"""
84 84 return name
85 85
86 86
87 87 def _format_ref_id_svn(name, raw_id):
88 88 """Special way of formatting a reference for Subversion including path"""
89 89 return '%s@%s' % (name, raw_id)
90 90
91 91
92 92 class TemplateArgs(StrictAttributeDict):
93 93 pass
94 94
95 95
96 96 class BaseAppView(object):
97 97
98 98 def __init__(self, context, request):
99 99 self.request = request
100 100 self.context = context
101 101 self.session = request.session
102 102 self._rhodecode_user = request.user # auth user
103 103 self._rhodecode_db_user = self._rhodecode_user.get_instance()
104 104 self._maybe_needs_password_change(
105 105 request.matched_route.name, self._rhodecode_db_user)
106 106
107 107 def _maybe_needs_password_change(self, view_name, user_obj):
108 108 log.debug('Checking if user %s needs password change on view %s',
109 109 user_obj, view_name)
110 110 skip_user_views = [
111 111 'logout', 'login',
112 112 'my_account_password', 'my_account_password_update'
113 113 ]
114 114
115 115 if not user_obj:
116 116 return
117 117
118 118 if user_obj.username == User.DEFAULT_USER:
119 119 return
120 120
121 121 now = time.time()
122 122 should_change = user_obj.user_data.get('force_password_change')
123 123 change_after = safe_int(should_change) or 0
124 124 if should_change and now > change_after:
125 125 log.debug('User %s requires password change', user_obj)
126 126 h.flash('You are required to change your password', 'warning',
127 127 ignore_duplicate=True)
128 128
129 129 if view_name not in skip_user_views:
130 130 raise HTTPFound(
131 131 self.request.route_path('my_account_password'))
132 132
133 133 def _log_creation_exception(self, e, repo_name):
134 134 _ = self.request.translate
135 135 reason = None
136 136 if len(e.args) == 2:
137 137 reason = e.args[1]
138 138
139 139 if reason == 'INVALID_CERTIFICATE':
140 140 log.exception(
141 141 'Exception creating a repository: invalid certificate')
142 142 msg = (_('Error creating repository %s: invalid certificate')
143 143 % repo_name)
144 144 else:
145 145 log.exception("Exception creating a repository")
146 146 msg = (_('Error creating repository %s')
147 147 % repo_name)
148 148 return msg
149 149
150 150 def _get_local_tmpl_context(self, include_app_defaults=True):
151 151 c = TemplateArgs()
152 152 c.auth_user = self.request.user
153 153 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
154 154 c.rhodecode_user = self.request.user
155 155
156 156 if include_app_defaults:
157 157 from rhodecode.lib.base import attach_context_attributes
158 158 attach_context_attributes(c, self.request, self.request.user.user_id)
159 159
160 160 return c
161 161
162 162 def _get_template_context(self, tmpl_args, **kwargs):
163 163
164 164 local_tmpl_args = {
165 165 'defaults': {},
166 166 'errors': {},
167 167 'c': tmpl_args
168 168 }
169 169 local_tmpl_args.update(kwargs)
170 170 return local_tmpl_args
171 171
172 172 def load_default_context(self):
173 173 """
174 174 example:
175 175
176 176 def load_default_context(self):
177 177 c = self._get_local_tmpl_context()
178 178 c.custom_var = 'foobar'
179 179
180 180 return c
181 181 """
182 182 raise NotImplementedError('Needs implementation in view class')
183 183
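For context, a rough sketch of how a concrete view class typically combines ``load_default_context`` and ``_get_template_context``; the class name and the ``active`` attribute are hypothetical, not part of this changeset::

    class ExampleSettingsView(BaseAppView):

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.active = 'settings'
            return c

        def settings(self):
            # a view callable would normally be wired up via Pyramid config
            c = self.load_default_context()
            return self._get_template_context(c)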
184 184
185 185 class RepoAppView(BaseAppView):
186 186
187 187 def __init__(self, context, request):
188 188 super(RepoAppView, self).__init__(context, request)
189 189 self.db_repo = request.db_repo
190 190 self.db_repo_name = self.db_repo.repo_name
191 191 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
192 192
193 193 def _handle_missing_requirements(self, error):
194 194 log.error(
195 195 'Requirements are missing for repository %s: %s',
196 196 self.db_repo_name, error.message)
197 197
198 198 def _get_local_tmpl_context(self, include_app_defaults=True):
199 199 _ = self.request.translate
200 200 c = super(RepoAppView, self)._get_local_tmpl_context(
201 201 include_app_defaults=include_app_defaults)
202 202
203 203 # register common vars for this type of view
204 204 c.rhodecode_db_repo = self.db_repo
205 205 c.repo_name = self.db_repo_name
206 206 c.repository_pull_requests = self.db_repo_pull_requests
207 207
208 208 c.repository_requirements_missing = False
209 209 try:
210 210 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
211 211 if self.rhodecode_vcs_repo:
212 self.path_filter = PathFilter(self.rhodecode_vcs_repo.get_path_permissions(c.auth_user.username))
212 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
213 c.auth_user.username)
214 self.path_filter = PathFilter(path_perms)
213 215 else:
214 216 self.path_filter = PathFilter(None)
215 217 except RepositoryRequirementError as e:
216 218 c.repository_requirements_missing = True
217 219 self._handle_missing_requirements(e)
218 220 self.rhodecode_vcs_repo = None
219 221 self.path_filter = None
220 222
221 c.path_filter = self.path_filter # used by atom_feed_entry.mako
223 c.path_filter = self.path_filter # used by atom_feed_entry.mako
222 224
223 225 if (not c.repository_requirements_missing
224 226 and self.rhodecode_vcs_repo is None):
225 227 # unable to fetch this repo as vcs instance, report back to user
226 228 h.flash(_(
227 229 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
228 230 "Please check if it exist, or is not damaged.") %
229 231 {'repo_name': c.repo_name},
230 232 category='error', ignore_duplicate=True)
231 233 raise HTTPFound(h.route_path('home'))
232 234
233 235 return c
234 236
235 237 def _get_f_path_unchecked(self, matchdict, default=None):
236 238 """
237 239 Should only be used by redirects; everything else should call _get_f_path
238 240 """
239 241 f_path = matchdict.get('f_path')
240 242 if f_path:
241 243 # fix for multiple initial slashes that causes errors for GIT
242 244 return f_path.lstrip('/')
243 245
244 246 return default
245 247
246 248 def _get_f_path(self, matchdict, default=None):
247 return self.path_filter.assert_path_permissions(self._get_f_path_unchecked(matchdict, default))
249 f_path_match = self._get_f_path_unchecked(matchdict, default)
250 return self.path_filter.assert_path_permissions(f_path_match)
248 251
249 252
250 253 class PathFilter(object):
251 254
252 255 # Expects an instance of BasePathPermissionChecker or None
253 256 def __init__(self, permission_checker):
254 257 self.permission_checker = permission_checker
255 258
256 259 def assert_path_permissions(self, path):
257 260 if path and self.permission_checker and not self.permission_checker.has_access(path):
258 261 raise HTTPForbidden()
259 262 return path
260 263
261 264 def filter_patchset(self, patchset):
262 265 if not self.permission_checker or not patchset:
263 266 return patchset, False
264 267 had_filtered = False
265 268 filtered_patchset = []
266 269 for patch in patchset:
267 270 filename = patch.get('filename', None)
268 271 if not filename or self.permission_checker.has_access(filename):
269 272 filtered_patchset.append(patch)
270 273 else:
271 274 had_filtered = True
272 275 if had_filtered:
273 276 if isinstance(patchset, diffs.LimitedDiffContainer):
274 277 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
275 278 return filtered_patchset, True
276 279 else:
277 280 return patchset, False
278 281
279 282 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
280 283 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
281 284 result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
282 285 result.has_hidden_changes = has_hidden_changes
283 286 return result
284 287
285 288 def get_raw_patch(self, diff_processor):
286 289 if self.permission_checker is None:
287 290 return diff_processor.as_raw()
288 291 elif self.permission_checker.has_full_access:
289 292 return diff_processor.as_raw()
290 293 else:
291 294 return '# Repository has user-specific filters, raw patch generation is disabled.'
292 295
293 296 @property
294 297 def is_enabled(self):
295 298 return self.permission_checker is not None
296 299
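A usage sketch for ``PathFilter``; the ``DummyChecker`` below is a hypothetical stand-in that only mimics the ``has_access``/``has_full_access`` interface expected from ``BasePathPermissionChecker``::

    class DummyChecker(object):
        has_full_access = False

        def __init__(self, allowed):
            self.allowed = set(allowed)

        def has_access(self, path):
            return path in self.allowed

    patchset = [{'filename': 'README.rst'}, {'filename': 'secret/key.txt'}]
    path_filter = PathFilter(DummyChecker(['README.rst']))
    filtered, has_hidden = path_filter.filter_patchset(patchset)
    # filtered contains only the README.rst patch, has_hidden is True
    path_filter.assert_path_permissions('README.rst')  # returns the path
    # assert_path_permissions('secret/key.txt') would raise HTTPForbidden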
297 300
298 301 class RepoGroupAppView(BaseAppView):
299 302 def __init__(self, context, request):
300 303 super(RepoGroupAppView, self).__init__(context, request)
301 304 self.db_repo_group = request.db_repo_group
302 305 self.db_repo_group_name = self.db_repo_group.group_name
303 306
304 307 def _revoke_perms_on_yourself(self, form_result):
305 308 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
306 309 form_result['perm_updates'])
307 310 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
308 311 form_result['perm_additions'])
309 312 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
310 313 form_result['perm_deletions'])
311 314 admin_perm = 'group.admin'
312 315 if _updates and _updates[0][1] != admin_perm or \
313 316 _additions and _additions[0][1] != admin_perm or \
314 317 _deletions and _deletions[0][1] != admin_perm:
315 318 return True
316 319 return False
317 320
318 321
319 322 class UserGroupAppView(BaseAppView):
320 323 def __init__(self, context, request):
321 324 super(UserGroupAppView, self).__init__(context, request)
322 325 self.db_user_group = request.db_user_group
323 326 self.db_user_group_name = self.db_user_group.users_group_name
324 327
325 328
326 329 class UserAppView(BaseAppView):
327 330 def __init__(self, context, request):
328 331 super(UserAppView, self).__init__(context, request)
329 332 self.db_user = request.db_user
330 333 self.db_user_id = self.db_user.user_id
331 334
332 335 _ = self.request.translate
333 336 if not request.db_user_supports_default:
334 337 if self.db_user.username == User.DEFAULT_USER:
335 338 h.flash(_("Editing user `{}` is disabled.".format(
336 339 User.DEFAULT_USER)), category='warning')
337 340 raise HTTPFound(h.route_path('users'))
338 341
339 342
340 343 class DataGridAppView(object):
341 344 """
342 345 Common class to have re-usable grid rendering components
343 346 """
344 347
345 348 def _extract_ordering(self, request, column_map=None):
346 349 column_map = column_map or {}
347 350 column_index = safe_int(request.GET.get('order[0][column]'))
348 351 order_dir = request.GET.get(
349 352 'order[0][dir]', 'desc')
350 353 order_by = request.GET.get(
351 354 'columns[%s][data][sort]' % column_index, 'name_raw')
352 355
353 356 # translate datatable to DB columns
354 357 order_by = column_map.get(order_by) or order_by
355 358
356 359 search_q = request.GET.get('search[value]')
357 360 return search_q, order_by, order_dir
358 361
359 362 def _extract_chunk(self, request):
360 363 start = safe_int(request.GET.get('start'), 0)
361 364 length = safe_int(request.GET.get('length'), 25)
362 365 draw = safe_int(request.GET.get('draw'))
363 366 return draw, start, length
364 367
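For reference, a sketch of the DataTables server-side parameters that ``_extract_ordering`` and ``_extract_chunk`` parse; the values are illustrative only::

    GET = {
        'draw': '3',
        'start': '50',
        'length': '25',
        'order[0][column]': '1',
        'order[0][dir]': 'asc',
        'columns[1][data][sort]': 'name_raw',
        'search[value]': 'docs',
    }
    # _extract_chunk    -> draw=3, start=50, length=25
    # _extract_ordering -> search_q='docs', order_dir='asc', order_by='name_raw'
    #                      (or the mapped DB column when present in column_map)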
365 368 def _get_order_col(self, order_by, model):
366 369 if isinstance(order_by, basestring):
367 370 try:
368 371 return operator.attrgetter(order_by)(model)
369 372 except AttributeError:
370 373 return None
371 374 else:
372 375 return order_by
373 376
374 377
375 378 class BaseReferencesView(RepoAppView):
376 379 """
377 380 Base for reference view for branches, tags and bookmarks.
378 381 """
379 382 def load_default_context(self):
380 383 c = self._get_local_tmpl_context()
381 384
382 385
383 386 return c
384 387
385 388 def load_refs_context(self, ref_items, partials_template):
386 389 _render = self.request.get_partial_renderer(partials_template)
387 390 pre_load = ["author", "date", "message"]
388 391
389 392 is_svn = h.is_svn(self.rhodecode_vcs_repo)
390 393 is_hg = h.is_hg(self.rhodecode_vcs_repo)
391 394
392 395 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
393 396
394 397 closed_refs = {}
395 398 if is_hg:
396 399 closed_refs = self.rhodecode_vcs_repo.branches_closed
397 400
398 401 data = []
399 402 for ref_name, commit_id in ref_items:
400 403 commit = self.rhodecode_vcs_repo.get_commit(
401 404 commit_id=commit_id, pre_load=pre_load)
402 405 closed = ref_name in closed_refs
403 406
404 407 # TODO: johbo: Unify generation of reference links
405 408 use_commit_id = '/' in ref_name or is_svn
406 409
407 410 if use_commit_id:
408 411 files_url = h.route_path(
409 412 'repo_files',
410 413 repo_name=self.db_repo_name,
411 414 f_path=ref_name if is_svn else '',
412 415 commit_id=commit_id)
413 416
414 417 else:
415 418 files_url = h.route_path(
416 419 'repo_files',
417 420 repo_name=self.db_repo_name,
418 421 f_path=ref_name if is_svn else '',
419 422 commit_id=ref_name,
420 423 _query=dict(at=ref_name))
421 424
422 425 data.append({
423 426 "name": _render('name', ref_name, files_url, closed),
424 427 "name_raw": ref_name,
425 428 "date": _render('date', commit.date),
426 429 "date_raw": datetime_to_time(commit.date),
427 430 "author": _render('author', commit.author),
428 431 "commit": _render(
429 432 'commit', commit.message, commit.raw_id, commit.idx),
430 433 "commit_raw": commit.idx,
431 434 "compare": _render(
432 435 'compare', format_ref_id(ref_name, commit.raw_id)),
433 436 })
434 437
435 438 return data
436 439
437 440
438 441 class RepoRoutePredicate(object):
439 442 def __init__(self, val, config):
440 443 self.val = val
441 444
442 445 def text(self):
443 446 return 'repo_route = %s' % self.val
444 447
445 448 phash = text
446 449
447 450 def __call__(self, info, request):
448 451
449 452 if hasattr(request, 'vcs_call'):
450 453 # skip vcs calls
451 454 return
452 455
453 456 repo_name = info['match']['repo_name']
454 457 repo_model = repo.RepoModel()
455 458 by_name_match = repo_model.get_by_repo_name(repo_name, cache=True)
456 459
457 460 def redirect_if_creating(db_repo):
458 461 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
459 462 raise HTTPFound(
460 463 request.route_path('repo_creating',
461 464 repo_name=db_repo.repo_name))
462 465
463 466 if by_name_match:
464 467 # register this as a request object we can re-use later
465 468 request.db_repo = by_name_match
466 469 redirect_if_creating(by_name_match)
467 470 return True
468 471
469 472 by_id_match = repo_model.get_repo_by_id(repo_name)
470 473 if by_id_match:
471 474 request.db_repo = by_id_match
472 475 redirect_if_creating(by_id_match)
473 476 return True
474 477
475 478 return False
476 479
477 480
478 481 class RepoTypeRoutePredicate(object):
479 482 def __init__(self, val, config):
480 483 self.val = val or ['hg', 'git', 'svn']
481 484
482 485 def text(self):
483 486 return 'repo_accepted_type = %s' % self.val
484 487
485 488 phash = text
486 489
487 490 def __call__(self, info, request):
488 491 if hasattr(request, 'vcs_call'):
489 492 # skip vcs calls
490 493 return
491 494
492 495 rhodecode_db_repo = request.db_repo
493 496
494 497 log.debug(
495 498 '%s checking repo type for %s in %s',
496 499 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
497 500
498 501 if rhodecode_db_repo.repo_type in self.val:
499 502 return True
500 503 else:
501 504 log.warning('Current view is not supported for repo type: %s',
502 505 rhodecode_db_repo.repo_type)
503 506 #
504 507 # h.flash(h.literal(
505 508 # _('Action not supported for %s.' % rhodecode_repo.alias)),
506 509 # category='warning')
507 510 # return redirect(
508 511 # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
509 512
510 513 return False
511 514
512 515
513 516 class RepoGroupRoutePredicate(object):
514 517 def __init__(self, val, config):
515 518 self.val = val
516 519
517 520 def text(self):
518 521 return 'repo_group_route = %s' % self.val
519 522
520 523 phash = text
521 524
522 525 def __call__(self, info, request):
523 526 if hasattr(request, 'vcs_call'):
524 527 # skip vcs calls
525 528 return
526 529
527 530 repo_group_name = info['match']['repo_group_name']
528 531 repo_group_model = repo_group.RepoGroupModel()
529 532 by_name_match = repo_group_model.get_by_group_name(
530 533 repo_group_name, cache=True)
531 534
532 535 if by_name_match:
533 536 # register this as a request object we can re-use later
534 537 request.db_repo_group = by_name_match
535 538 return True
536 539
537 540 return False
538 541
539 542
540 543 class UserGroupRoutePredicate(object):
541 544 def __init__(self, val, config):
542 545 self.val = val
543 546
544 547 def text(self):
545 548 return 'user_group_route = %s' % self.val
546 549
547 550 phash = text
548 551
549 552 def __call__(self, info, request):
550 553 if hasattr(request, 'vcs_call'):
551 554 # skip vcs calls
552 555 return
553 556
554 557 user_group_id = info['match']['user_group_id']
555 558 user_group_model = user_group.UserGroup()
556 559 by_id_match = user_group_model.get(
557 560 user_group_id, cache=True)
558 561
559 562 if by_id_match:
560 563 # register this as a request object we can re-use later
561 564 request.db_user_group = by_id_match
562 565 return True
563 566
564 567 return False
565 568
566 569
567 570 class UserRoutePredicateBase(object):
568 571 supports_default = None
569 572
570 573 def __init__(self, val, config):
571 574 self.val = val
572 575
573 576 def text(self):
574 577 raise NotImplementedError()
575 578
576 579 def __call__(self, info, request):
577 580 if hasattr(request, 'vcs_call'):
578 581 # skip vcs calls
579 582 return
580 583
581 584 user_id = info['match']['user_id']
582 585 user_model = user.User()
583 586 by_id_match = user_model.get(
584 587 user_id, cache=True)
585 588
586 589 if by_id_match:
587 590 # register this as a request object we can re-use later
588 591 request.db_user = by_id_match
589 592 request.db_user_supports_default = self.supports_default
590 593 return True
591 594
592 595 return False
593 596
594 597
595 598 class UserRoutePredicate(UserRoutePredicateBase):
596 599 supports_default = False
597 600
598 601 def text(self):
599 602 return 'user_route = %s' % self.val
600 603
601 604 phash = text
602 605
603 606
604 607 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
605 608 supports_default = True
606 609
607 610 def text(self):
608 611 return 'user_with_default_route = %s' % self.val
609 612
610 613 phash = text
611 614
612 615
613 616 def includeme(config):
614 617 config.add_route_predicate(
615 618 'repo_route', RepoRoutePredicate)
616 619 config.add_route_predicate(
617 620 'repo_accepted_types', RepoTypeRoutePredicate)
618 621 config.add_route_predicate(
619 622 'repo_group_route', RepoGroupRoutePredicate)
620 623 config.add_route_predicate(
621 624 'user_group_route', UserGroupRoutePredicate)
622 625 config.add_route_predicate(
623 626 'user_route_with_default', UserRouteWithDefaultPredicate)
624 627 config.add_route_predicate(
625 628 'user_route', UserRoutePredicate)
\ No newline at end of file
@@ -1,1694 +1,1694 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.utils import author_name, author_email
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 46 RepositoryError)
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 FILEMODE_DEFAULT = 0100644
53 53 FILEMODE_EXECUTABLE = 0100755
54 54
55 55 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 56 MergeResponse = collections.namedtuple(
57 57 'MergeResponse',
58 58 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 59
60 60
61 61 class MergeFailureReason(object):
62 62 """
63 63 Enumeration with all the reasons why the server side merge could fail.
64 64
65 65 DO NOT change the number of the reasons, as they may be stored in the
66 66 database.
67 67
68 68 Changing the name of a reason is acceptable and encouraged to deprecate old
69 69 reasons.
70 70 """
71 71
72 72 # Everything went well.
73 73 NONE = 0
74 74
75 75 # An unexpected exception was raised. Check the logs for more details.
76 76 UNKNOWN = 1
77 77
78 78 # The merge was not successful, there are conflicts.
79 79 MERGE_FAILED = 2
80 80
81 81 # The merge succeeded but we could not push it to the target repository.
82 82 PUSH_FAILED = 3
83 83
84 84 # The specified target is not a head in the target repository.
85 85 TARGET_IS_NOT_HEAD = 4
86 86
87 87 # The source repository contains more branches than the target. Pushing
88 88 # the merge will create additional branches in the target.
89 89 HG_SOURCE_HAS_MORE_BRANCHES = 5
90 90
91 91 # The target reference has multiple heads, which makes it impossible to
92 92 # correctly identify the target location. This can only happen for mercurial
93 93 # branches.
94 94 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95 95
96 96 # The target repository is locked
97 97 TARGET_IS_LOCKED = 7
98 98
99 99 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 100 # An involved commit could not be found.
101 101 _DEPRECATED_MISSING_COMMIT = 8
102 102
103 103 # The target repo reference is missing.
104 104 MISSING_TARGET_REF = 9
105 105
106 106 # The source repo reference is missing.
107 107 MISSING_SOURCE_REF = 10
108 108
109 109 # The merge was not successful, there are conflicts related to sub
110 110 # repositories.
111 111 SUBREPO_MERGE_FAILED = 11
112 112
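A small sketch of how the named tuples defined above are typically instantiated; the commit id and branch name are placeholders::

    target_ref = Reference(type='branch', name='default', commit_id='f' * 40)
    failed_merge = MergeResponse(
        possible=True, executed=False, merge_ref=None,
        failure_reason=MergeFailureReason.MERGE_FAILED)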
113 113
114 114 class UpdateFailureReason(object):
115 115 """
116 116 Enumeration with all the reasons why the pull request update could fail.
117 117
118 118 DO NOT change the number of the reasons, as they may be stored in the
119 119 database.
120 120
121 121 Changing the name of a reason is acceptable and encouraged to deprecate old
122 122 reasons.
123 123 """
124 124
125 125 # Everything went well.
126 126 NONE = 0
127 127
128 128 # An unexpected exception was raised. Check the logs for more details.
129 129 UNKNOWN = 1
130 130
131 131 # The pull request is up to date.
132 132 NO_CHANGE = 2
133 133
134 134 # The pull request has a reference type that is not supported for update.
135 135 WRONG_REF_TYPE = 3
136 136
137 137 # Update failed because the target reference is missing.
138 138 MISSING_TARGET_REF = 4
139 139
140 140 # Update failed because the source reference is missing.
141 141 MISSING_SOURCE_REF = 5
142 142
143 143
144 144 class BaseRepository(object):
145 145 """
146 146 Base Repository for final backends
147 147
148 148 .. attribute:: DEFAULT_BRANCH_NAME
149 149
150 150 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
151 151
152 152 .. attribute:: commit_ids
153 153
154 154 list of all available commit ids, in ascending order
155 155
156 156 .. attribute:: path
157 157
158 158 absolute path to the repository
159 159
160 160 .. attribute:: bookmarks
161 161
162 162 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 163 there are no bookmarks or the backend implementation does not support
164 164 bookmarks.
165 165
166 166 .. attribute:: tags
167 167
168 168 Mapping from name to :term:`Commit ID` of the tag.
169 169
170 170 """
171 171
172 172 DEFAULT_BRANCH_NAME = None
173 173 DEFAULT_CONTACT = u"Unknown"
174 174 DEFAULT_DESCRIPTION = u"unknown"
175 175 EMPTY_COMMIT_ID = '0' * 40
176 176
177 177 path = None
178 178
179 179 def __init__(self, repo_path, config=None, create=False, **kwargs):
180 180 """
181 181 Initializes repository. Raises RepositoryError if the repository could
182 182 not be found at the given ``repo_path``, or if the directory at ``repo_path``
183 183 exists and ``create`` is set to True.
184 184
185 185 :param repo_path: local path of the repository
186 186 :param config: repository configuration
187 187 :param create=False: if set to True, will try to create the repository.
188 188 :param src_url=None: if set, should be a proper url from which the repository
189 189 would be cloned; requires ``create`` parameter to be set to True -
190 190 raises RepositoryError if src_url is set and create evaluates to
191 191 False
192 192 """
193 193 raise NotImplementedError
194 194
195 195 def __repr__(self):
196 196 return '<%s at %s>' % (self.__class__.__name__, self.path)
197 197
198 198 def __len__(self):
199 199 return self.count()
200 200
201 201 def __eq__(self, other):
202 202 same_instance = isinstance(other, self.__class__)
203 203 return same_instance and other.path == self.path
204 204
205 205 def __ne__(self, other):
206 206 return not self.__eq__(other)
207 207
208 208 @classmethod
209 209 def get_default_config(cls, default=None):
210 210 config = Config()
211 211 if default and isinstance(default, list):
212 212 for section, key, val in default:
213 213 config.set(section, key, val)
214 214 return config
215 215
216 216 @LazyProperty
217 217 def EMPTY_COMMIT(self):
218 218 return EmptyCommit(self.EMPTY_COMMIT_ID)
219 219
220 220 @LazyProperty
221 221 def alias(self):
222 222 for k, v in settings.BACKENDS.items():
223 223 if v.split('.')[-1] == str(self.__class__.__name__):
224 224 return k
225 225
226 226 @LazyProperty
227 227 def name(self):
228 228 return safe_unicode(os.path.basename(self.path))
229 229
230 230 @LazyProperty
231 231 def description(self):
232 232 raise NotImplementedError
233 233
234 234 def refs(self):
235 235 """
236 236 returns a `dict` with branches, bookmarks, tags, and closed_branches
237 237 for this repository
238 238 """
239 239 return dict(
240 240 branches=self.branches,
241 241 branches_closed=self.branches_closed,
242 242 tags=self.tags,
243 243 bookmarks=self.bookmarks
244 244 )
245 245
246 246 @LazyProperty
247 247 def branches(self):
248 248 """
249 249 A `dict` which maps branch names to commit ids.
250 250 """
251 251 raise NotImplementedError
252 252
253 253 @LazyProperty
254 254 def branches_closed(self):
255 255 """
256 256 A `dict` which maps closed branch names to commit ids.
257 257 """
258 258 raise NotImplementedError
259 259
260 260 @LazyProperty
261 261 def bookmarks(self):
262 262 """
263 263 A `dict` which maps bookmark names to commit ids.
264 264 """
265 265 raise NotImplementedError
266 266
267 267 @LazyProperty
268 268 def tags(self):
269 269 """
270 270 A `dict` which maps tag names to commit ids.
271 271 """
272 272 raise NotImplementedError
273 273
274 274 @LazyProperty
275 275 def size(self):
276 276 """
277 277 Returns combined size in bytes for all repository files
278 278 """
279 279 tip = self.get_commit()
280 280 return tip.size
281 281
282 282 def size_at_commit(self, commit_id):
283 283 commit = self.get_commit(commit_id)
284 284 return commit.size
285 285
286 286 def is_empty(self):
287 287 return not bool(self.commit_ids)
288 288
289 289 @staticmethod
290 290 def check_url(url, config):
291 291 """
292 292 Checks the given url and tries to verify that it is a valid
293 293 link.
294 294 """
295 295 raise NotImplementedError
296 296
297 297 @staticmethod
298 298 def is_valid_repository(path):
299 299 """
300 300 Check if given `path` contains a valid repository of this backend
301 301 """
302 302 raise NotImplementedError
303 303
304 304 # ==========================================================================
305 305 # COMMITS
306 306 # ==========================================================================
307 307
308 308 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
309 309 """
310 310 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
311 311 are both None, most recent commit is returned.
312 312
313 313 :param pre_load: Optional. List of commit attributes to load.
314 314
315 315 :raises ``EmptyRepositoryError``: if there are no commits
316 316 """
317 317 raise NotImplementedError
318 318
319 319 def __iter__(self):
320 320 for commit_id in self.commit_ids:
321 321 yield self.get_commit(commit_id=commit_id)
322 322
323 323 def get_commits(
324 324 self, start_id=None, end_id=None, start_date=None, end_date=None,
325 325 branch_name=None, show_hidden=False, pre_load=None):
326 326 """
327 327 Returns an iterator of `BaseCommit` objects from start to end.
328 328 This should behave just like a list, i.e. end is not
329 329 inclusive.
330 330
331 331 :param start_id: None or str, must be a valid commit id
332 332 :param end_id: None or str, must be a valid commit id
333 333 :param start_date:
334 334 :param end_date:
335 335 :param branch_name:
336 336 :param show_hidden:
337 337 :param pre_load:
338 338 """
339 339 raise NotImplementedError
340 340
341 341 def __getitem__(self, key):
342 342 """
343 343 Allows index based access to the commit objects of this repository.
344 344 """
345 345 pre_load = ["author", "branch", "date", "message", "parents"]
346 346 if isinstance(key, slice):
347 347 return self._get_range(key, pre_load)
348 348 return self.get_commit(commit_idx=key, pre_load=pre_load)
349 349
350 350 def _get_range(self, slice_obj, pre_load):
351 351 for commit_id in self.commit_ids.__getitem__(slice_obj):
352 352 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
353 353
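The index, slice and length protocols defined above allow list-like access; ``repo`` is again assumed to be a concrete backend instance::

    first_commit = repo[0]           # __getitem__ with an integer index
    early_commits = list(repo[0:3])  # slices go through _get_range (a generator)
    total = len(repo)                # __len__ delegates to count()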
354 354 def count(self):
355 355 return len(self.commit_ids)
356 356
357 357 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
358 358 """
359 359 Creates and returns a tag for the given ``commit_id``.
360 360
361 361 :param name: name for new tag
362 362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 363 :param commit_id: commit id for which new tag would be created
364 364 :param message: message of the tag's commit
365 365 :param date: date of tag's commit
366 366
367 367 :raises TagAlreadyExistError: if tag with same name already exists
368 368 """
369 369 raise NotImplementedError
370 370
371 371 def remove_tag(self, name, user, message=None, date=None):
372 372 """
373 373 Removes tag with the given ``name``.
374 374
375 375 :param name: name of the tag to be removed
376 376 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
377 377 :param message: message of the tag's removal commit
378 378 :param date: date of tag's removal commit
379 379
380 380 :raises TagDoesNotExistError: if tag with given name does not exist
381 381 """
382 382 raise NotImplementedError
383 383
384 384 def get_diff(
385 385 self, commit1, commit2, path=None, ignore_whitespace=False,
386 386 context=3, path1=None):
387 387 """
388 388 Returns (git like) *diff*, as plain text. Shows changes introduced by
389 389 `commit2` since `commit1`.
390 390
391 391 :param commit1: Entry point from which diff is shown. Can be
392 392 ``self.EMPTY_COMMIT`` - in this case, patch showing all
393 393 the changes since empty state of the repository until `commit2`
394 394 :param commit2: Until which commit changes should be shown.
395 395 :param path: Can be set to a path of a file to create a diff of that
396 396 file. If `path1` is also set, this value is only associated to
397 397 `commit2`.
398 398 :param ignore_whitespace: If set to ``True``, would not show whitespace
399 399 changes. Defaults to ``False``.
400 400 :param context: How many lines before/after changed lines should be
401 401 shown. Defaults to ``3``.
402 402 :param path1: Can be set to a path to associate with `commit1`. This
403 403 parameter works only for backends which support diff generation for
404 404 different paths. Other backends will raise a `ValueError` if `path1`
405 405 is set and has a different value than `path`.
406 406 :param file_path: filter this diff by given path pattern
407 407 """
408 408 raise NotImplementedError
409 409
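A usage sketch for ``get_diff``; the file path is a placeholder::

    diff_text = repo.get_diff(
        repo.EMPTY_COMMIT, repo.get_commit(),
        path='setup.py', ignore_whitespace=True, context=6)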
410 410 def strip(self, commit_id, branch=None):
411 411 """
412 412 Strip given commit_id from the repository
413 413 """
414 414 raise NotImplementedError
415 415
416 416 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
417 417 """
418 418 Return the latest common ancestor commit if one exists for this repo's
419 419 `commit_id1` vs `commit_id2` from `repo2`.
420 420
421 421 :param commit_id1: Commit id from this repository to use as a
422 422 target for the comparison.
423 423 :param commit_id2: Source commit id to use for comparison.
424 424 :param repo2: Source repository to use for comparison.
425 425 """
426 426 raise NotImplementedError
427 427
428 428 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
429 429 """
430 430 Compare this repository's revision `commit_id1` with `commit_id2`.
431 431
432 432 Returns a tuple(commits, ancestor) that would be merged from
433 433 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
434 434 will be returned as ancestor.
435 435
436 436 :param commit_id1: Commit id from this repository to use as a
437 437 target for the comparison.
438 438 :param commit_id2: Source commit id to use for comparison.
439 439 :param repo2: Source repository to use for comparison.
440 440 :param merge: If set to ``True`` will do a merge compare which also
441 441 returns the common ancestor.
442 442 :param pre_load: Optional. List of commit attributes to load.
443 443 """
444 444 raise NotImplementedError
445 445
446 446 def merge(self, target_ref, source_repo, source_ref, workspace_id,
447 447 user_name='', user_email='', message='', dry_run=False,
448 448 use_rebase=False, close_branch=False):
449 449 """
450 450 Merge the revisions specified in `source_ref` from `source_repo`
451 451 onto the `target_ref` of this repository.
452 452
453 453 `source_ref` and `target_ref` are named tuples with the following
454 454 fields `type`, `name` and `commit_id`.
455 455
456 456 Returns a MergeResponse named tuple with the following fields
457 457 'possible', 'executed', 'source_commit', 'target_commit',
458 458 'merge_commit'.
459 459
460 460 :param target_ref: `target_ref` points to the commit on top of which
461 461 the `source_ref` should be merged.
462 462 :param source_repo: The repository that contains the commits to be
463 463 merged.
464 464 :param source_ref: `source_ref` points to the topmost commit from
465 465 the `source_repo` which should be merged.
466 466 :param workspace_id: `workspace_id` unique identifier.
467 467 :param user_name: Merge commit `user_name`.
468 468 :param user_email: Merge commit `user_email`.
469 469 :param message: Merge commit `message`.
470 470 :param dry_run: If `True` the merge will not take place.
471 471 :param use_rebase: If `True` commits from the source will be rebased
472 472 on top of the target instead of being merged.
473 473 :param close_branch: If `True` the branch will be closed before merging it
474 474 """
475 475 if dry_run:
476 476 message = message or 'dry_run_merge_message'
477 477 user_email = user_email or 'dry-run-merge@rhodecode.com'
478 478 user_name = user_name or 'Dry-Run User'
479 479 else:
480 480 if not user_name:
481 481 raise ValueError('user_name cannot be empty')
482 482 if not user_email:
483 483 raise ValueError('user_email cannot be empty')
484 484 if not message:
485 485 raise ValueError('message cannot be empty')
486 486
487 487 shadow_repository_path = self._maybe_prepare_merge_workspace(
488 488 workspace_id, target_ref, source_ref)
489 489
490 490 try:
491 491 return self._merge_repo(
492 492 shadow_repository_path, target_ref, source_repo,
493 493 source_ref, message, user_name, user_email, dry_run=dry_run,
494 494 use_rebase=use_rebase, close_branch=close_branch)
495 495 except RepositoryError:
496 496 log.exception(
497 497 'Unexpected failure when running merge, dry-run=%s',
498 498 dry_run)
499 499 return MergeResponse(
500 500 False, False, None, MergeFailureReason.UNKNOWN)
501 501
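A dry-run usage sketch for ``merge``; the repositories, commit ids and workspace id below are placeholders::

    source_ref = Reference('branch', 'feature-x', 'a' * 40)
    target_ref = Reference('branch', 'default', 'b' * 40)
    response = target_repo.merge(
        target_ref, source_repo, source_ref, workspace_id='pr-42-workspace',
        user_name='Joe Doe', user_email='joe.doe@example.com',
        message='Merge feature-x into default', dry_run=True)
    if response.possible:
        # a real merge could now be attempted with dry_run=False
        pass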
502 502 def _merge_repo(self, shadow_repository_path, target_ref,
503 503 source_repo, source_ref, merge_message,
504 504 merger_name, merger_email, dry_run=False,
505 505 use_rebase=False, close_branch=False):
506 506 """Internal implementation of merge."""
507 507 raise NotImplementedError
508 508
509 509 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
510 510 """
511 511 Create the merge workspace.
512 512
513 513 :param workspace_id: `workspace_id` unique identifier.
514 514 """
515 515 raise NotImplementedError
516 516
517 517 def cleanup_merge_workspace(self, workspace_id):
518 518 """
519 519 Remove merge workspace.
520 520
521 521 This function MUST not fail in case there is no workspace associated with
522 522 the given `workspace_id`.
523 523
524 524 :param workspace_id: `workspace_id` unique identifier.
525 525 """
526 526 raise NotImplementedError
527 527
528 528 # ========== #
529 529 # COMMIT API #
530 530 # ========== #
531 531
532 532 @LazyProperty
533 533 def in_memory_commit(self):
534 534 """
535 535 Returns :class:`InMemoryCommit` object for this repository.
536 536 """
537 537 raise NotImplementedError
538 538
539 539 # ======================== #
540 540 # UTILITIES FOR SUBCLASSES #
541 541 # ======================== #
542 542
543 543 def _validate_diff_commits(self, commit1, commit2):
544 544 """
545 545 Validates that the given commits are related to this repository.
546 546
547 547 Intended as a utility for sub classes to have a consistent validation
548 548 of input parameters in methods like :meth:`get_diff`.
549 549 """
550 550 self._validate_commit(commit1)
551 551 self._validate_commit(commit2)
552 552 if (isinstance(commit1, EmptyCommit) and
553 553 isinstance(commit2, EmptyCommit)):
554 554 raise ValueError("Cannot compare two empty commits")
555 555
556 556 def _validate_commit(self, commit):
557 557 if not isinstance(commit, BaseCommit):
558 558 raise TypeError(
559 559 "%s is not of type BaseCommit" % repr(commit))
560 560 if commit.repository != self and not isinstance(commit, EmptyCommit):
561 561 raise ValueError(
562 562 "Commit %s must be a valid commit from this repository %s, "
563 563 "related to this repository instead %s." %
564 564 (commit, self, commit.repository))
565 565
566 566 def _validate_commit_id(self, commit_id):
567 567 if not isinstance(commit_id, basestring):
568 568 raise TypeError("commit_id must be a string value")
569 569
570 570 def _validate_commit_idx(self, commit_idx):
571 571 if not isinstance(commit_idx, (int, long)):
572 572 raise TypeError("commit_idx must be a numeric value")
573 573
574 574 def _validate_branch_name(self, branch_name):
575 575 if branch_name and branch_name not in self.branches_all:
576 576 msg = ("Branch %s not found in %s" % (branch_name, self))
577 577 raise BranchDoesNotExistError(msg)
578 578
579 579 #
580 580 # Supporting deprecated API parts
581 581 # TODO: johbo: consider to move this into a mixin
582 582 #
583 583
584 584 @property
585 585 def EMPTY_CHANGESET(self):
586 586 warnings.warn(
587 587 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
588 588 return self.EMPTY_COMMIT_ID
589 589
590 590 @property
591 591 def revisions(self):
592 592 warnings.warn("Use commits attribute instead", DeprecationWarning)
593 593 return self.commit_ids
594 594
595 595 @revisions.setter
596 596 def revisions(self, value):
597 597 warnings.warn("Use commits attribute instead", DeprecationWarning)
598 598 self.commit_ids = value
599 599
600 600 def get_changeset(self, revision=None, pre_load=None):
601 601 warnings.warn("Use get_commit instead", DeprecationWarning)
602 602 commit_id = None
603 603 commit_idx = None
604 604 if isinstance(revision, basestring):
605 605 commit_id = revision
606 606 else:
607 607 commit_idx = revision
608 608 return self.get_commit(
609 609 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
610 610
611 611 def get_changesets(
612 612 self, start=None, end=None, start_date=None, end_date=None,
613 613 branch_name=None, pre_load=None):
614 614 warnings.warn("Use get_commits instead", DeprecationWarning)
615 615 start_id = self._revision_to_commit(start)
616 616 end_id = self._revision_to_commit(end)
617 617 return self.get_commits(
618 618 start_id=start_id, end_id=end_id, start_date=start_date,
619 619 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
620 620
621 621 def _revision_to_commit(self, revision):
622 622 """
623 623 Translates a revision to a commit_id
624 624
625 625 Helps to support the old changeset based API which allows to use
626 626 commit ids and commit indices interchangeable.
627 627 """
628 628 if revision is None:
629 629 return revision
630 630
631 631 if isinstance(revision, basestring):
632 632 commit_id = revision
633 633 else:
634 634 commit_id = self.commit_ids[revision]
635 635 return commit_id
636 636
637 637 @property
638 638 def in_memory_changeset(self):
639 639 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
640 640 return self.in_memory_commit
641 641
642 #
643 642 def get_path_permissions(self, username):
644 643 """
645
646 644 Returns a path permission checker or None if not supported
647 645
648 646 :param username: session user name
649 647 :return: an instance of BasePathPermissionChecker or None
650 648 """
651 649 return None
652 650
653 651
654 652 class BaseCommit(object):
655 653 """
655 653 Each backend should implement its commit representation.
657 655
658 656 **Attributes**
659 657
660 658 ``repository``
661 659 repository object within which commit exists
662 660
663 661 ``id``
664 662 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
665 663 just ``tip``.
666 664
667 665 ``raw_id``
668 666 raw commit representation (i.e. full 40 length sha for git
669 667 backend)
670 668
671 669 ``short_id``
672 670 shortened (if applicable) version of ``raw_id``; a simple
673 671 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
674 672 as ``raw_id`` for subversion
675 673
676 674 ``idx``
677 675 commit index
678 676
679 677 ``files``
680 678 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
681 679
682 680 ``dirs``
683 681 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
684 682
685 683 ``nodes``
686 684 combined list of ``Node`` objects
687 685
688 686 ``author``
689 687 author of the commit, as unicode
690 688
691 689 ``message``
692 690 message of the commit, as unicode
693 691
694 692 ``parents``
695 693 list of parent commits
696 694
697 695 """
698 696
699 697 branch = None
700 698 """
701 699 Depending on the backend this should be set to the branch name of the
702 700 commit. Backends not supporting branches on commits should leave this
703 701 value as ``None``.
704 702 """
705 703
706 704 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
707 705 """
708 706 This template is used to generate a default prefix for repository archives
709 707 if no prefix has been specified.
710 708 """
711 709
712 710 def __str__(self):
713 711 return '<%s at %s:%s>' % (
714 712 self.__class__.__name__, self.idx, self.short_id)
715 713
716 714 def __repr__(self):
717 715 return self.__str__()
718 716
719 717 def __unicode__(self):
720 718 return u'%s:%s' % (self.idx, self.short_id)
721 719
722 720 def __eq__(self, other):
723 721 same_instance = isinstance(other, self.__class__)
724 722 return same_instance and self.raw_id == other.raw_id
725 723
726 724 def __json__(self):
727 725 parents = []
728 726 try:
729 727 for parent in self.parents:
730 728 parents.append({'raw_id': parent.raw_id})
731 729 except NotImplementedError:
732 730 # empty commit doesn't have parents implemented
733 731 pass
734 732
735 733 return {
736 734 'short_id': self.short_id,
737 735 'raw_id': self.raw_id,
738 736 'revision': self.idx,
739 737 'message': self.message,
740 738 'date': self.date,
741 739 'author': self.author,
742 740 'parents': parents,
743 741 'branch': self.branch
744 742 }
745 743
746 744 def _get_refs(self):
747 745 return {
748 746 'branches': [self.branch],
749 747 'bookmarks': getattr(self, 'bookmarks', []),
750 748 'tags': self.tags
751 749 }
752 750
753 751 @LazyProperty
754 752 def last(self):
755 753 """
756 754 ``True`` if this is the last commit in the repository, ``False``
757 755 otherwise; trying to access this attribute when there are no
758 756 commits would raise `EmptyRepositoryError`
759 757 """
760 758 if self.repository is None:
761 759 raise CommitError("Cannot check if it's most recent commit")
762 760 return self.raw_id == self.repository.commit_ids[-1]
763 761
764 762 @LazyProperty
765 763 def parents(self):
766 764 """
767 765 Returns list of parent commits.
768 766 """
769 767 raise NotImplementedError
770 768
771 769 @property
772 770 def merge(self):
773 771 """
774 772 Returns boolean if commit is a merge.
775 773 """
776 774 return len(self.parents) > 1
777 775
778 776 @LazyProperty
779 777 def children(self):
780 778 """
781 779 Returns list of child commits.
782 780 """
783 781 raise NotImplementedError
784 782
785 783 @LazyProperty
786 784 def id(self):
787 785 """
788 786 Returns string identifying this commit.
789 787 """
790 788 raise NotImplementedError
791 789
792 790 @LazyProperty
793 791 def raw_id(self):
794 792 """
795 793 Returns raw string identifying this commit.
796 794 """
797 795 raise NotImplementedError
798 796
799 797 @LazyProperty
800 798 def short_id(self):
801 799 """
802 800 Returns shortened version of ``raw_id`` attribute, as string,
803 801 identifying this commit, useful for presentation to users.
804 802 """
805 803 raise NotImplementedError
806 804
807 805 @LazyProperty
808 806 def idx(self):
809 807 """
810 808 Returns integer identifying this commit.
811 809 """
812 810 raise NotImplementedError
813 811
814 812 @LazyProperty
815 813 def committer(self):
816 814 """
817 815 Returns committer for this commit
818 816 """
819 817 raise NotImplementedError
820 818
821 819 @LazyProperty
822 820 def committer_name(self):
823 821 """
824 822 Returns committer name for this commit
825 823 """
826 824
827 825 return author_name(self.committer)
828 826
829 827 @LazyProperty
830 828 def committer_email(self):
831 829 """
832 830 Returns committer email address for this commit
833 831 """
834 832
835 833 return author_email(self.committer)
836 834
837 835 @LazyProperty
838 836 def author(self):
839 837 """
840 838 Returns author for this commit
841 839 """
842 840
843 841 raise NotImplementedError
844 842
845 843 @LazyProperty
846 844 def author_name(self):
847 845 """
848 846 Returns author name for this commit
849 847 """
850 848
851 849 return author_name(self.author)
852 850
853 851 @LazyProperty
854 852 def author_email(self):
855 853 """
856 854 Returns author email address for this commit
857 855 """
858 856
859 857 return author_email(self.author)
860 858
861 859 def get_file_mode(self, path):
862 860 """
863 861 Returns stat mode of the file at `path`.
864 862 """
865 863 raise NotImplementedError
866 864
867 865 def is_link(self, path):
868 866 """
869 867 Returns ``True`` if given `path` is a symlink
870 868 """
871 869 raise NotImplementedError
872 870
873 871 def get_file_content(self, path):
874 872 """
875 873 Returns content of the file at the given `path`.
876 874 """
877 875 raise NotImplementedError
878 876
879 877 def get_file_size(self, path):
880 878 """
881 879 Returns size of the file at the given `path`.
882 880 """
883 881 raise NotImplementedError
884 882
885 883 def get_file_commit(self, path, pre_load=None):
886 884 """
887 885 Returns last commit of the file at the given `path`.
888 886
889 887 :param pre_load: Optional. List of commit attributes to load.
890 888 """
891 889 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
892 890 if not commits:
893 891 raise RepositoryError(
894 892 'Failed to fetch history for path {}. '
895 893 'Please check if such path exists in your repository'.format(
896 894 path))
897 895 return commits[0]
898 896
899 897 def get_file_history(self, path, limit=None, pre_load=None):
900 898 """
901 899 Returns history of file as reversed list of :class:`BaseCommit`
902 900 objects for which file at given `path` has been modified.
903 901
904 902 :param limit: Optional. Allows to limit the size of the returned
905 903 history. This is intended as a hint to the underlying backend, so
906 904 that it can apply optimizations depending on the limit.
907 905 :param pre_load: Optional. List of commit attributes to load.
908 906 """
909 907 raise NotImplementedError
910 908
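A usage sketch for the two file-history helpers above; ``commit`` is assumed to be a concrete ``BaseCommit`` subclass instance and the path is a placeholder::

    last_change = commit.get_file_commit('docs/index.rst')
    history = commit.get_file_history(
        'docs/index.rst', limit=10, pre_load=['author', 'date', 'message'])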
911 909 def get_file_annotate(self, path, pre_load=None):
912 910 """
913 911 Returns a generator of four element tuples with
914 912 lineno, sha, commit lazy loader and line
915 913
916 914 :param pre_load: Optional. List of commit attributes to load.
917 915 """
918 916 raise NotImplementedError
919 917
920 918 def get_nodes(self, path):
921 919 """
922 920 Returns combined ``DirNode`` and ``FileNode`` objects list representing
923 921 state of commit at the given ``path``.
924 922
925 923 :raises ``CommitError``: if node at the given ``path`` is not
926 924 instance of ``DirNode``
927 925 """
928 926 raise NotImplementedError
929 927
930 928 def get_node(self, path):
931 929 """
932 930 Returns ``Node`` object from the given ``path``.
933 931
934 932 :raises ``NodeDoesNotExistError``: if there is no node at the given
935 933 ``path``
936 934 """
937 935 raise NotImplementedError
938 936
939 937 def get_largefile_node(self, path):
940 938 """
941 939 Returns the path to the largefile from Mercurial/Git-lfs storage,
942 940 or None if it's not a largefile node.
943 941 """
944 942 return None
945 943
946 944 def archive_repo(self, file_path, kind='tgz', subrepos=None,
947 945 prefix=None, write_metadata=False, mtime=None):
948 946 """
949 947 Creates an archive containing the contents of the repository.
950 948
951 949 :param file_path: path to the file in which to create the archive.
952 950 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
953 951 :param prefix: name of root directory in archive.
954 952 Default is repository name and commit's short_id joined with dash:
955 953 ``"{repo_name}-{short_id}"``.
956 954 :param write_metadata: write a metadata file into archive.
957 955 :param mtime: custom modification time for archive creation, defaults
958 956 to time.time() if not given.
959 957
960 958 :raise VCSError: If prefix has a problem.
961 959 """
962 960 allowed_kinds = settings.ARCHIVE_SPECS.keys()
963 961 if kind not in allowed_kinds:
964 962 raise ImproperArchiveTypeError(
965 963 'Archive kind (%s) not supported, use one of %s' %
966 964 (kind, allowed_kinds))
967 965
968 966 prefix = self._validate_archive_prefix(prefix)
969 967
970 968 mtime = mtime or time.mktime(self.date.timetuple())
971 969
972 970 file_info = []
973 971 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
974 972 for _r, _d, files in cur_rev.walk('/'):
975 973 for f in files:
976 974 f_path = os.path.join(prefix, f.path)
977 975 file_info.append(
978 976 (f_path, f.mode, f.is_link(), f.raw_bytes))
979 977
980 978 if write_metadata:
981 979 metadata = [
982 980 ('repo_name', self.repository.name),
983 981 ('rev', self.raw_id),
984 982 ('create_time', mtime),
985 983 ('branch', self.branch),
986 984 ('tags', ','.join(self.tags)),
987 985 ]
988 986 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
989 987 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
990 988
991 989 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
992 990
993 991 def _validate_archive_prefix(self, prefix):
994 992 if prefix is None:
995 993 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
996 994 repo_name=safe_str(self.repository.name),
997 995 short_id=self.short_id)
998 996 elif not isinstance(prefix, str):
999 997 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1000 998 elif prefix.startswith('/'):
1001 999 raise VCSError("Prefix cannot start with leading slash")
1002 1000 elif prefix.strip() == '':
1003 1001 raise VCSError("Prefix cannot be empty")
1004 1002 return prefix
1005 1003
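A usage sketch for ``archive_repo``; the output path and prefix are placeholders::

    commit.archive_repo(
        '/tmp/myrepo-snapshot.tgz', kind='tgz',
        prefix='myrepo-snapshot', write_metadata=True)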
1006 1004 @LazyProperty
1007 1005 def root(self):
1008 1006 """
1009 1007 Returns ``RootNode`` object for this commit.
1010 1008 """
1011 1009 return self.get_node('')
1012 1010
1013 1011 def next(self, branch=None):
1014 1012 """
1015 1013 Returns next commit from current, if branch is gives it will return
1016 1014 next commit belonging to this branch
1017 1015
1018 1016 :param branch: show commits within the given named branch
1019 1017 """
1020 1018 indexes = xrange(self.idx + 1, self.repository.count())
1021 1019 return self._find_next(indexes, branch)
1022 1020
1023 1021 def prev(self, branch=None):
1024 1022 """
1025 1023 Returns the previous commit from the current one; if ``branch`` is
1026 1024 given it will return the previous commit belonging to that branch.
1027 1025
1028 1026 :param branch: show commit within the given named branch
1029 1027 """
1030 1028 indexes = xrange(self.idx - 1, -1, -1)
1031 1029 return self._find_next(indexes, branch)
1032 1030
1033 1031 def _find_next(self, indexes, branch=None):
1034 1032 if branch and self.branch != branch:
1035 1033 raise VCSError('Branch option used on commit not belonging '
1036 1034 'to that branch')
1037 1035
1038 1036 for next_idx in indexes:
1039 1037 commit = self.repository.get_commit(commit_idx=next_idx)
1040 1038 if branch and branch != commit.branch:
1041 1039 continue
1042 1040 return commit
1043 1041 raise CommitDoesNotExistError
1044 1042
1045 1043 def diff(self, ignore_whitespace=True, context=3):
1046 1044 """
1047 1045 Returns a `Diff` object representing the change made by this commit.
1048 1046 """
1049 1047 parent = (
1050 1048 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1051 1049 diff = self.repository.get_diff(
1052 1050 parent, self,
1053 1051 ignore_whitespace=ignore_whitespace,
1054 1052 context=context)
1055 1053 return diff
1056 1054
1057 1055 @LazyProperty
1058 1056 def added(self):
1059 1057 """
1060 1058 Returns list of added ``FileNode`` objects.
1061 1059 """
1062 1060 raise NotImplementedError
1063 1061
1064 1062 @LazyProperty
1065 1063 def changed(self):
1066 1064 """
1067 1065 Returns list of modified ``FileNode`` objects.
1068 1066 """
1069 1067 raise NotImplementedError
1070 1068
1071 1069 @LazyProperty
1072 1070 def removed(self):
1073 1071 """
1074 1072 Returns list of removed ``FileNode`` objects.
1075 1073 """
1076 1074 raise NotImplementedError
1077 1075
1078 1076 @LazyProperty
1079 1077 def size(self):
1080 1078 """
1081 1079 Returns total number of bytes from contents of all filenodes.
1082 1080 """
1083 1081 return sum((node.size for node in self.get_filenodes_generator()))
1084 1082
1085 1083 def walk(self, topurl=''):
1086 1084 """
1087 1085 Similar to the os.walk method. Instead of the filesystem it walks
1088 1086 through the commit starting at the given ``topurl``. Returns a
1089 1087 generator of (topnode, dirnodes, filenodes) tuples.
1090 1088 """
1091 1089 topnode = self.get_node(topurl)
1092 1090 if not topnode.is_dir():
1093 1091 return
1094 1092 yield (topnode, topnode.dirs, topnode.files)
1095 1093 for dirnode in topnode.dirs:
1096 1094 for tup in self.walk(dirnode.path):
1097 1095 yield tup
1098 1096
1099 1097 def get_filenodes_generator(self):
1100 1098 """
1101 1099 Returns generator that yields *all* file nodes.
1102 1100 """
1103 1101 for topnode, dirs, files in self.walk():
1104 1102 for node in files:
1105 1103 yield node
1106 1104
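# --- usage sketch (illustrative only, not part of this changeset) ----------
# Walking a commit tree much like os.walk; assumes `commit` is a concrete
# BaseCommit subclass instance obtained from a repository.
for topnode, dirnodes, filenodes in commit.walk(''):
    paths = [node.path for node in filenodes]  # files directly under topnode
# get_filenodes_generator() flattens the same traversal to file nodes only:
total_bytes = sum(node.size for node in commit.get_filenodes_generator())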
1107 1105 #
1108 1106 # Utilities for sub classes to support consistent behavior
1109 1107 #
1110 1108
1111 1109 def no_node_at_path(self, path):
1112 1110 return NodeDoesNotExistError(
1113 1111 u"There is no file or directory at the given path: "
1114 1112 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1115 1113
1116 1114 def _fix_path(self, path):
1117 1115 """
1118 1116 Paths are stored without a trailing slash, so we need to get rid of
1119 1117 it if needed.
1120 1118 """
1121 1119 return path.rstrip('/')
1122 1120
1123 1121 #
1124 1122 # Deprecated API based on changesets
1125 1123 #
1126 1124
1127 1125 @property
1128 1126 def revision(self):
1129 1127 warnings.warn("Use idx instead", DeprecationWarning)
1130 1128 return self.idx
1131 1129
1132 1130 @revision.setter
1133 1131 def revision(self, value):
1134 1132 warnings.warn("Use idx instead", DeprecationWarning)
1135 1133 self.idx = value
1136 1134
1137 1135 def get_file_changeset(self, path):
1138 1136 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1139 1137 return self.get_file_commit(path)
1140 1138
1141 1139
1142 1140 class BaseChangesetClass(type):
1143 1141
1144 1142 def __instancecheck__(self, instance):
1145 1143 return isinstance(instance, BaseCommit)
1146 1144
1147 1145
1148 1146 class BaseChangeset(BaseCommit):
1149 1147
1150 1148 __metaclass__ = BaseChangesetClass
1151 1149
1152 1150 def __new__(cls, *args, **kwargs):
1153 1151 warnings.warn(
1154 1152 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1155 1153 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1156 1154
1157 1155
1158 1156 class BaseInMemoryCommit(object):
1159 1157 """
1160 1158 Represents differences between repository's state (most recent head) and
1161 1159 changes made *in place*.
1162 1160
1163 1161 **Attributes**
1164 1162
1165 1163 ``repository``
1166 1164 repository object for this in-memory-commit
1167 1165
1168 1166 ``added``
1169 1167 list of ``FileNode`` objects marked as *added*
1170 1168
1171 1169 ``changed``
1172 1170 list of ``FileNode`` objects marked as *changed*
1173 1171
1174 1172 ``removed``
1175 1173 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1176 1174 *removed*
1177 1175
1178 1176 ``parents``
1179 1177 list of :class:`BaseCommit` instances representing parents of
1180 1178 in-memory commit. Should always be a 2-element sequence.
1181 1179
1182 1180 """
1183 1181
1184 1182 def __init__(self, repository):
1185 1183 self.repository = repository
1186 1184 self.added = []
1187 1185 self.changed = []
1188 1186 self.removed = []
1189 1187 self.parents = []
1190 1188
1191 1189 def add(self, *filenodes):
1192 1190 """
1193 1191 Marks given ``FileNode`` objects as *to be committed*.
1194 1192
1195 1193 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1196 1194 latest commit
1197 1195 :raises ``NodeAlreadyAddedError``: if node with same path is already
1198 1196 marked as *added*
1199 1197 """
1200 1198 # Check if not already marked as *added* first
1201 1199 for node in filenodes:
1202 1200 if node.path in (n.path for n in self.added):
1203 1201 raise NodeAlreadyAddedError(
1204 1202 "Such FileNode %s is already marked for addition"
1205 1203 % node.path)
1206 1204 for node in filenodes:
1207 1205 self.added.append(node)
1208 1206
1209 1207 def change(self, *filenodes):
1210 1208 """
1211 1209 Marks given ``FileNode`` objects to be *changed* in next commit.
1212 1210
1213 1211 :raises ``EmptyRepositoryError``: if there are no commits yet
1214 1212 :raises ``NodeAlreadyExistsError``: if node with same path is already
1215 1213 marked to be *changed*
1216 1214 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1217 1215 marked to be *removed*
1218 1216 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1219 1217 commit
1220 1218 :raises ``NodeNotChangedError``: if node hasn't really been changed
1221 1219 """
1222 1220 for node in filenodes:
1223 1221 if node.path in (n.path for n in self.removed):
1224 1222 raise NodeAlreadyRemovedError(
1225 1223 "Node at %s is already marked as removed" % node.path)
1226 1224 try:
1227 1225 self.repository.get_commit()
1228 1226 except EmptyRepositoryError:
1229 1227 raise EmptyRepositoryError(
1230 1228 "Nothing to change - try to *add* new nodes rather than "
1231 1229 "changing them")
1232 1230 for node in filenodes:
1233 1231 if node.path in (n.path for n in self.changed):
1234 1232 raise NodeAlreadyChangedError(
1235 1233 "Node at '%s' is already marked as changed" % node.path)
1236 1234 self.changed.append(node)
1237 1235
1238 1236 def remove(self, *filenodes):
1239 1237 """
1240 1238 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1241 1239 *removed* in next commit.
1242 1240
1243 1241 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1244 1242 be *removed*
1245 1243 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1246 1244 be *changed*
1247 1245 """
1248 1246 for node in filenodes:
1249 1247 if node.path in (n.path for n in self.removed):
1250 1248 raise NodeAlreadyRemovedError(
1251 1249 "Node is already marked for removal at %s" % node.path)
1252 1250 if node.path in (n.path for n in self.changed):
1253 1251 raise NodeAlreadyChangedError(
1254 1252 "Node is already marked to be changed at %s" % node.path)
1255 1253 # We only mark node as *removed* - real removal is done by
1256 1254 # commit method
1257 1255 self.removed.append(node)
1258 1256
1259 1257 def reset(self):
1260 1258 """
1261 1259 Resets this instance to initial state (cleans ``added``, ``changed``
1262 1260 and ``removed`` lists).
1263 1261 """
1264 1262 self.added = []
1265 1263 self.changed = []
1266 1264 self.removed = []
1267 1265 self.parents = []
1268 1266
1269 1267 def get_ipaths(self):
1270 1268 """
1271 1269 Returns generator of paths from nodes marked as added, changed or
1272 1270 removed.
1273 1271 """
1274 1272 for node in itertools.chain(self.added, self.changed, self.removed):
1275 1273 yield node.path
1276 1274
1277 1275 def get_paths(self):
1278 1276 """
1279 1277 Returns list of paths from nodes marked as added, changed or removed.
1280 1278 """
1281 1279 return list(self.get_ipaths())
1282 1280
1283 1281 def check_integrity(self, parents=None):
1284 1282 """
1285 1283 Checks in-memory commit's integrity. Also, sets parents if not
1286 1284 already set.
1287 1285
1288 1286 :raises CommitError: if any error occurs (e.g.
1289 1287 ``NodeDoesNotExistError``).
1290 1288 """
1291 1289 if not self.parents:
1292 1290 parents = parents or []
1293 1291 if len(parents) == 0:
1294 1292 try:
1295 1293 parents = [self.repository.get_commit(), None]
1296 1294 except EmptyRepositoryError:
1297 1295 parents = [None, None]
1298 1296 elif len(parents) == 1:
1299 1297 parents += [None]
1300 1298 self.parents = parents
1301 1299
1302 1300 # Local parents, only if not None
1303 1301 parents = [p for p in self.parents if p]
1304 1302
1305 1303 # Check nodes marked as added
1306 1304 for p in parents:
1307 1305 for node in self.added:
1308 1306 try:
1309 1307 p.get_node(node.path)
1310 1308 except NodeDoesNotExistError:
1311 1309 pass
1312 1310 else:
1313 1311 raise NodeAlreadyExistsError(
1314 1312 "Node `%s` already exists at %s" % (node.path, p))
1315 1313
1316 1314 # Check nodes marked as changed
1317 1315 missing = set(self.changed)
1318 1316 not_changed = set(self.changed)
1319 1317 if self.changed and not parents:
1320 1318 raise NodeDoesNotExistError(str(self.changed[0].path))
1321 1319 for p in parents:
1322 1320 for node in self.changed:
1323 1321 try:
1324 1322 old = p.get_node(node.path)
1325 1323 missing.remove(node)
1326 1324 # if content actually changed, remove node from not_changed
1327 1325 if old.content != node.content:
1328 1326 not_changed.remove(node)
1329 1327 except NodeDoesNotExistError:
1330 1328 pass
1331 1329 if self.changed and missing:
1332 1330 raise NodeDoesNotExistError(
1333 1331 "Node `%s` marked as modified but missing in parents: %s"
1334 1332 % (node.path, parents))
1335 1333
1336 1334 if self.changed and not_changed:
1337 1335 raise NodeNotChangedError(
1338 1336 "Node `%s` wasn't actually changed (parents: %s)"
1339 1337 % (not_changed.pop().path, parents))
1340 1338
1341 1339 # Check nodes marked as removed
1342 1340 if self.removed and not parents:
1343 1341 raise NodeDoesNotExistError(
1344 1342 "Cannot remove node at %s as there "
1345 1343 "were no parents specified" % self.removed[0].path)
1346 1344 really_removed = set()
1347 1345 for p in parents:
1348 1346 for node in self.removed:
1349 1347 try:
1350 1348 p.get_node(node.path)
1351 1349 really_removed.add(node)
1352 1350 except CommitError:
1353 1351 pass
1354 1352 not_removed = set(self.removed) - really_removed
1355 1353 if not_removed:
1356 1354 # TODO: johbo: This code branch does not seem to be covered
1357 1355 raise NodeDoesNotExistError(
1358 1356 "Cannot remove node at %s from "
1359 1357 "following parents: %s" % (not_removed, parents))
1360 1358
1361 1359 def commit(
1362 1360 self, message, author, parents=None, branch=None, date=None,
1363 1361 **kwargs):
1364 1362 """
1365 1363 Performs in-memory commit (doesn't check workdir in any way) and
1366 1364 returns newly created :class:`BaseCommit`. Updates repository's
1367 1365 attribute `commits`.
1368 1366
1369 1367 .. note::
1370 1368
1371 1369 When overriding this method, each backend should call
1372 1370 ``self.check_integrity(parents)`` first.
1373 1371
1374 1372 :param message: message of the commit
1375 1373 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1376 1374 :param parents: single parent or sequence of parents from which commit
1377 1375 would be derived
1378 1376 :param date: ``datetime.datetime`` instance. Defaults to
1379 1377 ``datetime.datetime.now()``.
1380 1378 :param branch: branch name, as string. If none given, default backend's
1381 1379 branch would be used.
1382 1380
1383 1381 :raises ``CommitError``: if any error occurs while committing
1384 1382 """
1385 1383 raise NotImplementedError
1386 1384
1387 1385
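# --- usage sketch (illustrative only, not part of this changeset) ----------
# Typical add -> commit flow on an in-memory commit; assumes `repo` is an
# already initialized backend repository and that FileNode accepts a
# `content` keyword. Path, message and author below are placeholders.
from rhodecode.lib.vcs.nodes import FileNode

imc = repo.in_memory_commit
imc.add(FileNode('docs/readme.rst', content='hello'))
new_commit = imc.commit(
    message=u'Add readme', author=u'Joe Doe <joe.doe@example.com>')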
1388 1386 class BaseInMemoryChangesetClass(type):
1389 1387
1390 1388 def __instancecheck__(self, instance):
1391 1389 return isinstance(instance, BaseInMemoryCommit)
1392 1390
1393 1391
1394 1392 class BaseInMemoryChangeset(BaseInMemoryCommit):
1395 1393
1396 1394 __metaclass__ = BaseInMemoryChangesetClass
1397 1395
1398 1396 def __new__(cls, *args, **kwargs):
1399 1397 warnings.warn(
1400 1398 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1401 1399 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1402 1400
1403 1401
1404 1402 class EmptyCommit(BaseCommit):
1405 1403 """
1406 1404 A dummy empty commit. It's possible to pass a hash when creating
1407 1405 an EmptyCommit.
1408 1406 """
1409 1407
1410 1408 def __init__(
1411 1409 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1412 1410 message='', author='', date=None):
1413 1411 self._empty_commit_id = commit_id
1414 1412 # TODO: johbo: Solve idx parameter, default value does not make
1415 1413 # too much sense
1416 1414 self.idx = idx
1417 1415 self.message = message
1418 1416 self.author = author
1419 1417 self.date = date or datetime.datetime.fromtimestamp(0)
1420 1418 self.repository = repo
1421 1419 self.alias = alias
1422 1420
1423 1421 @LazyProperty
1424 1422 def raw_id(self):
1425 1423 """
1426 1424 Returns raw string identifying this commit, useful for web
1427 1425 representation.
1428 1426 """
1429 1427
1430 1428 return self._empty_commit_id
1431 1429
1432 1430 @LazyProperty
1433 1431 def branch(self):
1434 1432 if self.alias:
1435 1433 from rhodecode.lib.vcs.backends import get_backend
1436 1434 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1437 1435
1438 1436 @LazyProperty
1439 1437 def short_id(self):
1440 1438 return self.raw_id[:12]
1441 1439
1442 1440 @LazyProperty
1443 1441 def id(self):
1444 1442 return self.raw_id
1445 1443
1446 1444 def get_file_commit(self, path):
1447 1445 return self
1448 1446
1449 1447 def get_file_content(self, path):
1450 1448 return u''
1451 1449
1452 1450 def get_file_size(self, path):
1453 1451 return 0
1454 1452
1455 1453
1456 1454 class EmptyChangesetClass(type):
1457 1455
1458 1456 def __instancecheck__(self, instance):
1459 1457 return isinstance(instance, EmptyCommit)
1460 1458
1461 1459
1462 1460 class EmptyChangeset(EmptyCommit):
1463 1461
1464 1462 __metaclass__ = EmptyChangesetClass
1465 1463
1466 1464 def __new__(cls, *args, **kwargs):
1467 1465 warnings.warn(
1468 1466 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1469 1467 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1470 1468
1471 1469 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1472 1470 alias=None, revision=-1, message='', author='', date=None):
1473 1471 if requested_revision is not None:
1474 1472 warnings.warn(
1475 1473 "Parameter requested_revision not supported anymore",
1476 1474 DeprecationWarning)
1477 1475 super(EmptyChangeset, self).__init__(
1478 1476 commit_id=cs, repo=repo, alias=alias, idx=revision,
1479 1477 message=message, author=author, date=date)
1480 1478
1481 1479 @property
1482 1480 def revision(self):
1483 1481 warnings.warn("Use idx instead", DeprecationWarning)
1484 1482 return self.idx
1485 1483
1486 1484 @revision.setter
1487 1485 def revision(self, value):
1488 1486 warnings.warn("Use idx instead", DeprecationWarning)
1489 1487 self.idx = value
1490 1488
1491 1489
1492 1490 class EmptyRepository(BaseRepository):
1493 1491 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1494 1492 pass
1495 1493
1496 1494 def get_diff(self, *args, **kwargs):
1497 1495 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1498 1496 return GitDiff('')
1499 1497
1500 1498
1501 1499 class CollectionGenerator(object):
1502 1500
1503 1501 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1504 1502 self.repo = repo
1505 1503 self.commit_ids = commit_ids
1506 1504 # TODO: (oliver) this isn't currently hooked up
1507 1505 self.collection_size = None
1508 1506 self.pre_load = pre_load
1509 1507
1510 1508 def __len__(self):
1511 1509 if self.collection_size is not None:
1512 1510 return self.collection_size
1513 1511 return self.commit_ids.__len__()
1514 1512
1515 1513 def __iter__(self):
1516 1514 for commit_id in self.commit_ids:
1517 1515 # TODO: johbo: Mercurial passes in commit indices or commit ids
1518 1516 yield self._commit_factory(commit_id)
1519 1517
1520 1518 def _commit_factory(self, commit_id):
1521 1519 """
1522 1520 Allows backends to override the way commits are generated.
1523 1521 """
1524 1522 return self.repo.get_commit(commit_id=commit_id,
1525 1523 pre_load=self.pre_load)
1526 1524
1527 1525 def __getslice__(self, i, j):
1528 1526 """
1529 1527 Returns a new CollectionGenerator over the sliced range of commits.
1530 1528 """
1531 1529 commit_ids = self.commit_ids[i:j]
1532 1530 return self.__class__(
1533 1531 self.repo, commit_ids, pre_load=self.pre_load)
1534 1532
1535 1533 def __repr__(self):
1536 1534 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1537 1535
1538 1536
1539 1537 class Config(object):
1540 1538 """
1541 1539 Represents the configuration for a repository.
1542 1540
1543 1541 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1544 1542 standard library. It implements only the needed subset.
1545 1543 """
1546 1544
1547 1545 def __init__(self):
1548 1546 self._values = {}
1549 1547
1550 1548 def copy(self):
1551 1549 clone = Config()
1552 1550 for section, values in self._values.items():
1553 1551 clone._values[section] = values.copy()
1554 1552 return clone
1555 1553
1556 1554 def __repr__(self):
1557 1555 return '<Config(%s sections) at %s>' % (
1558 1556 len(self._values), hex(id(self)))
1559 1557
1560 1558 def items(self, section):
1561 1559 return self._values.get(section, {}).iteritems()
1562 1560
1563 1561 def get(self, section, option):
1564 1562 return self._values.get(section, {}).get(option)
1565 1563
1566 1564 def set(self, section, option, value):
1567 1565 section_values = self._values.setdefault(section, {})
1568 1566 section_values[option] = value
1569 1567
1570 1568 def clear_section(self, section):
1571 1569 self._values[section] = {}
1572 1570
1573 1571 def serialize(self):
1574 1572 """
1575 1573 Creates a list of three-element tuples (section, key, value)
1576 1574 representing this config object.
1577 1575 """
1578 1576 items = []
1579 1577 for section in self._values:
1580 1578 for option, value in self._values[section].items():
1581 1579 items.append(
1582 1580 (safe_str(section), safe_str(option), safe_str(value)))
1583 1581 return items
1584 1582
1585 1583
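# --- usage sketch (illustrative only, not part of this changeset) ----------
# Minimal round trip through the Config API defined above; the section and
# option names are placeholders.
config = Config()
config.set('web', 'push_ssl', 'false')
assert config.get('web', 'push_ssl') == 'false'
assert config.serialize() == [('web', 'push_ssl', 'false')]
clone = config.copy()        # sections are copied, not shared
clone.clear_section('web')   # does not affect the original `config`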
1586 1584 class Diff(object):
1587 1585 """
1588 1586 Represents a diff result from a repository backend.
1589 1587
1590 1588 Subclasses have to provide a backend specific value for
1591 1589 :attr:`_header_re` and :attr:`_meta_re`.
1592 1590 """
1593 1591 _meta_re = None
1594 1592 _header_re = None
1595 1593
1596 1594 def __init__(self, raw_diff):
1597 1595 self.raw = raw_diff
1598 1596
1599 1597 def chunks(self):
1600 1598 """
1601 1599 Splits the diff into separate ``diff --git a/file b/file`` chunks.
1602 1600 To make diffs consistent we must prepend the raw diff with \n, and
1603 1601 make sure we can detect the last chunk, as it is handled by a special rule.
1604 1602 """
1605 1603
1606 1604 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1607 1605 header = diff_parts[0]
1608 1606
1609 1607 if self._meta_re:
1610 1608 match = self._meta_re.match(header)
1611 1609
1612 1610 chunks = diff_parts[1:]
1613 1611 total_chunks = len(chunks)
1614 1612
1615 1613 return (
1616 1614 DiffChunk(chunk, self, cur_chunk == total_chunks)
1617 1615 for cur_chunk, chunk in enumerate(chunks, start=1))
1618 1616
1619 1617
1620 1618 class DiffChunk(object):
1621 1619
1622 1620 def __init__(self, chunk, diff, last_chunk):
1623 1621 self._diff = diff
1624 1622
1625 1623 # since we split by \ndiff --git that part is lost from original diff
1626 1624 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1627 1625 if not last_chunk:
1628 1626 chunk += '\n'
1629 1627
1630 1628 match = self._diff._header_re.match(chunk)
1631 1629 self.header = match.groupdict()
1632 1630 self.diff = chunk[match.end():]
1633 1631 self.raw = chunk
1634 1632
1635 1633
1636 1634 class BasePathPermissionChecker(object):
1637 1635
1638 1636 @staticmethod
1639 1637 def create_from_patterns(includes, excludes):
1640 1638 if includes and '*' in includes and not excludes:
1641 1639 return AllPathPermissionChecker()
1642 1640 elif excludes and '*' in excludes:
1643 1641 return NonePathPermissionChecker()
1644 1642 else:
1645 1643 return PatternPathPermissionChecker(includes, excludes)
1646 1644
1647 1645 @property
1648 1646 def has_full_access(self):
1649 1647 raise NotImplementedError()
1650 1648
1651 1649 def has_access(self, path):
1652 1650 raise NotImplementedError()
1653 1651
1654 1652
1655 1653 class AllPathPermissionChecker(BasePathPermissionChecker):
1656 1654
1657 1655 @property
1658 1656 def has_full_access(self):
1659 1657 return True
1660 1658
1661 1659 def has_access(self, path):
1662 1660 return True
1663 1661
1664 1662
1665 1663 class NonePathPermissionChecker(BasePathPermissionChecker):
1666 1664
1667 1665 @property
1668 1666 def has_full_access(self):
1669 1667 return False
1670 1668
1671 1669 def has_access(self, path):
1672 1670 return False
1673 1671
1674 1672
1675 1673 class PatternPathPermissionChecker(BasePathPermissionChecker):
1676 1674
1677 1675 def __init__(self, includes, excludes):
1678 1676 self.includes = includes
1679 1677 self.excludes = excludes
1680 self.includes_re = [] if not includes else [re.compile(fnmatch.translate(pattern)) for pattern in includes]
1681 self.excludes_re = [] if not excludes else [re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1678 self.includes_re = [] if not includes else [
1679 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1680 self.excludes_re = [] if not excludes else [
1681 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1682 1682
1683 1683 @property
1684 1684 def has_full_access(self):
1685 1685 return '*' in self.includes and not self.excludes
1686 1686
1687 1687 def has_access(self, path):
1688 for re in self.excludes_re:
1689 if re.match(path):
1688 for regex in self.excludes_re:
1689 if regex.match(path):
1690 1690 return False
1691 for re in self.includes_re:
1692 if re.match(path):
1691 for regex in self.includes_re:
1692 if regex.match(path):
1693 1693 return True
1694 return False No newline at end of file
1694 return False
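# --- usage sketch (illustrative only, not part of this changeset) ----------
# The factory above picks the cheapest checker for the given patterns.
checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'src/*'], excludes=['src/secret/*'])
assert checker.has_access('docs/index.rst')      # matched by an include
assert not checker.has_access('src/secret/key')  # matched by an exclude
assert not checker.has_full_access               # includes are restricted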
@@ -1,545 +1,545 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from pyramid.compat import configparser
32 31
33 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 33 from rhodecode.lib.utils import safe_unicode, safe_str
35 34 from rhodecode.lib.utils2 import safe_int
36 35 from rhodecode.lib.vcs.conf import settings
37 36 from rhodecode.lib.vcs.backends import base
38 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 38 from rhodecode.lib.vcs.nodes import (
40 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49 _author_property = 'author'
50 50 _committer_property = 'committer'
51 51 _date_property = 'commit_time'
52 52 _date_tz_property = 'commit_timezone'
53 53 _message_property = 'message'
54 54 _parents_property = 'parents'
55 55
56 56 _filter_pre_load = [
57 57 # done through a more complex tree walk on parents
58 58 "affected_files",
59 59 # based on repository cached property
60 60 "branch",
61 61 # done through subprocess not remote call
62 62 "children",
63 63 # done through a more complex tree walk on parents
64 64 "status",
65 65 # mercurial specific property not supported here
66 66 "_file_paths",
67 67 # mercurial specific property not supported here
68 68 'obsolete',
69 69 # mercurial specific property not supported here
70 70 'phase',
71 71 # mercurial specific property not supported here
72 72 'hidden'
73 73 ]
74 74
75 75 def __init__(self, repository, raw_id, idx, pre_load=None):
76 76 self.repository = repository
77 77 self._remote = repository._remote
78 78 # TODO: johbo: Tweak of raw_id should not be necessary
79 79 self.raw_id = safe_str(raw_id)
80 80 self.idx = idx
81 81
82 82 self._set_bulk_properties(pre_load)
83 83
84 84 # caches
85 85 self._stat_modes = {} # stat info for paths
86 86 self._paths = {} # path processed with parse_tree
87 87 self.nodes = {}
88 88 self._submodules = None
89 89
90 90 def _set_bulk_properties(self, pre_load):
91 91 if not pre_load:
92 92 return
93 93 pre_load = [entry for entry in pre_load
94 94 if entry not in self._filter_pre_load]
95 95 if not pre_load:
96 96 return
97 97
98 98 result = self._remote.bulk_request(self.raw_id, pre_load)
99 99 for attr, value in result.items():
100 100 if attr in ["author", "message"]:
101 101 if value:
102 102 value = safe_unicode(value)
103 103 elif attr == "date":
104 104 value = utcdate_fromtimestamp(*value)
105 105 elif attr == "parents":
106 106 value = self._make_commits(value)
107 107 self.__dict__[attr] = value
108 108
109 109 @LazyProperty
110 110 def _commit(self):
111 111 return self._remote[self.raw_id]
112 112
113 113 @LazyProperty
114 114 def _tree_id(self):
115 115 return self._remote[self._commit['tree']]['id']
116 116
117 117 @LazyProperty
118 118 def id(self):
119 119 return self.raw_id
120 120
121 121 @LazyProperty
122 122 def short_id(self):
123 123 return self.raw_id[:12]
124 124
125 125 @LazyProperty
126 126 def message(self):
127 127 return safe_unicode(
128 128 self._remote.commit_attribute(self.id, self._message_property))
129 129
130 130 @LazyProperty
131 131 def committer(self):
132 132 return safe_unicode(
133 133 self._remote.commit_attribute(self.id, self._committer_property))
134 134
135 135 @LazyProperty
136 136 def author(self):
137 137 return safe_unicode(
138 138 self._remote.commit_attribute(self.id, self._author_property))
139 139
140 140 @LazyProperty
141 141 def date(self):
142 142 unix_ts, tz = self._remote.get_object_attrs(
143 143 self.raw_id, self._date_property, self._date_tz_property)
144 144 return utcdate_fromtimestamp(unix_ts, tz)
145 145
146 146 @LazyProperty
147 147 def status(self):
148 148 """
149 149 Returns modified, added, removed, deleted files for current commit
150 150 """
151 151 return self.changed, self.added, self.removed
152 152
153 153 @LazyProperty
154 154 def tags(self):
155 155 tags = [safe_unicode(name) for name,
156 156 commit_id in self.repository.tags.iteritems()
157 157 if commit_id == self.raw_id]
158 158 return tags
159 159
160 160 @LazyProperty
161 161 def branch(self):
162 162 for name, commit_id in self.repository.branches.iteritems():
163 163 if commit_id == self.raw_id:
164 164 return safe_unicode(name)
165 165 return None
166 166
167 167 def _get_id_for_path(self, path):
168 168 path = safe_str(path)
169 169 if path in self._paths:
170 170 return self._paths[path]
171 171
172 172 tree_id = self._tree_id
173 173
174 174 path = path.strip('/')
175 175 if path == '':
176 176 data = [tree_id, "tree"]
177 177 self._paths[''] = data
178 178 return data
179 179
180 180 parts = path.split('/')
181 181 dirs, name = parts[:-1], parts[-1]
182 182 cur_dir = ''
183 183
184 184 # initially extract things from root dir
185 185 tree_items = self._remote.tree_items(tree_id)
186 186 self._process_tree_items(tree_items, cur_dir)
187 187
188 188 for dir in dirs:
189 189 if cur_dir:
190 190 cur_dir = '/'.join((cur_dir, dir))
191 191 else:
192 192 cur_dir = dir
193 193 dir_id = None
194 194 for item, stat_, id_, type_ in tree_items:
195 195 if item == dir:
196 196 dir_id = id_
197 197 break
198 198 if dir_id:
199 199 if type_ != "tree":
200 200 raise CommitError('%s is not a directory' % cur_dir)
201 201 # update tree
202 202 tree_items = self._remote.tree_items(dir_id)
203 203 else:
204 204 raise CommitError('%s has not been found' % cur_dir)
205 205
206 206 # cache all items from the given traversed tree
207 207 self._process_tree_items(tree_items, cur_dir)
208 208
209 209 if path not in self._paths:
210 210 raise self.no_node_at_path(path)
211 211
212 212 return self._paths[path]
213 213
214 214 def _process_tree_items(self, items, cur_dir):
215 215 for item, stat_, id_, type_ in items:
216 216 if cur_dir:
217 217 name = '/'.join((cur_dir, item))
218 218 else:
219 219 name = item
220 220 self._paths[name] = [id_, type_]
221 221 self._stat_modes[name] = stat_
222 222
223 223 def _get_kind(self, path):
224 224 path_id, type_ = self._get_id_for_path(path)
225 225 if type_ == 'blob':
226 226 return NodeKind.FILE
227 227 elif type_ == 'tree':
228 228 return NodeKind.DIR
229 229 elif type_ == 'link':
230 230 return NodeKind.SUBMODULE
231 231 return None
232 232
233 233 def _get_filectx(self, path):
234 234 path = self._fix_path(path)
235 235 if self._get_kind(path) != NodeKind.FILE:
236 236 raise CommitError(
237 237 "File does not exist for commit %s at '%s'" %
238 238 (self.raw_id, path))
239 239 return path
240 240
241 241 def _get_file_nodes(self):
242 242 return chain(*(t[2] for t in self.walk()))
243 243
244 244 @LazyProperty
245 245 def parents(self):
246 246 """
247 247 Returns list of parent commits.
248 248 """
249 249 parent_ids = self._remote.commit_attribute(
250 250 self.id, self._parents_property)
251 251 return self._make_commits(parent_ids)
252 252
253 253 @LazyProperty
254 254 def children(self):
255 255 """
256 256 Returns list of child commits.
257 257 """
258 258 rev_filter = settings.GIT_REV_FILTER
259 259 output, __ = self.repository.run_git_command(
260 260 ['rev-list', '--children'] + rev_filter)
261 261
262 262 child_ids = []
263 263 pat = re.compile(r'^%s' % self.raw_id)
264 264 for l in output.splitlines():
265 265 if pat.match(l):
266 266 found_ids = l.split(' ')[1:]
267 267 child_ids.extend(found_ids)
268 268 return self._make_commits(child_ids)
269 269
270 270 def _make_commits(self, commit_ids, pre_load=None):
271 271 return [
272 272 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
273 273 for commit_id in commit_ids]
274 274
275 275 def get_file_mode(self, path):
276 276 """
277 277 Returns stat mode of the file at the given `path`.
278 278 """
279 279 path = safe_str(path)
280 280 # ensure path is traversed
281 281 self._get_id_for_path(path)
282 282 return self._stat_modes[path]
283 283
284 284 def is_link(self, path):
285 285 return stat.S_ISLNK(self.get_file_mode(path))
286 286
287 287 def get_file_content(self, path):
288 288 """
289 289 Returns content of the file at given `path`.
290 290 """
291 291 id_, _ = self._get_id_for_path(path)
292 292 return self._remote.blob_as_pretty_string(id_)
293 293
294 294 def get_file_size(self, path):
295 295 """
296 296 Returns size of the file at given `path`.
297 297 """
298 298 id_, _ = self._get_id_for_path(path)
299 299 return self._remote.blob_raw_length(id_)
300 300
301 301 def get_file_history(self, path, limit=None, pre_load=None):
302 302 """
303 303 Returns history of file as reversed list of `GitCommit` objects for
304 304 which file at given `path` has been modified.
305 305
306 306 TODO: This function now uses an underlying 'git' command which works
307 307 quickly but ideally we should replace with an algorithm.
308 308 """
309 309 self._get_filectx(path)
310 310 f_path = safe_str(path)
311 311
312 312 cmd = ['log']
313 313 if limit:
314 314 cmd.extend(['-n', str(safe_int(limit, 0))])
315 315 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
316 316
317 317 output, __ = self.repository.run_git_command(cmd)
318 318 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
319 319
320 320 return [
321 321 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
322 322 for commit_id in commit_ids]
323 323
324 324 # TODO: unused for now potential replacement for subprocess
325 325 def get_file_history_2(self, path, limit=None, pre_load=None):
326 326 """
327 327 Returns history of file as reversed list of `Commit` objects for
328 328 which file at given `path` has been modified.
329 329 """
330 330 self._get_filectx(path)
331 331 f_path = safe_str(path)
332 332
333 333 commit_ids = self._remote.get_file_history(f_path, self.id, limit)
334 334
335 335 return [
336 336 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
337 337 for commit_id in commit_ids]
338 338
339 339 def get_file_annotate(self, path, pre_load=None):
340 340 """
341 341 Returns a generator of four element tuples with
342 342 lineno, commit_id, commit lazy loader and line
343 343
344 344 TODO: This function now uses an underlying 'git' command, which is
345 345 generally not good. Should be replaced with an algorithm iterating
346 346 commits.
347 347 """
348 348 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
349 349 # -l ==> outputs long shas (and we need all 40 characters)
350 350 # --root ==> doesn't put '^' character for boundaries
351 351 # -r commit_id ==> blames for the given commit
352 352 output, __ = self.repository.run_git_command(cmd)
353 353
354 354 for i, blame_line in enumerate(output.split('\n')[:-1]):
355 355 line_no = i + 1
356 356 commit_id, line = re.split(r' ', blame_line, 1)
357 357 yield (
358 358 line_no, commit_id,
359 359 lambda: self.repository.get_commit(commit_id=commit_id,
360 360 pre_load=pre_load),
361 361 line)
362 362
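# --- usage sketch (illustrative only, not part of this changeset) ----------
# Annotation yields 4-element tuples; the third element is a lazy loader
# that resolves the full commit only when called. Assumes `git_commit` is a
# GitCommit instance and 'setup.py' is a placeholder path inside it.
for line_no, commit_id, commit_loader, line in \
        git_commit.get_file_annotate('setup.py'):
    blame_commit = commit_loader()  # resolved on demand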
363 363 def get_nodes(self, path):
364 364 if self._get_kind(path) != NodeKind.DIR:
365 365 raise CommitError(
366 366 "Directory does not exist for commit %s at "
367 367 " '%s'" % (self.raw_id, path))
368 368 path = self._fix_path(path)
369 369 id_, _ = self._get_id_for_path(path)
370 370 tree_id = self._remote[id_]['id']
371 371 dirnodes = []
372 372 filenodes = []
373 373 alias = self.repository.alias
374 374 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
375 375 if type_ == 'link':
376 376 url = self._get_submodule_url('/'.join((path, name)))
377 377 dirnodes.append(SubModuleNode(
378 378 name, url=url, commit=id_, alias=alias))
379 379 continue
380 380
381 381 if path != '':
382 382 obj_path = '/'.join((path, name))
383 383 else:
384 384 obj_path = name
385 385 if obj_path not in self._stat_modes:
386 386 self._stat_modes[obj_path] = stat_
387 387
388 388 if type_ == 'tree':
389 389 dirnodes.append(DirNode(obj_path, commit=self))
390 390 elif type_ == 'blob':
391 391 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
392 392 else:
393 393 raise CommitError(
394 394 "Requested object should be Tree or Blob, is %s" % type_)
395 395
396 396 nodes = dirnodes + filenodes
397 397 for node in nodes:
398 398 if node.path not in self.nodes:
399 399 self.nodes[node.path] = node
400 400 nodes.sort()
401 401 return nodes
402 402
403 403 def get_node(self, path, pre_load=None):
404 404 if isinstance(path, unicode):
405 405 path = path.encode('utf-8')
406 406 path = self._fix_path(path)
407 407 if path not in self.nodes:
408 408 try:
409 409 id_, type_ = self._get_id_for_path(path)
410 410 except CommitError:
411 411 raise NodeDoesNotExistError(
412 412 "Cannot find one of parents' directories for a given "
413 413 "path: %s" % path)
414 414
415 415 if type_ == 'link':
416 416 url = self._get_submodule_url(path)
417 417 node = SubModuleNode(path, url=url, commit=id_,
418 418 alias=self.repository.alias)
419 419 elif type_ == 'tree':
420 420 if path == '':
421 421 node = RootNode(commit=self)
422 422 else:
423 423 node = DirNode(path, commit=self)
424 424 elif type_ == 'blob':
425 425 node = FileNode(path, commit=self, pre_load=pre_load)
426 426 else:
427 427 raise self.no_node_at_path(path)
428 428
429 429 # cache node
430 430 self.nodes[path] = node
431 431 return self.nodes[path]
432 432
433 433 def get_largefile_node(self, path):
434 434 id_, _ = self._get_id_for_path(path)
435 435 pointer_spec = self._remote.is_large_file(id_)
436 436
437 437 if pointer_spec:
438 438 # the content of that regular FileNode is the hash of the largefile
439 439 file_id = pointer_spec.get('oid_hash')
440 440 if self._remote.in_largefiles_store(file_id):
441 441 lf_path = self._remote.store_path(file_id)
442 442 return LargeFileNode(lf_path, commit=self, org_path=path)
443 443
444 444 @LazyProperty
445 445 def affected_files(self):
446 446 """
447 447 Gets a fast accessible file changes for given commit
448 448 """
449 449 added, modified, deleted = self._changes_cache
450 450 return list(added.union(modified).union(deleted))
451 451
452 452 @LazyProperty
453 453 def _changes_cache(self):
454 454 added = set()
455 455 modified = set()
456 456 deleted = set()
457 457 _r = self._remote
458 458
459 459 parents = self.parents
460 460 if not self.parents:
461 461 parents = [base.EmptyCommit()]
462 462 for parent in parents:
463 463 if isinstance(parent, base.EmptyCommit):
464 464 oid = None
465 465 else:
466 466 oid = parent.raw_id
467 467 changes = _r.tree_changes(oid, self.raw_id)
468 468 for (oldpath, newpath), (_, _), (_, _) in changes:
469 469 if newpath and oldpath:
470 470 modified.add(newpath)
471 471 elif newpath and not oldpath:
472 472 added.add(newpath)
473 473 elif not newpath and oldpath:
474 474 deleted.add(oldpath)
475 475 return added, modified, deleted
476 476
477 477 def _get_paths_for_status(self, status):
478 478 """
479 479 Returns sorted list of paths for given ``status``.
480 480
481 481 :param status: one of: *added*, *modified* or *deleted*
482 482 """
483 483 added, modified, deleted = self._changes_cache
484 484 return sorted({
485 485 'added': list(added),
486 486 'modified': list(modified),
487 487 'deleted': list(deleted)}[status]
488 488 )
489 489
490 490 @LazyProperty
491 491 def added(self):
492 492 """
493 493 Returns list of added ``FileNode`` objects.
494 494 """
495 495 if not self.parents:
496 496 return list(self._get_file_nodes())
497 497 return AddedFileNodesGenerator(
498 498 [n for n in self._get_paths_for_status('added')], self)
499 499
500 500 @LazyProperty
501 501 def changed(self):
502 502 """
503 503 Returns list of modified ``FileNode`` objects.
504 504 """
505 505 if not self.parents:
506 506 return []
507 507 return ChangedFileNodesGenerator(
508 508 [n for n in self._get_paths_for_status('modified')], self)
509 509
510 510 @LazyProperty
511 511 def removed(self):
512 512 """
513 513 Returns list of removed ``FileNode`` objects.
514 514 """
515 515 if not self.parents:
516 516 return []
517 517 return RemovedFileNodesGenerator(
518 518 [n for n in self._get_paths_for_status('deleted')], self)
519 519
520 520 def _get_submodule_url(self, submodule_path):
521 521 git_modules_path = '.gitmodules'
522 522
523 523 if self._submodules is None:
524 524 self._submodules = {}
525 525
526 526 try:
527 527 submodules_node = self.get_node(git_modules_path)
528 528 except NodeDoesNotExistError:
529 529 return None
530 530
531 531 content = submodules_node.content
532 532
533 533 # ConfigParser fails if there is leading whitespace on the lines
534 534 content = '\n'.join(l.strip() for l in content.split('\n'))
535 535
536 536 parser = configparser.ConfigParser()
537 537 parser.readfp(StringIO(content))
538 538
539 539 for section in parser.sections():
540 540 path = parser.get(section, 'path')
541 541 url = parser.get(section, 'url')
542 542 if path and url:
543 543 self._submodules[path.strip('/')] = url
544 544
545 545 return self._submodules.get(submodule_path.strip('/'))
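# --- illustrative note (not part of this changeset) -------------------------
# Given a .gitmodules entry such as (placeholder path and url):
#
#   [submodule "deps/vcsserver"]
#       path = deps/vcsserver
#       url = https://example.com/vcsserver.git
#
# _get_submodule_url('deps/vcsserver') returns the configured url, and
# get_node()/get_nodes() wrap such 'link' tree entries in SubModuleNode
# objects.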
@@ -1,929 +1,936 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 import ConfigParser
24 import os
25 25 import logging
26 26 import binascii
27 import os
28 27 import shutil
29 28 import urllib
30 29
31 30 from zope.cachedescriptors.property import Lazy as LazyProperty
32 31
33 32 from rhodecode.lib.compat import OrderedDict
34 33 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 date_astimestamp)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
37 35 from rhodecode.lib.utils import safe_unicode, safe_str
38 36 from rhodecode.lib.vcs import connection, exceptions
39 37 from rhodecode.lib.vcs.backends.base import (
40 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 39 MergeFailureReason, Reference, BasePathPermissionChecker)
42 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 43 from rhodecode.lib.vcs.exceptions import (
46 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 from rhodecode.lib.vcs.compat import configparser
48 47
49 48 hexlify = binascii.hexlify
50 49 nullid = "\0" * 20
51 50
52 51 log = logging.getLogger(__name__)
53 52
54 53
55 54 class MercurialRepository(BaseRepository):
56 55 """
57 56 Mercurial repository backend
58 57 """
59 58 DEFAULT_BRANCH_NAME = 'default'
60 59
61 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 61 update_after_clone=False, with_wire=None):
63 62 """
64 63 Raises RepositoryError if repository could not be found at the given
65 64 ``repo_path``.
66 65
67 66 :param repo_path: local path of the repository
68 67 :param config: config object containing the repo configuration
69 68 :param create=False: if set to True, would try to create repository if
70 69 it does not exist rather than raising exception
71 70 :param src_url=None: would try to clone repository from given location
72 71 :param update_after_clone=False: sets update of working copy after
73 72 making a clone
74 73 """
75 74
76 75 self.path = safe_str(os.path.abspath(repo_path))
77 76 # mercurial since 4.4.X requires certain configuration to be present
78 77 # because sometimes we init the repos with config we need to meet
79 78 # special requirements
80 79 self.config = config if config else self.get_default_config(
81 80 default=[('extensions', 'largefiles', '1')])
82 81
83 82 self._remote = connection.Hg(
84 83 self.path, self.config, with_wire=with_wire)
85 84
86 85 self._init_repo(create, src_url, update_after_clone)
87 86
88 87 # caches
89 88 self._commit_ids = {}
90 89
91 90 @LazyProperty
92 91 def commit_ids(self):
93 92 """
94 93 Returns list of commit ids, in ascending order. Being lazy
95 94 attribute allows external tools to inject shas from cache.
96 95 """
97 96 commit_ids = self._get_all_commit_ids()
98 97 self._rebuild_cache(commit_ids)
99 98 return commit_ids
100 99
101 100 def _rebuild_cache(self, commit_ids):
102 101 self._commit_ids = dict((commit_id, index)
103 102 for index, commit_id in enumerate(commit_ids))
104 103
105 104 @LazyProperty
106 105 def branches(self):
107 106 return self._get_branches()
108 107
109 108 @LazyProperty
110 109 def branches_closed(self):
111 110 return self._get_branches(active=False, closed=True)
112 111
113 112 @LazyProperty
114 113 def branches_all(self):
115 114 all_branches = {}
116 115 all_branches.update(self.branches)
117 116 all_branches.update(self.branches_closed)
118 117 return all_branches
119 118
120 119 def _get_branches(self, active=True, closed=False):
121 120 """
122 121 Gets branches for this repository
123 122 Returns only not closed active branches by default
124 123
125 124 :param active: return also active branches
126 125 :param closed: return also closed branches
127 126
128 127 """
129 128 if self.is_empty():
130 129 return {}
131 130
132 131 def get_name(ctx):
133 132 return ctx[0]
134 133
135 134 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 135 self._remote.branches(active, closed).items()]
137 136
138 137 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139 138
140 139 @LazyProperty
141 140 def tags(self):
142 141 """
143 142 Gets tags for this repository
144 143 """
145 144 return self._get_tags()
146 145
147 146 def _get_tags(self):
148 147 if self.is_empty():
149 148 return {}
150 149
151 150 def get_name(ctx):
152 151 return ctx[0]
153 152
154 153 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 154 self._remote.tags().items()]
156 155
157 156 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158 157
159 158 def tag(self, name, user, commit_id=None, message=None, date=None,
160 159 **kwargs):
161 160 """
162 161 Creates and returns a tag for the given ``commit_id``.
163 162
164 163 :param name: name for new tag
165 164 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 165 :param commit_id: commit id for which new tag would be created
167 166 :param message: message of the tag's commit
168 167 :param date: date of tag's commit
169 168
170 169 :raises TagAlreadyExistError: if tag with same name already exists
171 170 """
172 171 if name in self.tags:
173 172 raise TagAlreadyExistError("Tag %s already exists" % name)
174 173 commit = self.get_commit(commit_id=commit_id)
175 174 local = kwargs.setdefault('local', False)
176 175
177 176 if message is None:
178 177 message = "Added tag %s for commit %s" % (name, commit.short_id)
179 178
180 179 date, tz = date_to_timestamp_plus_offset(date)
181 180
182 181 self._remote.tag(
183 182 name, commit.raw_id, message, local, user, date, tz)
184 183 self._remote.invalidate_vcs_cache()
185 184
186 185 # Reinitialize tags
187 186 self.tags = self._get_tags()
188 187 tag_id = self.tags[name]
189 188
190 189 return self.get_commit(commit_id=tag_id)
191 190
192 191 def remove_tag(self, name, user, message=None, date=None):
193 192 """
194 193 Removes tag with the given `name`.
195 194
196 195 :param name: name of the tag to be removed
197 196 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 197 :param message: message of the tag's removal commit
199 198 :param date: date of tag's removal commit
200 199
201 200 :raises TagDoesNotExistError: if tag with given name does not exists
202 201 """
203 202 if name not in self.tags:
204 203 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 204 if message is None:
206 205 message = "Removed tag %s" % name
207 206 local = False
208 207
209 208 date, tz = date_to_timestamp_plus_offset(date)
210 209
211 210 self._remote.tag(name, nullid, message, local, user, date, tz)
212 211 self._remote.invalidate_vcs_cache()
213 212 self.tags = self._get_tags()
214 213
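# --- usage sketch (illustrative only, not part of this changeset) ----------
# Creating and removing a tag; assumes `repo` is a non-empty
# MercurialRepository and the user string below is a placeholder.
tagged_commit = repo.tag(
    name='v1.2.0', user='Joe Doe <joe.doe@example.com>',
    commit_id=repo.get_commit().raw_id, message='Release 1.2.0')
repo.remove_tag('v1.2.0', user='Joe Doe <joe.doe@example.com>')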
215 214 @LazyProperty
216 215 def bookmarks(self):
217 216 """
218 217 Gets bookmarks for this repository
219 218 """
220 219 return self._get_bookmarks()
221 220
222 221 def _get_bookmarks(self):
223 222 if self.is_empty():
224 223 return {}
225 224
226 225 def get_name(ctx):
227 226 return ctx[0]
228 227
229 228 _bookmarks = [
230 229 (safe_unicode(n), hexlify(h)) for n, h in
231 230 self._remote.bookmarks().items()]
232 231
233 232 return OrderedDict(sorted(_bookmarks, key=get_name))
234 233
235 234 def _get_all_commit_ids(self):
236 235 return self._remote.get_all_commit_ids('visible')
237 236
238 237 def get_diff(
239 238 self, commit1, commit2, path='', ignore_whitespace=False,
240 239 context=3, path1=None):
241 240 """
242 241 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 242 `commit2` since `commit1`.
244 243
245 244 :param commit1: Entry point from which diff is shown. Can be
246 245 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 246 the changes since empty state of the repository until `commit2`
248 247 :param commit2: Until which commit changes should be shown.
249 248 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 249 changes. Defaults to ``False``.
251 250 :param context: How many lines before/after changed lines should be
252 251 shown. Defaults to ``3``.
253 252 """
254 253 self._validate_diff_commits(commit1, commit2)
255 254 if path1 is not None and path1 != path:
256 255 raise ValueError("Diff of two different paths not supported.")
257 256
258 257 if path:
259 258 file_filter = [self.path, path]
260 259 else:
261 260 file_filter = None
262 261
263 262 diff = self._remote.diff(
264 263 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 264 opt_git=True, opt_ignorews=ignore_whitespace,
266 265 context=context)
267 266 return MercurialDiff(diff)
268 267
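# --- usage sketch (illustrative only, not part of this changeset) ----------
# Diff between two commits, optionally narrowed to one path; assumes `repo`
# is a non-empty MercurialRepository and 'setup.py' is a placeholder path.
old = repo.get_commit(commit_idx=0)
new = repo.get_commit()  # tip
diff = repo.get_diff(old, new, path='setup.py',
                     ignore_whitespace=True, context=5)
raw_text = diff.raw      # plain git-style diff text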
269 268 def strip(self, commit_id, branch=None):
270 269 self._remote.strip(commit_id, update=False, backup="none")
271 270
272 271 self._remote.invalidate_vcs_cache()
273 272 self.commit_ids = self._get_all_commit_ids()
274 273 self._rebuild_cache(self.commit_ids)
275 274
276 275 def verify(self):
277 276 verify = self._remote.verify()
278 277
279 278 self._remote.invalidate_vcs_cache()
280 279 return verify
281 280
282 281 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 282 if commit_id1 == commit_id2:
284 283 return commit_id1
285 284
286 285 ancestors = self._remote.revs_from_revspec(
287 286 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 287 other_path=repo2.path)
289 288 return repo2[ancestors[0]].raw_id if ancestors else None
290 289
291 290 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 291 if commit_id1 == commit_id2:
293 292 commits = []
294 293 else:
295 294 if merge:
296 295 indexes = self._remote.revs_from_revspec(
297 296 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 297 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 298 else:
300 299 indexes = self._remote.revs_from_revspec(
301 300 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 301 commit_id1, other_path=repo2.path)
303 302
304 303 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 304 for idx in indexes]
306 305
307 306 return commits
308 307
309 308 @staticmethod
310 309 def check_url(url, config):
311 310 """
312 311 Function will check given url and try to verify if it's a valid
313 312 link. Sometimes it may happen that mercurial will issue a basic
314 313 auth request that can cause the whole API to hang when used from python
315 314 or other external calls.
316 315
317 316 On failure it'll raise urllib2.HTTPError; the exception is also
318 317 thrown when the return code is not 200
319 318 """
320 319 # check first if it's not a local url
321 320 if os.path.isdir(url) or url.startswith('file:'):
322 321 return True
323 322
324 323 # Request the _remote to verify the url
325 324 return connection.Hg.check_url(url, config.serialize())
326 325
327 326 @staticmethod
328 327 def is_valid_repository(path):
329 328 return os.path.isdir(os.path.join(path, '.hg'))
330 329
331 330 def _init_repo(self, create, src_url=None, update_after_clone=False):
332 331 """
333 332 Function will check for mercurial repository in given path. If there
334 333 is no repository in that path it will raise an exception unless
335 334 `create` parameter is set to True - in that case repository would
336 335 be created.
337 336
338 337 If `src_url` is given, would try to clone repository from the
339 338 location at the given clone_point. Additionally it'll update the
340 339 working copy according to the `update_after_clone` flag.
341 340 """
342 341 if create and os.path.exists(self.path):
343 342 raise RepositoryError(
344 343 "Cannot create repository at %s, location already exists"
345 344 % self.path)
346 345
347 346 if src_url:
348 347 url = str(self._get_url(src_url))
349 348 MercurialRepository.check_url(url, self.config)
350 349
351 350 self._remote.clone(url, self.path, update_after_clone)
352 351
353 352 # Don't try to create if we've already cloned repo
354 353 create = False
355 354
356 355 if create:
357 356 os.makedirs(self.path, mode=0755)
358 357
359 358 self._remote.localrepository(create)
360 359
361 360 @LazyProperty
362 361 def in_memory_commit(self):
363 362 return MercurialInMemoryCommit(self)
364 363
365 364 @LazyProperty
366 365 def description(self):
367 366 description = self._remote.get_config_value(
368 367 'web', 'description', untrusted=True)
369 368 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370 369
371 370 @LazyProperty
372 371 def contact(self):
373 372 contact = (
374 373 self._remote.get_config_value("web", "contact") or
375 374 self._remote.get_config_value("ui", "username"))
376 375 return safe_unicode(contact or self.DEFAULT_CONTACT)
377 376
378 377 @LazyProperty
379 378 def last_change(self):
380 379 """
381 380 Returns last change made on this repository as
382 381 `datetime.datetime` object.
383 382 """
384 383 try:
385 384 return self.get_commit().date
386 385 except RepositoryError:
387 386 tzoffset = makedate()[1]
388 387 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389 388
390 389 def _get_fs_mtime(self):
391 390 # fallback to filesystem
392 391 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 392 st_path = os.path.join(self.path, '.hg', "store")
394 393 if os.path.exists(cl_path):
395 394 return os.stat(cl_path).st_mtime
396 395 else:
397 396 return os.stat(st_path).st_mtime
398 397
399 398 def _sanitize_commit_idx(self, idx):
400 399 # Note: Mercurial has ``int(-1)`` reserved as a non-existing id_or_idx
401 400 # number. A `long` is treated in the correct way though. So we convert
402 401 # `int` to `long` here to make sure it is handled correctly.
403 402 if isinstance(idx, int):
404 403 return long(idx)
405 404 return idx
406 405
407 406 def _get_url(self, url):
408 407 """
409 408 Returns normalized url. If no schema is given, it falls back
410 409 to the filesystem
411 410 (``file:///``) schema.
412 411 """
413 412 url = url.encode('utf8')
414 413 if url != 'default' and '://' not in url:
415 414 url = "file:" + urllib.pathname2url(url)
416 415 return url
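A small sketch of the normalization above on POSIX (the path is made up); URLs that already carry a scheme, and the special value 'default', pass through unchanged.

    >>> import urllib
    >>> 'file:' + urllib.pathname2url('/srv/repos/demo')
    'file:/srv/repos/demo'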
417 416
418 417 def get_hook_location(self):
419 418 """
420 419 Returns the absolute path to the location where hooks are stored.
421 420 """
422 421 return os.path.join(self.path, '.hg', '.hgrc')
423 422
424 423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
425 424 """
426 425 Returns a ``MercurialCommit`` object representing the repository's
427 426 commit at the given `commit_id` or `commit_idx`.
428 427 """
429 428 if self.is_empty():
430 429 raise EmptyRepositoryError("There are no commits yet")
431 430
432 431 if commit_id is not None:
433 432 self._validate_commit_id(commit_id)
434 433 try:
435 434 idx = self._commit_ids[commit_id]
436 435 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
437 436 except KeyError:
438 437 pass
439 438 elif commit_idx is not None:
440 439 self._validate_commit_idx(commit_idx)
441 440 commit_idx = self._sanitize_commit_idx(commit_idx)
442 441 try:
443 442 id_ = self.commit_ids[commit_idx]
444 443 if commit_idx < 0:
445 444 commit_idx += len(self.commit_ids)
446 445 return MercurialCommit(
447 446 self, id_, commit_idx, pre_load=pre_load)
448 447 except IndexError:
449 448 commit_id = commit_idx
450 449 else:
451 450 commit_id = "tip"
452 451
453 452 # TODO Paris: Ugly hack to "serialize" long for msgpack
454 453 if isinstance(commit_id, long):
455 454 commit_id = float(commit_id)
456 455
457 456 if isinstance(commit_id, unicode):
458 457 commit_id = safe_str(commit_id)
459 458
460 459 try:
461 460 raw_id, idx = self._remote.lookup(commit_id, both=True)
462 461 except CommitDoesNotExistError:
463 462 msg = "Commit %s does not exist for %s" % (
464 463 commit_id, self)
465 464 raise CommitDoesNotExistError(msg)
466 465
467 466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
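Hedged usage sketch (the repository object and the hash placeholder are hypothetical):

    repo.get_commit(commit_idx=0)            # first commit
    repo.get_commit(commit_idx=-1)           # last commit, same as "tip"
    repo.get_commit('<full commit hash>')    # resolved via self._remote.lookup
    repo.get_commit()                        # no arguments -> "tip"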
468 467
469 468 def get_commits(
470 469 self, start_id=None, end_id=None, start_date=None, end_date=None,
471 470 branch_name=None, show_hidden=False, pre_load=None):
472 471 """
473 472 Returns a generator of ``MercurialCommit`` objects from start to end
474 473 (both inclusive).
475 474
476 475 :param start_id: None, str(commit_id)
477 476 :param end_id: None, str(commit_id)
478 477 :param start_date: if specified, commits with a commit date earlier
479 478 than ``start_date`` are filtered out of the returned set
480 479 :param end_date: if specified, commits with a commit date later than
481 480 ``end_date`` are filtered out of the returned set
482 481 :param branch_name: if specified, commits not reachable from the given
483 482 branch are filtered out of the returned set
484 483 :param show_hidden: show hidden commits, such as obsolete commits or
485 484 commits hidden by Mercurial evolve
486 485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
487 486 exist.
488 487 :raise CommitDoesNotExistError: If commit for given ``start`` or
489 488 ``end`` could not be found.
490 489 """
491 490 # first, check that this is not an empty repo
492 491 branch_ancestors = False
493 492 if self.is_empty():
494 493 raise EmptyRepositoryError("There are no commits yet")
495 494 self._validate_branch_name(branch_name)
496 495
497 496 if start_id is not None:
498 497 self._validate_commit_id(start_id)
499 498 c_start = self.get_commit(commit_id=start_id)
500 499 start_pos = self._commit_ids[c_start.raw_id]
501 500 else:
502 501 start_pos = None
503 502
504 503 if end_id is not None:
505 504 self._validate_commit_id(end_id)
506 505 c_end = self.get_commit(commit_id=end_id)
507 506 end_pos = max(0, self._commit_ids[c_end.raw_id])
508 507 else:
509 508 end_pos = None
510 509
511 510 if None not in [start_id, end_id] and start_pos > end_pos:
512 511 raise RepositoryError(
513 512 "Start commit '%s' cannot be after end commit '%s'" %
514 513 (start_id, end_id))
515 514
516 515 if end_pos is not None:
517 516 end_pos += 1
518 517
519 518 commit_filter = []
520 519
521 520 if branch_name and not branch_ancestors:
522 521 commit_filter.append('branch("%s")' % (branch_name,))
523 522 elif branch_name and branch_ancestors:
524 523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
525 524
526 525 if start_date and not end_date:
527 526 commit_filter.append('date(">%s")' % (start_date,))
528 527 if end_date and not start_date:
529 528 commit_filter.append('date("<%s")' % (end_date,))
530 529 if start_date and end_date:
531 530 commit_filter.append(
532 531 'date(">%s") and date("<%s")' % (start_date, end_date))
533 532
534 533 if not show_hidden:
535 534 commit_filter.append('not obsolete()')
536 535 commit_filter.append('not hidden()')
537 536
538 537 # TODO: johbo: Figure out a simpler way for this solution
539 538 collection_generator = CollectionGenerator
540 539 if commit_filter:
541 540 commit_filter = ' and '.join(map(safe_str, commit_filter))
542 541 revisions = self._remote.rev_range([commit_filter])
543 542 collection_generator = MercurialIndexBasedCollectionGenerator
544 543 else:
545 544 revisions = self.commit_ids
546 545
547 546 if start_pos or end_pos:
548 547 revisions = revisions[start_pos:end_pos]
549 548
550 549 return collection_generator(self, revisions, pre_load=pre_load)
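A sketch of the revset filter assembled above for a call such as get_commits(branch_name='default', start_date=d1, end_date=d2) with show_hidden=False (the branch name and dates are made up); the pieces are joined with ' and ' and passed to self._remote.rev_range:

    branch("default") and date(">2018-01-01") and date("<2018-02-01") and not obsolete() and not hidden()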
551 550
552 551 def pull(self, url, commit_ids=None):
553 552 """
554 553 Tries to pull changes from an external location.
555 554
556 555 :param commit_ids: Optional. Can be set to a list of commit ids
557 556 which shall be pulled from the other repository.
558 557 """
559 558 url = self._get_url(url)
560 559 self._remote.pull(url, commit_ids=commit_ids)
561 560 self._remote.invalidate_vcs_cache()
562 561
563 562 def push(self, url):
564 563 url = self._get_url(url)
565 564 self._remote.sync_push(url)
566 565
567 566 def _local_clone(self, clone_path):
568 567 """
569 568 Create a local clone of the current repo.
570 569 """
571 570 self._remote.clone(self.path, clone_path, update_after_clone=True,
572 571 hooks=False)
573 572
574 573 def _update(self, revision, clean=False):
575 574 """
576 575 Update the working copy to the specified revision.
577 576 """
578 577 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
579 578 self._remote.update(revision, clean=clean)
580 579
581 580 def _identify(self):
582 581 """
583 582 Return the current state of the working directory.
584 583 """
585 584 return self._remote.identify().strip().rstrip('+')
586 585
587 586 def _heads(self, branch=None):
588 587 """
589 588 Return the commit ids of the repository heads.
590 589 """
591 590 return self._remote.heads(branch=branch).strip().split(' ')
592 591
593 592 def _ancestor(self, revision1, revision2):
594 593 """
595 594 Return the common ancestor of the two revisions.
596 595 """
597 596 return self._remote.ancestor(revision1, revision2)
598 597
599 598 def _local_push(
600 599 self, revision, repository_path, push_branches=False,
601 600 enable_hooks=False):
602 601 """
603 602 Push the given revision to the specified repository.
604 603
605 604 :param push_branches: allow creating branches in the target repo.
606 605 """
607 606 self._remote.push(
608 607 [revision], repository_path, hooks=enable_hooks,
609 608 push_branches=push_branches)
610 609
611 610 def _local_merge(self, target_ref, merge_message, user_name, user_email,
612 611 source_ref, use_rebase=False, dry_run=False):
613 612 """
614 613 Merge the given source_revision into the checked out revision.
615 614
616 615 Returns the commit id of the merge and a boolean indicating if the
617 616 commit needs to be pushed.
618 617 """
619 618 self._update(target_ref.commit_id)
620 619
621 620 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
622 621 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
623 622
624 623 if ancestor == source_ref.commit_id:
625 624 # Nothing to do, the changes were already integrated
626 625 return target_ref.commit_id, False
627 626
628 627 elif ancestor == target_ref.commit_id and is_the_same_branch:
629 628 # In this case we should force a commit message
630 629 return source_ref.commit_id, True
631 630
632 631 if use_rebase:
633 632 try:
634 633 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
635 634 target_ref.commit_id)
636 635 self.bookmark(bookmark_name, revision=source_ref.commit_id)
637 636 self._remote.rebase(
638 637 source=source_ref.commit_id, dest=target_ref.commit_id)
639 638 self._remote.invalidate_vcs_cache()
640 639 self._update(bookmark_name)
641 640 return self._identify(), True
642 641 except RepositoryError:
643 642 # The rebase-abort may raise another exception which 'hides'
644 643 # the original one, therefore we log it here.
645 644 log.exception('Error while rebasing shadow repo during merge.')
646 645
647 646 # Cleanup any rebase leftovers
648 647 self._remote.invalidate_vcs_cache()
649 648 self._remote.rebase(abort=True)
650 649 self._remote.invalidate_vcs_cache()
651 650 self._remote.update(clean=True)
652 651 raise
653 652 else:
654 653 try:
655 654 self._remote.merge(source_ref.commit_id)
656 655 self._remote.invalidate_vcs_cache()
657 656 self._remote.commit(
658 657 message=safe_str(merge_message),
659 658 username=safe_str('%s <%s>' % (user_name, user_email)))
660 659 self._remote.invalidate_vcs_cache()
661 660 return self._identify(), True
662 661 except RepositoryError:
663 662 # Cleanup any merge leftovers
664 663 self._remote.update(clean=True)
665 664 raise
666 665
667 666 def _local_close(self, target_ref, user_name, user_email,
668 667 source_ref, close_message=''):
669 668 """
670 669 Close the branch of the given source_revision
671 670
672 671 Returns the commit id of the close and a boolean indicating if the
673 672 commit needs to be pushed.
674 673 """
675 674 self._update(source_ref.commit_id)
676 675 message = close_message or "Closing branch: `{}`".format(source_ref.name)
677 676 try:
678 677 self._remote.commit(
679 678 message=safe_str(message),
680 679 username=safe_str('%s <%s>' % (user_name, user_email)),
681 680 close_branch=True)
682 681 self._remote.invalidate_vcs_cache()
683 682 return self._identify(), True
684 683 except RepositoryError:
685 684 # Cleanup any commit leftovers
686 685 self._remote.update(clean=True)
687 686 raise
688 687
689 688 def _is_the_same_branch(self, target_ref, source_ref):
690 689 return (
691 690 self._get_branch_name(target_ref) ==
692 691 self._get_branch_name(source_ref))
693 692
694 693 def _get_branch_name(self, ref):
695 694 if ref.type == 'branch':
696 695 return ref.name
697 696 return self._remote.ctx_branch(ref.commit_id)
698 697
699 698 def _get_shadow_repository_path(self, workspace_id):
700 699 # The name of the shadow repository must start with '.', so it is
701 700 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
702 701 return os.path.join(
703 702 os.path.dirname(self.path),
704 703 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
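For illustration only (paths and workspace id are made up): a repository at /srv/repos/group/demo with workspace_id 'pr-12' resolves to the shadow path

    /srv/repos/group/.__shadow_demo_pr-12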
705 704
706 705 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
707 706 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
708 707 if not os.path.exists(shadow_repository_path):
709 708 self._local_clone(shadow_repository_path)
710 709 log.debug(
711 710 'Prepared shadow repository in %s', shadow_repository_path)
712 711
713 712 return shadow_repository_path
714 713
715 714 def cleanup_merge_workspace(self, workspace_id):
716 715 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
717 716 shutil.rmtree(shadow_repository_path, ignore_errors=True)
718 717
719 718 def _merge_repo(self, shadow_repository_path, target_ref,
720 719 source_repo, source_ref, merge_message,
721 720 merger_name, merger_email, dry_run=False,
722 721 use_rebase=False, close_branch=False):
723 722
724 723 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
725 724 'rebase' if use_rebase else 'merge', dry_run)
726 725 if target_ref.commit_id not in self._heads():
727 726 return MergeResponse(
728 727 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
729 728
730 729 try:
731 730 if (target_ref.type == 'branch' and
732 731 len(self._heads(target_ref.name)) != 1):
733 732 return MergeResponse(
734 733 False, False, None,
735 734 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
736 735 except CommitDoesNotExistError:
737 736 log.exception('Failure when looking up branch heads on hg target')
738 737 return MergeResponse(
739 738 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
740 739
741 740 shadow_repo = self._get_shadow_instance(shadow_repository_path)
742 741
743 742 log.debug('Pulling in target reference %s', target_ref)
744 743 self._validate_pull_reference(target_ref)
745 744 shadow_repo._local_pull(self.path, target_ref)
746 745 try:
747 746 log.debug('Pulling in source reference %s', source_ref)
748 747 source_repo._validate_pull_reference(source_ref)
749 748 shadow_repo._local_pull(source_repo.path, source_ref)
750 749 except CommitDoesNotExistError:
751 750 log.exception('Failure when doing local pull on hg shadow repo')
752 751 return MergeResponse(
753 752 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
754 753
755 754 merge_ref = None
756 755 merge_commit_id = None
757 756 close_commit_id = None
758 757 merge_failure_reason = MergeFailureReason.NONE
759 758
760 759 # enforce that close branch is used only when the source is
761 760 # an actual branch
762 761 close_branch = close_branch and source_ref.type == 'branch'
763 762
764 763 # don't allow closing the branch if source and target are the same
765 764 close_branch = close_branch and source_ref.name != target_ref.name
766 765
767 766 needs_push_on_close = False
768 767 if close_branch and not use_rebase and not dry_run:
769 768 try:
770 769 close_commit_id, needs_push_on_close = shadow_repo._local_close(
771 770 target_ref, merger_name, merger_email, source_ref)
772 771 merge_possible = True
773 772 except RepositoryError:
774 773 log.exception(
775 774 'Failure when doing close branch on hg shadow repo')
776 775 merge_possible = False
777 776 merge_failure_reason = MergeFailureReason.MERGE_FAILED
778 777 else:
779 778 merge_possible = True
780 779
780 needs_push = False
781 781 if merge_possible:
782 782 try:
783 783 merge_commit_id, needs_push = shadow_repo._local_merge(
784 784 target_ref, merge_message, merger_name, merger_email,
785 785 source_ref, use_rebase=use_rebase, dry_run=dry_run)
786 786 merge_possible = True
787 787
788 788 # read the state of the close action, in case it
789 789 # required a push
790 790 needs_push = needs_push or needs_push_on_close
791 791
792 792 # Set a bookmark pointing to the merge commit. This bookmark
793 793 # may be used to easily identify the last successful merge
794 794 # commit in the shadow repository.
795 795 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
796 796 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
797 797 except SubrepoMergeError:
798 798 log.exception(
799 799 'Subrepo merge error during local merge on hg shadow repo.')
800 800 merge_possible = False
801 801 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
802 802 needs_push = False
803 803 except RepositoryError:
804 804 log.exception('Failure when doing local merge on hg shadow repo')
805 805 merge_possible = False
806 806 merge_failure_reason = MergeFailureReason.MERGE_FAILED
807 807 needs_push = False
808 808
809 809 if merge_possible and not dry_run:
810 810 if needs_push:
811 811 # In case the target is a bookmark, update it, so that after pushing
812 812 # the bookmark is also updated in the target.
813 813 if target_ref.type == 'book':
814 814 shadow_repo.bookmark(
815 815 target_ref.name, revision=merge_commit_id)
816 816 try:
817 817 shadow_repo_with_hooks = self._get_shadow_instance(
818 818 shadow_repository_path,
819 819 enable_hooks=True)
820 820 # This is the actual merge action, we push from shadow
821 821 # into origin.
822 822 # Note: the push_branches option will push any new branch
823 823 # defined in the source repository to the target. This may
824 824 # be dangerous as branches are permanent in Mercurial.
825 825 # This feature was requested in issue #441.
826 826 shadow_repo_with_hooks._local_push(
827 827 merge_commit_id, self.path, push_branches=True,
828 828 enable_hooks=True)
829 829
830 830 # maybe we also need to push the close_commit_id
831 831 if close_commit_id:
832 832 shadow_repo_with_hooks._local_push(
833 833 close_commit_id, self.path, push_branches=True,
834 834 enable_hooks=True)
835 835 merge_succeeded = True
836 836 except RepositoryError:
837 837 log.exception(
838 838 'Failure when doing local push from the shadow '
839 839 'repository to the target repository.')
840 840 merge_succeeded = False
841 841 merge_failure_reason = MergeFailureReason.PUSH_FAILED
842 842 else:
843 843 merge_succeeded = True
844 844 else:
845 845 merge_succeeded = False
846 846
847 847 return MergeResponse(
848 848 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
849 849
850 850 def _get_shadow_instance(
851 851 self, shadow_repository_path, enable_hooks=False):
852 852 config = self.config.copy()
853 853 if not enable_hooks:
854 854 config.clear_section('hooks')
855 855 return MercurialRepository(shadow_repository_path, config)
856 856
857 857 def _validate_pull_reference(self, reference):
858 858 if not (reference.name in self.bookmarks or
859 859 reference.name in self.branches or
860 860 self.get_commit(reference.commit_id)):
861 861 raise CommitDoesNotExistError(
862 862 'Unknown branch, bookmark or commit id')
863 863
864 864 def _local_pull(self, repository_path, reference):
865 865 """
866 866 Fetch a branch, bookmark or commit from a local repository.
867 867 """
868 868 repository_path = os.path.abspath(repository_path)
869 869 if repository_path == self.path:
870 870 raise ValueError('Cannot pull from the same repository')
871 871
872 872 reference_type_to_option_name = {
873 873 'book': 'bookmark',
874 874 'branch': 'branch',
875 875 }
876 876 option_name = reference_type_to_option_name.get(
877 877 reference.type, 'revision')
878 878
879 879 if option_name == 'revision':
880 880 ref = reference.commit_id
881 881 else:
882 882 ref = reference.name
883 883
884 884 options = {option_name: [ref]}
885 885 self._remote.pull_cmd(repository_path, hooks=False, **options)
886 886 self._remote.invalidate_vcs_cache()
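A sketch of how the mapping above turns a Reference into pull_cmd keyword arguments (reference names and hashes are made up):

    Reference('book', 'feature-x', '<hash>')   ->  pull_cmd(path, hooks=False, bookmark=['feature-x'])
    Reference('branch', 'default', '<hash>')   ->  pull_cmd(path, hooks=False, branch=['default'])
    Reference('rev', '<hash>', '<hash>')       ->  pull_cmd(path, hooks=False, revision=['<hash>'])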
887 887
888 888 def bookmark(self, bookmark, revision=None):
889 889 if isinstance(bookmark, unicode):
890 890 bookmark = safe_str(bookmark)
891 891 self._remote.bookmark(bookmark, revision=revision)
892 892 self._remote.invalidate_vcs_cache()
893 893
894 894 def get_path_permissions(self, username):
895 hgacl_file = self.path + '/.hg/hgacl'
895 hgacl_file = os.path.join(self.path, '.hg/hgacl')
896
897 def read_patterns(suffix):
898 svalue = None
899 try:
900 svalue = hgacl.get('narrowhgacl', username + suffix)
901 except configparser.NoOptionError:
902 try:
903 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
904 except configparser.NoOptionError:
905 pass
906 if not svalue:
907 return None
908 result = ['/']
909 for pattern in svalue.split():
910 result.append(pattern)
911 if '*' not in pattern and '?' not in pattern:
912 result.append(pattern + '/*')
913 return result
914
896 915 if os.path.exists(hgacl_file):
897 916 try:
898 hgacl = ConfigParser.RawConfigParser()
917 hgacl = configparser.RawConfigParser()
899 918 hgacl.read(hgacl_file)
900 def read_patterns(suffix):
901 svalue = None
902 try:
903 svalue = hgacl.get('narrowhgacl', username + suffix)
904 except ConfigParser.NoOptionError:
905 try:
906 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
907 except ConfigParser.NoOptionError:
908 pass
909 if not svalue:
910 return None
911 result = ['/']
912 for pattern in svalue.split():
913 result.append(pattern)
914 if '*' not in pattern and '?' not in pattern:
915 result.append(pattern + '/*')
916 return result
919
917 920 includes = read_patterns('.includes')
918 921 excludes = read_patterns('.excludes')
919 return BasePathPermissionChecker.create_from_patterns(includes, excludes)
922 return BasePathPermissionChecker.create_from_patterns(
923 includes, excludes)
920 924 except BaseException as e:
921 raise exceptions.RepositoryRequirementError('Cannot read ACL settings for {}: {}'.format(self.name, e))
925 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
926 hgacl_file, self.name, e)
927 raise exceptions.RepositoryRequirementError(msg)
922 928 else:
923 929 return None
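A hedged sketch of the .hg/hgacl layout the code above expects; the section name and option suffixes come from the lookups in read_patterns, while the user name and patterns are invented for illustration:

    [narrowhgacl]
    bob.includes = docs src/module
    bob.excludes = src/module/private
    default.includes = docs

For user "bob", read_patterns('.includes') would return ['/', 'docs', 'docs/*', 'src/module', 'src/module/*'], which is then passed to BasePathPermissionChecker.create_from_patterns together with the excludes.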
924 930
931
925 932 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
926 933
927 934 def _commit_factory(self, commit_id):
928 935 return self.repo.get_commit(
929 936 commit_idx=commit_id, pre_load=self.pre_load)