##// END OF EJS Templates
scm: added push options for Mercurial and Git to allow remote repository sync.
marcink -
r2492:d48fe67d default
parent child Browse files
Show More
@@ -1,977 +1,981 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.compat import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 utcdate_fromtimestamp, makedate, date_astimestamp)
35 35 from rhodecode.lib.utils import safe_unicode, safe_str
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46 46
47 47
# Matches either a short (12 hex chars) or a full (40 hex chars) commit id.
# NOTE: the previous pattern r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$' nested the
# brackets incorrectly: the left alternative was a character class (also
# matching '[') with no trailing anchor, so any string *starting* with 12 hex
# chars matched, while a plain 40-char sha only matched by accident through
# that unanchored branch. The grouped alternation below anchors both forms.
SHA_PATTERN = re.compile(r'^(?:[0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 update_after_clone=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else Config()
66 66 self._remote = connection.Git(
67 67 self.path, self.config, with_wire=with_wire)
68 68
69 69 self._init_repo(create, src_url, update_after_clone, bare)
70 70
71 71 # caches
72 72 self._commit_ids = {}
73 73
74 74 self.bookmarks = {}
75 75
    @LazyProperty
    def bare(self):
        # True when the repository has no working copy, as reported by the
        # vcsserver remote (cached after first access).
        return self._remote.bare()

    @LazyProperty
    def head(self):
        # Commit id HEAD currently points at, as reported by the remote.
        return self._remote.head()
83 83
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index lookup table in sync with this list
        self._rebuild_cache(commit_ids)
        return commit_ids
93 93
94 94 def _rebuild_cache(self, commit_ids):
95 95 self._commit_ids = dict((commit_id, index)
96 96 for index, commit_id in enumerate(commit_ids))
97 97
98 98 def run_git_command(self, cmd, **opts):
99 99 """
100 100 Runs given ``cmd`` as git command and returns tuple
101 101 (stdout, stderr).
102 102
103 103 :param cmd: git command to be executed
104 104 :param opts: env options to pass into Subprocess command
105 105 """
106 106 if not isinstance(cmd, list):
107 107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108 108
109 109 out, err = self._remote.run_git_command(cmd, **opts)
110 110 if err:
111 111 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 112 return out, err
113 113
114 114 @staticmethod
115 115 def check_url(url, config):
116 116 """
117 117 Function will check given url and try to verify if it's a valid
118 118 link. Sometimes it may happened that git will issue basic
119 119 auth request that can cause whole API to hang when used from python
120 120 or other external calls.
121 121
122 122 On failures it'll raise urllib2.HTTPError, exception is also thrown
123 123 when the return code is non 200
124 124 """
125 125 # check first if it's not an url
126 126 if os.path.isdir(url) or url.startswith('file:'):
127 127 return True
128 128
129 129 if '+' in url.split('://', 1)[0]:
130 130 url = url.split('+', 1)[1]
131 131
132 132 # Request the _remote to verify the url
133 133 return connection.Git.check_url(url, config.serialize())
134 134
135 135 @staticmethod
136 136 def is_valid_repository(path):
137 137 if os.path.isdir(os.path.join(path, '.git')):
138 138 return True
139 139 # check case of bare repository
140 140 try:
141 141 GitRepository(path)
142 142 return True
143 143 except VCSError:
144 144 pass
145 145 return False
146 146
    def _init_repo(self, create, src_url=None, update_after_clone=False,
                   bare=False):
        # Initialize or validate the on-disk repository.
        #
        # With ``create``: clone from ``src_url`` when given, otherwise make a
        # fresh (optionally bare) repository. Without ``create``: only verify
        # that ``self.path`` already holds a git repository.
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        try:
            if create and src_url:
                # validate the source before attempting a clone
                GitRepository.check_url(src_url, self.config)
                self.clone(src_url, update_after_clone, bare)
            elif create:
                os.makedirs(self.path, mode=0755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()
            else:
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
174 174
    def _get_all_commit_ids(self, filters=None):
        # Return all commit ids reachable from branches and tags, oldest
        # first, optionally narrowed by ``filters`` keys: since / until /
        # branch_name.
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors
        try:
            self._remote.head()
        except KeyError:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # NOTE(review): with a branch filter only tags plus the named
                # branch are walked, replacing the --branches default.
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
            rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
211 211
212 212 def _get_commit_id(self, commit_id_or_idx):
213 213 def is_null(value):
214 214 return len(value) == commit_id_or_idx.count('0')
215 215
216 216 if self.is_empty():
217 217 raise EmptyRepositoryError("There are no commits yet")
218 218
219 219 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
220 220 return self.commit_ids[-1]
221 221
222 222 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
223 223 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
224 224 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
225 225 try:
226 226 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
227 227 except Exception:
228 228 msg = "Commit %s does not exist for %s" % (
229 229 commit_id_or_idx, self)
230 230 raise CommitDoesNotExistError(msg)
231 231
232 232 elif is_bstr:
233 233 # check full path ref, eg. refs/heads/master
234 234 ref_id = self._refs.get(commit_id_or_idx)
235 235 if ref_id:
236 236 return ref_id
237 237
238 238 # check branch name
239 239 branch_ids = self.branches.values()
240 240 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
241 241 if ref_id:
242 242 return ref_id
243 243
244 244 # check tag name
245 245 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
246 246 if ref_id:
247 247 return ref_id
248 248
249 249 if (not SHA_PATTERN.match(commit_id_or_idx) or
250 250 commit_id_or_idx not in self.commit_ids):
251 251 msg = "Commit %s does not exist for %s" % (
252 252 commit_id_or_idx, self)
253 253 raise CommitDoesNotExistError(msg)
254 254
255 255 # Ensure we return full id
256 256 if not SHA_PATTERN.match(str(commit_id_or_idx)):
257 257 raise CommitDoesNotExistError(
258 258 "Given commit id %s not recognized" % commit_id_or_idx)
259 259 return commit_id_or_idx
260 260
261 261 def get_hook_location(self):
262 262 """
263 263 returns absolute path to location where hooks are stored
264 264 """
265 265 loc = os.path.join(self.path, 'hooks')
266 266 if not self.bare:
267 267 loc = os.path.join(self.path, '.git', 'hooks')
268 268 return loc
269 269
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to the mtime of the index/HEAD
            # files on disk, using the local timezone offset
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
281 281
282 282 def _get_fs_mtime(self):
283 283 idx_loc = '' if self.bare else '.git'
284 284 # fallback to filesystem
285 285 in_path = os.path.join(self.path, idx_loc, "index")
286 286 he_path = os.path.join(self.path, idx_loc, "HEAD")
287 287 if os.path.exists(in_path):
288 288 return os.stat(in_path).st_mtime
289 289 else:
290 290 return os.stat(he_path).st_mtime
291 291
    @LazyProperty
    def description(self):
        # Repository description as stored by git, falling back to the
        # backend-wide default when unset.
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
296 296
297 297 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
298 298 if self.is_empty():
299 299 return OrderedDict()
300 300
301 301 result = []
302 302 for ref, sha in self._refs.iteritems():
303 303 if ref.startswith(prefix):
304 304 ref_name = ref
305 305 if strip_prefix:
306 306 ref_name = ref[len(prefix):]
307 307 result.append((safe_unicode(ref_name), sha))
308 308
309 309 def get_name(entry):
310 310 return entry[0]
311 311
312 312 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
313 313
    def _get_branches(self):
        # branches are the refs under refs/heads/, keyed by short name
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)

    @LazyProperty
    def branches(self):
        # cached mapping of branch name -> commit sha
        return self._get_branches()

    @LazyProperty
    def branches_closed(self):
        # git has no closed-branch concept; kept for Mercurial API parity
        return {}

    @LazyProperty
    def branches_all(self):
        # open plus "closed" branches; for git this equals ``branches``
        # since ``branches_closed`` is always empty
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        # cached mapping of tag name -> commit sha
        return self._get_tags()

    def _get_tags(self):
        # tags are the refs under refs/tags/, sorted in reverse name order
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
339 339
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists

        NOTE(review): ``user``, ``message`` and ``date`` are not applied to
        the tag itself -- only a plain ref is written (see TODO above).
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # refresh cached refs/tags so the new tag is immediately visible
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit
364 364
365 365 def remove_tag(self, name, user, message=None, date=None):
366 366 """
367 367 Removes tag with the given ``name``.
368 368
369 369 :param name: name of the tag to be removed
370 370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
371 371 :param message: message of the tag's removal commit
372 372 :param date: date of tag's removal commit
373 373
374 374 :raises TagDoesNotExistError: if tag with given name does not exists
375 375 """
376 376 if name not in self.tags:
377 377 raise TagDoesNotExistError("Tag %s does not exist" % name)
378 378 tagpath = vcspath.join(
379 379 self._remote.get_refs_path(), 'refs', 'tags', name)
380 380 try:
381 381 os.remove(tagpath)
382 382 self._refs = self._get_refs()
383 383 self.tags = self._get_tags()
384 384 except OSError as e:
385 385 raise RepositoryError(e.strerror)
386 386
    def _get_refs(self):
        # fresh ref-name -> sha mapping from the remote
        return self._remote.get_refs()

    @LazyProperty
    def _refs(self):
        # cached ref-name -> sha mapping; mutating methods (tag/remove_tag)
        # reassign this attribute after changing refs
        return self._get_refs()

    @property
    def _ref_tree(self):
        # nested-dict view of refs: 'refs/heads/master' becomes
        # {'refs': {'heads': {'master': sha}}}
        node = tree = {}
        for ref, sha in self._refs.iteritems():
            path = ref.split('/')
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            # reset cursor to the root for the next ref
            node = tree
        return tree
404 404
    def get_remote_ref(self, ref_name):
        # Resolve a ref under refs/remotes/origin/ to its sha; None when
        # the ref does not exist.
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        # NOTE(review): the broad catch also swallows ref-loading errors,
        # not only a missing key -- presumably intentional best-effort;
        # confirm before narrowing to KeyError.
        except Exception:
            return
411 411
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            # a numeric index is translated to an id by _get_commit_id below
            self._validate_commit_idx(commit_idx)
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
431 431
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate ids to positions in the full commit list; None keeps
        # the corresponding slice boundary open
        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # make the end boundary inclusive for slicing
        if end_pos is not None:
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # NOTE(review): filter_ is only used as a flag here; the actual
            # filter values are rebuilt into revfilters below
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        # positional slicing applies on top of any date/branch filtering
        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
515 515
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: only the case ``path1 == path`` is supported; any other
          value raises ValueError.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']
        if ignore_whitespace:
            flags.append('-w')

        # diffing against the empty state is expressed as 'git show'
        if commit1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit2.raw_id]
        else:
            cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

        if path:
            cmd.extend(['--', path])

        stdout, __ = self.run_git_command(cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit1 == self.EMPTY_COMMIT:
            lines = stdout.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            stdout = '\n'.join(lines[x:]) + '\n'
        return GitDiff(stdout)
563 563
    def strip(self, commit_id, branch_name):
        # Reset ``branch_name`` to the first parent of ``commit_id``,
        # dropping the commit (and any descendants) from the branch.
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # rebuild the commit id list and index caches after rewriting history
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
575 575
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the common ancestor id of ``commit_id1`` (this repo) and
        ``commit_id2`` (``repo2``), or None when no ancestor can be found.
        """
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repository case: look at the commits missing from repo2
            # and take the first parent of the oldest missing one
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repository: git computes the merge base directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        return ancestor_id
598 598
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits of ``repo2`` reachable from ``commit_id2`` but not
        from ``commit_id1``, oldest first.

        NOTE(review): ``merge`` is accepted for backend interface parity but
        is not used in this implementation.
        """
        repo1 = self
        ancestor_id = None  # NOTE(review): assigned but never used here

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repo: list commits in the range commit_id1..commit_id2
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits
620 620
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # fresh in-memory commit builder bound to this repository
        return GitInMemoryCommit(self)
627 627
628 628 def clone(self, url, update_after_clone=True, bare=False):
629 629 """
630 630 Tries to clone commits from external location.
631 631
632 632 :param update_after_clone: If set to ``False``, git won't checkout
633 633 working directory
634 634 :param bare: If set to ``True``, repository would be cloned into
635 635 *bare* git repository (no working directory at all).
636 636 """
637 637 # init_bare and init expect empty dir created to proceed
638 638 if not os.path.exists(self.path):
639 639 os.mkdir(self.path)
640 640
641 641 if bare:
642 642 self._remote.init_bare()
643 643 else:
644 644 self._remote.init()
645 645
646 646 deferred = '^{}'
647 647 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
648 648
649 649 return self._remote.clone(
650 650 url, deferred, valid_refs, update_after_clone)
651 651
    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location. We use fetch here since
        pull in get does merges and we want to be compatible with hg backend so
        pull == fetch in this case
        """
        self.fetch(url, commit_ids=commit_ids)

    def fetch(self, url, commit_ids=None):
        """
        Tries to fetch changes from external location.
        """
        refs = None

        if commit_ids is not None:
            # limit the fetch to remote refs pointing at the requested commits
            remote_refs = self._remote.get_remote_refs(url)
            refs = [
                ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.fetch(url, refs=refs)

    def push(self, url):
        """
        Push changes to the remote repository at ``url`` via the remote's
        sync_push. ``refs=None`` presumably means all refs -- confirm
        against the vcsserver sync_push implementation.
        """
        refs = None
        self._remote.sync_push(url, refs=refs)
675
    def set_refs(self, ref_name, commit_id):
        # point ``ref_name`` at ``commit_id`` via the remote
        self._remote.set_refs(ref_name, commit_id)

    def remove_ref(self, ref_name):
        # delete ``ref_name`` via the remote
        self._remote.remove_ref(ref_name)

    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        self._remote.update_server_info()
683 687
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            # no commits yet, hence no active branch to report
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
699 703
700 704 def _checkout(self, branch_name, create=False):
701 705 """
702 706 Checkout a branch in the working directory.
703 707
704 708 It tries to create the branch if create is True, failing if the branch
705 709 already exists.
706 710
707 711 It only works for non bare repositories (i.e. repositories with a
708 712 working copy)
709 713 """
710 714 if self.bare:
711 715 raise RepositoryError('Cannot checkout branches in a bare git repo')
712 716
713 717 cmd = ['checkout']
714 718 if create:
715 719 cmd.append('-b')
716 720 cmd.append(branch_name)
717 721 self.run_git_command(cmd, fail_on_stderr=False)
718 722
    def _identify(self):
        """
        Return the current state of the working directory.

        Resolves HEAD to its full commit id; None for empty repositories.

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()
731 735
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
751 755
    def _local_fetch(self, repository_path, branch_name):
        """
        Fetch a branch from a local repository.

        :raises ValueError: when asked to fetch from this very repository
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        # --no-tags keeps the fetch limited to the requested branch head
        cmd = ['fetch', '--no-tags', repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
762 766
    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                # entries marked not-for-merge are skipped, mirroring git-pull
                if ' not-for-merge ' in line:
                    continue
                # drop everything from the first tab onward (DOTALL lets '.'
                # consume the trailing newline too), keeping only the sha
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads
784 788
    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        # open the shadow repository used for merge simulation
        # NOTE(review): ``enable_hooks`` is accepted but not forwarded here
        return GitRepository(shadow_repository_path)

    def _local_pull(self, repository_path, branch_name):
        """
        Pull a branch from a local repository.

        :raises RepositoryError: when invoked on a bare repository
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch)
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull', '--ff-only', '--no-tags', repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
801 805
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not possible
        to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param heads: the heads to merge.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            # nothing fetched that needs merging -- no-op
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError(
                'Do not know how to merge into empty repositories yet')

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # commit message. We also specify the user who is doing the merge.
        cmd = ['-c', 'user.name=%s' % safe_str(user_name),
               '-c', 'user.email=%s' % safe_str(user_email),
               'merge', '--no-ff', '-m', safe_str(merge_message)]
        cmd.extend(heads)
        try:
            self.run_git_command(cmd, fail_on_stderr=False)
        except RepositoryError:
            # Cleanup any merge leftovers
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
            raise
837 841
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently, if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # presumably honored by the target repo's installed hooks
                # (see docstring: hooks are not run in the target repo)
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
869 873
870 874 def _get_new_pr_branch(self, source_branch, target_branch):
871 875 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
872 876 pr_branches = []
873 877 for branch in self.branches:
874 878 if branch.startswith(prefix):
875 879 pr_branches.append(int(branch[len(prefix):]))
876 880
877 881 if not pr_branches:
878 882 branch_id = 0
879 883 else:
880 884 branch_id = max(pr_branches) + 1
881 885
882 886 return '%s%d' % (prefix, branch_id)
883 887
    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Simulate a merge of ``source_ref`` into ``target_ref`` inside the
        shadow repository; unless ``dry_run`` is set, push the merge result
        back into this repository.

        Returns a ``MergeResponse`` describing possibility/success/failure.

        NOTE(review): ``use_rebase`` and ``close_branch`` are accepted for
        interface parity with the Mercurial backend but are not used in this
        Git implementation.
        """
        # Refuse to merge when the caller's view of the target head is stale.
        if target_ref.commit_id != self.branches[target_ref.name]:
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        shadow_repo = GitRepository(shadow_repository_path)
        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name)

        # checkout target
        shadow_repo._checkout(target_ref.name)
        shadow_repo._local_pull(self.path, target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = GitRepository(shadow_repository_path)
        # Re-check staleness after the pull, against the shadow repo's state.
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on git shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to reload repo to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = GitRepository(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError:
            log.exception('Failure when doing local merge on git shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                # Push with hooks enabled so the real repo runs its hooks.
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push on git shadow repo')
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref,
            merge_failure_reason)
959 963
960 964 def _get_shadow_repository_path(self, workspace_id):
961 965 # The name of the shadow repository must start with '.', so it is
962 966 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
963 967 return os.path.join(
964 968 os.path.dirname(self.path),
965 969 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
966 970
967 971 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
968 972 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
969 973 if not os.path.exists(shadow_repository_path):
970 974 self._local_clone(
971 975 shadow_repository_path, target_ref.name, source_ref.name)
972 976
973 977 return shadow_repository_path
974 978
975 979 def cleanup_merge_workspace(self, workspace_id):
976 980 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
977 981 shutil.rmtree(shadow_repository_path, ignore_errors=True)
@@ -1,889 +1,893 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24
25 25 import logging
26 26 import binascii
27 27 import os
28 28 import shutil
29 29 import urllib
30 30
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 from rhodecode.lib.compat import OrderedDict
34 34 from rhodecode.lib.datelib import (
35 35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 36 date_astimestamp)
37 37 from rhodecode.lib.utils import safe_unicode, safe_str
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.backends.base import (
40 40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 41 MergeFailureReason, Reference)
42 42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
48 48
# Short alias used throughout this module for commit hashes.
hexlify = binascii.hexlify
# Mercurial's null revision id: 20 zero bytes (used e.g. when removing tags).
nullid = "\0" * 20

log = logging.getLogger(__name__)
53 53
54 54
55 55 class MercurialRepository(BaseRepository):
56 56 """
57 57 Mercurial repository backend
58 58 """
59 59 DEFAULT_BRANCH_NAME = 'default'
60 60
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 update_after_clone=False, with_wire=None):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
            it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param update_after_clone=False: sets update of working copy after
            making a clone
        :param with_wire: extra options passed to the vcsserver connection
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else Config()
        # Remote proxy that executes Mercurial operations on the vcsserver.
        self._remote = connection.Hg(
            self.path, self.config, with_wire=with_wire)

        self._init_repo(create, src_url, update_after_clone)

        # caches
        self._commit_ids = {}
84 84
85 85 @LazyProperty
86 86 def commit_ids(self):
87 87 """
88 88 Returns list of commit ids, in ascending order. Being lazy
89 89 attribute allows external tools to inject shas from cache.
90 90 """
91 91 commit_ids = self._get_all_commit_ids()
92 92 self._rebuild_cache(commit_ids)
93 93 return commit_ids
94 94
95 95 def _rebuild_cache(self, commit_ids):
96 96 self._commit_ids = dict((commit_id, index)
97 97 for index, commit_id in enumerate(commit_ids))
98 98
    @LazyProperty
    def branches(self):
        """Active (open) branches only, as an ordered name -> id mapping."""
        return self._get_branches()
102 102
    @LazyProperty
    def branches_closed(self):
        """Closed branches only, as an ordered name -> id mapping."""
        return self._get_branches(active=False, closed=True)
106 106
107 107 @LazyProperty
108 108 def branches_all(self):
109 109 all_branches = {}
110 110 all_branches.update(self.branches)
111 111 all_branches.update(self.branches_closed)
112 112 return all_branches
113 113
114 114 def _get_branches(self, active=True, closed=False):
115 115 """
116 116 Gets branches for this repository
117 117 Returns only not closed active branches by default
118 118
119 119 :param active: return also active branches
120 120 :param closed: return also closed branches
121 121
122 122 """
123 123 if self.is_empty():
124 124 return {}
125 125
126 126 def get_name(ctx):
127 127 return ctx[0]
128 128
129 129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 130 self._remote.branches(active, closed).items()]
131 131
132 132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133 133
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository as an ordered name -> id mapping.
        """
        return self._get_tags()
140 140
141 141 def _get_tags(self):
142 142 if self.is_empty():
143 143 return {}
144 144
145 145 def get_name(ctx):
146 146 return ctx[0]
147 147
148 148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 149 self._remote.tags().items()]
150 150
151 151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152 152
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param kwargs: supports ``local`` (default False) which creates a
            repository-local tag instead of a versioned .hgtags entry

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        # Tagging creates a commit on the server side; drop stale caches.
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
185 185
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        # Removal is always a versioned (non-local) tag operation here.
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # Tagging the null revision id is how Mercurial records tag removal.
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self.tags = self._get_tags()
208 208
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository as an ordered name -> id mapping.
        """
        return self._get_bookmarks()
215 215
216 216 def _get_bookmarks(self):
217 217 if self.is_empty():
218 218 return {}
219 219
220 220 def get_name(ctx):
221 221 return ctx[0]
222 222
223 223 _bookmarks = [
224 224 (safe_unicode(n), hexlify(h)) for n, h in
225 225 self._remote.bookmarks().items()]
226 226
227 227 return OrderedDict(sorted(_bookmarks, key=get_name))
228 228
    def _get_all_commit_ids(self):
        # Only 'visible' changesets: hidden/obsolete commits (evolve) are
        # excluded from the default commit list.
        return self._remote.get_all_commit_ids('visible')
231 231
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: optional path to limit the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given; diffing two different
            paths is not supported and raises ``ValueError``
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        # opt_git makes the server emit git-style diff headers.
        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
262 262
    def strip(self, commit_id, branch=None):
        """
        Strip `commit_id` (and its descendants) from the repository, then
        rebuild the local commit-id caches.

        NOTE(review): the `branch` parameter is accepted but unused here.
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # Overwrite the LazyProperty value and rebuild the id -> index map.
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
269 269
    def verify(self):
        """Run repository integrity verification and return its result."""
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify
275 275
276 276 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
277 277 if commit_id1 == commit_id2:
278 278 return commit_id1
279 279
280 280 ancestors = self._remote.revs_from_revspec(
281 281 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
282 282 other_path=repo2.path)
283 283 return repo2[ancestors[0]].raw_id if ancestors else None
284 284
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits in `repo2` that `commit_id2` has on top
        of `commit_id1` (empty when the ids are equal).

        :param merge: when True, include all ancestors of `commit_id2` that
            are not ancestors of `commit_id1` (merge-preview semantics);
            otherwise use the linear ``id(a)..id(b)`` range.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
302 302
    @staticmethod
    def check_url(url, config):
        """
        Check the given url and try to verify that it is a valid link.
        Sometimes it may happen that Mercurial issues a basic
        auth request that can cause the whole API to hang when used from
        python or other external calls.

        On failure it raises urllib2.HTTPError; an exception is also thrown
        when the return code is not 200.
        """
        # check first if it's not an local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
320 320
321 321 @staticmethod
322 322 def is_valid_repository(path):
323 323 return os.path.isdir(os.path.join(path, '.hg'))
324 324
    def _init_repo(self, create, src_url=None, update_after_clone=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `update_after_clone` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, update_after_clone)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            # NOTE: 0755 is a Python-2-only octal literal; actual permissions
            # are additionally subject to the process umask.
            os.makedirs(self.path, mode=0755)

        # Opens (or, when create is True, creates) the local repository
        # through the vcsserver connection.
        self._remote.localrepository(create)
354 354
    @LazyProperty
    def in_memory_commit(self):
        """In-memory commit builder bound to this repository."""
        return MercurialInMemoryCommit(self)
358 358
    @LazyProperty
    def description(self):
        """Repository description from hgrc [web] section, or the default."""
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
364 364
    @LazyProperty
    def contact(self):
        """Contact from hgrc: [web] contact, then [ui] username, then default."""
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
371 371
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # Empty repository: fall back to the on-disk modification time.
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
383 383
384 384 def _get_fs_mtime(self):
385 385 # fallback to filesystem
386 386 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
387 387 st_path = os.path.join(self.path, '.hg', "store")
388 388 if os.path.exists(cl_path):
389 389 return os.stat(cl_path).st_mtime
390 390 else:
391 391 return os.stat(st_path).st_mtime
392 392
    def _sanitize_commit_idx(self, idx):
        """Normalize a commit index for the remote call (Python 2 only)."""
        # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
        # number. A `long` is treated in the correct way though. So we convert
        # `int` to `long` here to make sure it is handled correctly.
        if isinstance(idx, int):
            return long(idx)
        return idx
400 400
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        url = url.encode('utf8')
        # 'default' is Mercurial's symbolic name for the configured default
        # remote path, so it must be passed through unchanged.
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
411 411
412 412 def get_hook_location(self):
413 413 """
414 414 returns absolute path to location where hooks are stored
415 415 """
416 416 return os.path.join(self.path, '.hg', '.hgrc')
417 417
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        When neither is given, the "tip" commit is returned. A local cache
        miss falls through to a remote ``lookup`` call.

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when the id/idx cannot be resolved
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # Fast path: resolve through the local id -> index cache.
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            commit_idx = self._sanitize_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # Normalize negative indexes to their positive position.
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                # Fall through to a remote lookup using the index as id.
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # TODO Paris: Ugly hack to "serialize" long for msgpack
        if isinstance(commit_id, long):
            commit_id = float(commit_id)

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            # Re-raise with a message that includes the repository context.
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
462 462
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        # NOTE(review): branch_ancestors is hard-coded False, so the
        # 'ancestors(branch(...))' elif branch below is currently dead code.
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end bound inclusive for the slice below
            end_pos += 1

        # Build a Mercurial revset expression from the requested filters.
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
545 545
    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location.

        :param url: location to pull from; normalized via `_get_url`
        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()
556 556
    def push(self, url):
        """
        Push all repository changes to the remote repository at ``url``
        (used for remote repository synchronization).

        :param url: push target location; normalized via `_get_url`
        """
        url = self._get_url(url)
        self._remote.sync_push(url)
560
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks=False: the clone is a shadow/workspace copy and must not
        # trigger RhodeCode hooks.
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
563 567
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes when True
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
570 574
    def _identify(self):
        """
        Return the current state of the working directory.
        """
        # rstrip('+') drops the trailing '+' marker that `hg identify`
        # appends when the working directory has uncommitted changes.
        return self._remote.identify().strip().rstrip('+')
576 580
    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.

        NOTE(review): splits the remote's space-separated output; an empty
        response would yield [''] rather than [] — verify against callers.
        """
        return self._remote.heads(branch=branch).strip().split(' ')
582 586
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
588 592
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param revision: single revision to push
        :param repository_path: filesystem path of the target repository
        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run target repository hooks when True
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
600 604
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(target_ref.commit_id)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # Bookmark the source so we can check it out after the rebase
                # moved the commits.
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
656 660
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            # close_branch=True records the branch as closed in the commit.
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
678 682
679 683 def _is_the_same_branch(self, target_ref, source_ref):
680 684 return (
681 685 self._get_branch_name(target_ref) ==
682 686 self._get_branch_name(source_ref))
683 687
684 688 def _get_branch_name(self, ref):
685 689 if ref.type == 'branch':
686 690 return ref.name
687 691 return self._remote.ctx_branch(ref.commit_id)
688 692
689 693 def _get_shadow_repository_path(self, workspace_id):
690 694 # The name of the shadow repository must start with '.', so it is
691 695 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
692 696 return os.path.join(
693 697 os.path.dirname(self.path),
694 698 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
695 699
    def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
        """
        Ensure a shadow repository exists for `workspace_id` (cloning it on
        first use) and return its path. The unused ref parameters keep the
        signature parallel with the Git backend.
        """
        shadow_repository_path = self._get_shadow_repository_path(workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
704 708
705 709 def cleanup_merge_workspace(self, workspace_id):
706 710 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
707 711 shutil.rmtree(shadow_repository_path, ignore_errors=True)
708 712
    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge *source_ref* of *source_repo* into *target_ref* of this repo,
        working inside the shadow repository at *shadow_repository_path*.

        Returns a MergeResponse(merge_possible, merge_succeeded, merge_ref,
        merge_failure_reason). With dry_run=True nothing is pushed back.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # target commit must be a current head, otherwise the merge result
        # would not be reachable from the target ref
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        try:
            # hg branches may have multiple heads; refuse ambiguous targets
            if (target_ref.type == 'branch' and
                    len(self._heads(target_ref.name)) != 1):
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)

        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)
        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception(
                    'Failure when doing close branch on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, it
                # may have required a push as well
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository.')
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
839 843
840 844 def _get_shadow_instance(
841 845 self, shadow_repository_path, enable_hooks=False):
842 846 config = self.config.copy()
843 847 if not enable_hooks:
844 848 config.clear_section('hooks')
845 849 return MercurialRepository(shadow_repository_path, config)
846 850
847 851 def _validate_pull_reference(self, reference):
848 852 if not (reference.name in self.bookmarks or
849 853 reference.name in self.branches or
850 854 self.get_commit(reference.commit_id)):
851 855 raise CommitDoesNotExistError(
852 856 'Unknown branch, bookmark or commit id')
853 857
854 858 def _local_pull(self, repository_path, reference):
855 859 """
856 860 Fetch a branch, bookmark or commit from a local repository.
857 861 """
858 862 repository_path = os.path.abspath(repository_path)
859 863 if repository_path == self.path:
860 864 raise ValueError('Cannot pull from the same repository')
861 865
862 866 reference_type_to_option_name = {
863 867 'book': 'bookmark',
864 868 'branch': 'branch',
865 869 }
866 870 option_name = reference_type_to_option_name.get(
867 871 reference.type, 'revision')
868 872
869 873 if option_name == 'revision':
870 874 ref = reference.commit_id
871 875 else:
872 876 ref = reference.name
873 877
874 878 options = {option_name: [ref]}
875 879 self._remote.pull_cmd(repository_path, hooks=False, **options)
876 880 self._remote.invalidate_vcs_cache()
877 881
878 882 def bookmark(self, bookmark, revision=None):
879 883 if isinstance(bookmark, unicode):
880 884 bookmark = safe_str(bookmark)
881 885 self._remote.bookmark(bookmark, revision=revision)
882 886 self._remote.invalidate_vcs_cache()
883 887
884 888
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection that resolves entries by numeric index instead of raw id."""

    def _commit_factory(self, commit_id):
        # ``commit_id`` here is actually a commit index in this subclass
        return self.repo.get_commit(
            pre_load=self.pre_load, commit_idx=commit_id)
@@ -1,907 +1,922 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import traceback
29 29 import logging
30 30 import cStringIO
31 31 import pkg_resources
32 32
33 33 from sqlalchemy import func
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 import rhodecode
37 37 from rhodecode.lib.vcs import get_backend
38 38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 39 from rhodecode.lib.vcs.nodes import FileNode
40 40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 41 from rhodecode.lib import helpers as h
42 42 from rhodecode.lib.auth import (
43 43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 44 HasUserGroupPermissionAny)
45 45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 46 from rhodecode.lib import hooks_utils, caches
47 47 from rhodecode.lib.utils import (
48 48 get_filesystem_repos, make_db_config)
49 49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 50 from rhodecode.lib.system_info import get_system_info
51 51 from rhodecode.model import BaseModel
52 52 from rhodecode.model.db import (
53 53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 54 PullRequest)
55 55 from rhodecode.model.settings import VcsSettingsModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
class UserTemp(object):
    """Lightweight stand-in for a user carrying only the id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
66 66
67 67
class RepoTemp(object):
    """Lightweight stand-in for a repository carrying only the id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
74 74
75 75
class SimpleCachedRepoList(object):
    """
    Lightweight iteration over db repositories: no scm initialisation and
    cache friendly. Yields a permission-filtered dict per repository.
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' in order_by means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission is evaluated per repo, at iteration time
            readable = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not readable:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
111 111
112 112
113 113 class _PermCheckIterator(object):
114 114
115 115 def __init__(
116 116 self, obj_list, obj_attr, perm_set, perm_checker,
117 117 extra_kwargs=None):
118 118 """
119 119 Creates iterator from given list of objects, additionally
120 120 checking permission for them from perm_set var
121 121
122 122 :param obj_list: list of db objects
123 123 :param obj_attr: attribute of object to pass into perm_checker
124 124 :param perm_set: list of permissions to check
125 125 :param perm_checker: callable to check permissions against
126 126 """
127 127 self.obj_list = obj_list
128 128 self.obj_attr = obj_attr
129 129 self.perm_set = perm_set
130 130 self.perm_checker = perm_checker
131 131 self.extra_kwargs = extra_kwargs or {}
132 132
133 133 def __len__(self):
134 134 return len(self.obj_list)
135 135
136 136 def __repr__(self):
137 137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
138 138
139 139 def __iter__(self):
140 140 checker = self.perm_checker(*self.perm_set)
141 141 for db_obj in self.obj_list:
142 142 # check permission at this level
143 143 name = getattr(db_obj, self.obj_attr, None)
144 144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
145 145 continue
146 146
147 147 yield db_obj
148 148
149 149
class RepoList(_PermCheckIterator):
    """Iterator over repositories readable by the current user."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list, obj_attr='repo_name',
            perm_set=perm_set, perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
162 162
163 163
class RepoGroupList(_PermCheckIterator):
    """Iterator over repository groups readable by the current user."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list, obj_attr='group_name',
            perm_set=perm_set, perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
175 175
176 176
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups readable by the current user."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list, obj_attr='users_group_name',
            perm_set=perm_set, perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
188 188
189 189
190 190 class ScmModel(BaseModel):
191 191 """
192 192 Generic Scm Model
193 193 """
194 194
195 195 @LazyProperty
196 196 def repos_path(self):
197 197 """
198 198 Gets the repositories root path from database
199 199 """
200 200
201 201 settings_model = VcsSettingsModel(sa=self.sa)
202 202 return settings_model.get_repos_location()
203 203
204 204 def repo_scan(self, repos_path=None):
205 205 """
206 206 Listing of repositories in given path. This path should not be a
207 207 repository itself. Return a dictionary of repository objects
208 208
209 209 :param repos_path: path to directory containing repositories
210 210 """
211 211
212 212 if repos_path is None:
213 213 repos_path = self.repos_path
214 214
215 215 log.info('scanning for repositories in %s', repos_path)
216 216
217 217 config = make_db_config()
218 218 config.set('extensions', 'largefiles', '')
219 219 repos = {}
220 220
221 221 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 222 # name need to be decomposed and put back together using the /
223 223 # since this is internal storage separator for rhodecode
224 224 name = Repository.normalize_repo_name(name)
225 225
226 226 try:
227 227 if name in repos:
228 228 raise RepositoryError('Duplicate repository name %s '
229 229 'found in %s' % (name, path))
230 230 elif path[0] in rhodecode.BACKENDS:
231 231 klass = get_backend(path[0])
232 232 repos[name] = klass(path[1], config=config)
233 233 except OSError:
234 234 continue
235 235 log.debug('found %s paths with repositories', len(repos))
236 236 return repos
237 237
238 238 def get_repos(self, all_repos=None, sort_key=None):
239 239 """
240 240 Get all repositories from db and for each repo create it's
241 241 backend instance and fill that backed with information from database
242 242
243 243 :param all_repos: list of repository names as strings
244 244 give specific repositories list, good for filtering
245 245
246 246 :param sort_key: initial sorting of repositories
247 247 """
248 248 if all_repos is None:
249 249 all_repos = self.sa.query(Repository)\
250 250 .filter(Repository.group_id == None)\
251 251 .order_by(func.lower(Repository.repo_name)).all()
252 252 repo_iter = SimpleCachedRepoList(
253 253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 254 return repo_iter
255 255
256 256 def get_repo_groups(self, all_groups=None):
257 257 if all_groups is None:
258 258 all_groups = RepoGroup.query()\
259 259 .filter(RepoGroup.group_parent_id == None).all()
260 260 return [x for x in RepoGroupList(all_groups)]
261 261
262 262 def mark_for_invalidation(self, repo_name, delete=False):
263 263 """
264 264 Mark caches of this repo invalid in the database. `delete` flag
265 265 removes the cache entries
266 266
267 267 :param repo_name: the repo_name for which caches should be marked
268 268 invalid, or deleted
269 269 :param delete: delete the entry keys instead of setting bool
270 270 flag on them
271 271 """
272 272 CacheKey.set_invalidate(repo_name, delete=delete)
273 273 repo = Repository.get_by_repo_name(repo_name)
274 274
275 275 if repo:
276 276 config = repo._config
277 277 config.set('extensions', 'largefiles', '')
278 278 repo.update_commit_cache(config=config, cs_cache=None)
279 279 caches.clear_repo_caches(repo_name)
280 280
281 281 def toggle_following_repo(self, follow_repo_id, user_id):
282 282
283 283 f = self.sa.query(UserFollowing)\
284 284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 285 .filter(UserFollowing.user_id == user_id).scalar()
286 286
287 287 if f is not None:
288 288 try:
289 289 self.sa.delete(f)
290 290 return
291 291 except Exception:
292 292 log.error(traceback.format_exc())
293 293 raise
294 294
295 295 try:
296 296 f = UserFollowing()
297 297 f.user_id = user_id
298 298 f.follows_repo_id = follow_repo_id
299 299 self.sa.add(f)
300 300 except Exception:
301 301 log.error(traceback.format_exc())
302 302 raise
303 303
304 304 def toggle_following_user(self, follow_user_id, user_id):
305 305 f = self.sa.query(UserFollowing)\
306 306 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 307 .filter(UserFollowing.user_id == user_id).scalar()
308 308
309 309 if f is not None:
310 310 try:
311 311 self.sa.delete(f)
312 312 return
313 313 except Exception:
314 314 log.error(traceback.format_exc())
315 315 raise
316 316
317 317 try:
318 318 f = UserFollowing()
319 319 f.user_id = user_id
320 320 f.follows_user_id = follow_user_id
321 321 self.sa.add(f)
322 322 except Exception:
323 323 log.error(traceback.format_exc())
324 324 raise
325 325
326 326 def is_following_repo(self, repo_name, user_id, cache=False):
327 327 r = self.sa.query(Repository)\
328 328 .filter(Repository.repo_name == repo_name).scalar()
329 329
330 330 f = self.sa.query(UserFollowing)\
331 331 .filter(UserFollowing.follows_repository == r)\
332 332 .filter(UserFollowing.user_id == user_id).scalar()
333 333
334 334 return f is not None
335 335
336 336 def is_following_user(self, username, user_id, cache=False):
337 337 u = User.get_by_username(username)
338 338
339 339 f = self.sa.query(UserFollowing)\
340 340 .filter(UserFollowing.follows_user == u)\
341 341 .filter(UserFollowing.user_id == user_id).scalar()
342 342
343 343 return f is not None
344 344
345 345 def get_followers(self, repo):
346 346 repo = self._get_repo(repo)
347 347
348 348 return self.sa.query(UserFollowing)\
349 349 .filter(UserFollowing.follows_repository == repo).count()
350 350
351 351 def get_forks(self, repo):
352 352 repo = self._get_repo(repo)
353 353 return self.sa.query(Repository)\
354 354 .filter(Repository.fork == repo).count()
355 355
356 356 def get_pull_requests(self, repo):
357 357 repo = self._get_repo(repo)
358 358 return self.sa.query(PullRequest)\
359 359 .filter(PullRequest.target_repo == repo)\
360 360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361 361
362 362 def mark_as_fork(self, repo, fork, user):
363 363 repo = self._get_repo(repo)
364 364 fork = self._get_repo(fork)
365 365 if fork and repo.repo_id == fork.repo_id:
366 366 raise Exception("Cannot set repository as fork of itself")
367 367
368 368 if fork and repo.repo_type != fork.repo_type:
369 369 raise RepositoryError(
370 370 "Cannot set repository as fork of repository with other type")
371 371
372 372 repo.fork = fork
373 373 self.sa.add(repo)
374 374 return repo
375 375
    def pull_changes(self, repo, username, remote_uri=None):
        """
        Pull changes into `repo` from a remote repository.

        :param repo: repository name or db object
        :param username: acting user (kept for the audit/interface contract;
            not used directly here)
        :param remote_uri: optional pull source; falls back to the
            repository's stored clone uri
        :raises Exception: when neither `remote_uri` nor a clone uri is set,
            or when the underlying vcs pull fails (re-raised after logging)
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this an re-enable since we need common logic
        # for hg/git remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
397 397
    def push_changes(self, repo, username, remote_uri=None):
        """
        Push changes of `repo` to a remote repository.

        :param repo: repository name or db object
        :param username: acting user (kept for interface symmetry with
            pull_changes; not used directly here)
        :param remote_uri: optional push target; falls back to the
            repository's stored clone uri
        :raises Exception: when neither `remote_uri` nor a clone uri is set,
            or when the underlying vcs push fails (re-raised after logging)
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # hooks section is cleared, same as in pull_changes — presumably to
        # avoid triggering local hooks on remote sync; TODO confirm
        repo.config.clear_section('hooks')

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise
412
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commit a change to a single file on top of *commit*.

        :param repo: SCM instance
        :param repo_name: name of the repository (used for cache invalidation
            and hook triggering)
        :param commit: parent commit object; the new commit is created on its
            branch
        :param user: RhodeCode User object or user_id, the committer
        :param author: commit author
        :param message: commit message
        :param content: new file content
        :param f_path: path of the file being changed
        :returns: the newly created tip commit
        :raises IMCCommitError: when the in-memory commit fails
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
435 450
436 451 def _sanitize_path(self, f_path):
437 452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
438 453 raise NonRelativePathError('%s is not an relative path' % f_path)
439 454 if f_path:
440 455 f_path = os.path.normpath(f_path)
441 456 return f_path
442 457
443 458 def get_dirnode_metadata(self, request, commit, dir_node):
444 459 if not dir_node.is_dir():
445 460 return []
446 461
447 462 data = []
448 463 for node in dir_node:
449 464 if not node.is_file():
450 465 # we skip file-nodes
451 466 continue
452 467
453 468 last_commit = node.last_commit
454 469 last_commit_date = last_commit.date
455 470 data.append({
456 471 'name': node.name,
457 472 'size': h.format_byte_size_binary(node.size),
458 473 'modified_at': h.format_date(last_commit_date),
459 474 'modified_ts': last_commit_date.isoformat(),
460 475 'revision': last_commit.revision,
461 476 'short_id': last_commit.short_id,
462 477 'message': h.escape(last_commit.message),
463 478 'author': h.escape(last_commit.author),
464 479 'user_profile': h.gravatar_with_user(
465 480 request, last_commit.author),
466 481 })
467 482
468 483 return data
469 484
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursive walk from root dir, returning (dirs, files) found below it,
        based on the repository walk function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: when True entries are plain unicode paths; when False
            each entry is a dict with name/type (and optional extra keys)
        :param extended_info: include md5/binary/size/extension metadata
        :param content: include file content (None for binary/oversized files)
        :param max_file_bytes: will not return file contents over this limit
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": h.escape(f.unicode_path),
                            "type": "file",
                        }
                        # NOTE(review): extended_info/content only take effect
                        # with flat=False, since a flat entry is a plain path
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d.unicode_path),
                            "type": "dir",
                        }
                        # directories carry no file metadata; keys are present
                        # for schema symmetry with file entries
                        if extended_info:
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
544 559
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # NOTE(review): lookup uses the sanitized path; a key that
            # normpath would change could raise KeyError — verify callers
            # pass normalized keys
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
613 628
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commit modifications to multiple nodes (add/del/mod, incl. renames).

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': content, 'op': 'add'|'del'|'mod'}, ...}
        :param parent_commit: parent commit, empty means initial commit
        :param author: author of commit, can differ from committer (git only)
        :param trigger_push_hook: trigger push hooks
        :raises NodeNotChangedError: re-raised from the vcs layer unchanged
        :raises IMCCommitError: on any other commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
678 693
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, can be different than committer only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dict
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
739 754
740 755 def strip(self, repo, commit_id, branch):
741 756 scm_instance = repo.scm_instance(cache=False)
742 757 scm_instance.config.clear_section('hooks')
743 758 scm_instance.strip(commit_id, branch)
744 759 self.mark_for_invalidation(repo.repo_name)
745 760
746 761 def get_unread_journal(self):
747 762 return self.sa.query(UserLog).count()
748 763
749 764 def get_repo_landing_revs(self, translator, repo=None):
750 765 """
751 766 Generates select option with tags branches and bookmarks (for hg only)
752 767 grouped by type
753 768
754 769 :param repo:
755 770 """
756 771 _ = translator
757 772 repo = self._get_repo(repo)
758 773
759 774 hist_l = [
760 775 ['rev:tip', _('latest tip')]
761 776 ]
762 777 choices = [
763 778 'rev:tip'
764 779 ]
765 780
766 781 if not repo:
767 782 return choices, hist_l
768 783
769 784 repo = repo.scm_instance()
770 785
771 786 branches_group = (
772 787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
773 788 for b in repo.branches],
774 789 _("Branches"))
775 790 hist_l.append(branches_group)
776 791 choices.extend([x[0] for x in branches_group[0]])
777 792
778 793 if repo.alias == 'hg':
779 794 bookmarks_group = (
780 795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
781 796 for b in repo.bookmarks],
782 797 _("Bookmarks"))
783 798 hist_l.append(bookmarks_group)
784 799 choices.extend([x[0] for x in bookmarks_group[0]])
785 800
786 801 tags_group = (
787 802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
788 803 for t in repo.tags],
789 804 _("Tags"))
790 805 hist_l.append(tags_group)
791 806 choices.extend([x[0] for x in tags_group[0]])
792 807
793 808 return choices, hist_l
794 809
795 810 def install_git_hook(self, repo, force_create=False):
796 811 """
797 812 Creates a rhodecode hook inside a git repository
798 813
799 814 :param repo: Instance of VCS repo
800 815 :param force_create: Create even if same name hook exists
801 816 """
802 817
803 818 loc = os.path.join(repo.path, 'hooks')
804 819 if not repo.bare:
805 820 loc = os.path.join(repo.path, '.git', 'hooks')
806 821 if not os.path.isdir(loc):
807 822 os.makedirs(loc, mode=0777)
808 823
809 824 tmpl_post = pkg_resources.resource_string(
810 825 'rhodecode', '/'.join(
811 826 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
812 827 tmpl_pre = pkg_resources.resource_string(
813 828 'rhodecode', '/'.join(
814 829 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
815 830
816 831 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
817 832 _hook_file = os.path.join(loc, '%s-receive' % h_type)
818 833 log.debug('Installing git hook in repo %s', repo)
819 834 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
820 835
821 836 if _rhodecode_hook or force_create:
822 837 log.debug('writing %s hook file !', h_type)
823 838 try:
824 839 with open(_hook_file, 'wb') as f:
825 840 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
826 841 tmpl = tmpl.replace('_ENV_', sys.executable)
827 842 f.write(tmpl)
828 843 os.chmod(_hook_file, 0755)
829 844 except IOError:
830 845 log.exception('error writing hook file %s', _hook_file)
831 846 else:
832 847 log.debug('skipping writing hook file')
833 848
834 849 def install_svn_hooks(self, repo, force_create=False):
835 850 """
836 851 Creates rhodecode hooks inside a svn repository
837 852
838 853 :param repo: Instance of VCS repo
839 854 :param force_create: Create even if same name hook exists
840 855 """
841 856 hooks_path = os.path.join(repo.path, 'hooks')
842 857 if not os.path.isdir(hooks_path):
843 858 os.makedirs(hooks_path)
844 859 post_commit_tmpl = pkg_resources.resource_string(
845 860 'rhodecode', '/'.join(
846 861 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
847 862 pre_commit_template = pkg_resources.resource_string(
848 863 'rhodecode', '/'.join(
849 864 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
850 865 templates = {
851 866 'post-commit': post_commit_tmpl,
852 867 'pre-commit': pre_commit_template
853 868 }
854 869 for filename in templates:
855 870 _hook_file = os.path.join(hooks_path, filename)
856 871 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
857 872 if _rhodecode_hook or force_create:
858 873 log.debug('writing %s hook file !', filename)
859 874 template = templates[filename]
860 875 try:
861 876 with open(_hook_file, 'wb') as f:
862 877 template = template.replace(
863 878 '_TMPL_', rhodecode.__version__)
864 879 template = template.replace('_ENV_', sys.executable)
865 880 f.write(template)
866 881 os.chmod(_hook_file, 0755)
867 882 except IOError:
868 883 log.exception('error writing hook file %s', filename)
869 884 else:
870 885 log.debug('skipping writing hook file')
871 886
872 887 def install_hooks(self, repo, repo_type):
873 888 if repo_type == 'git':
874 889 self.install_git_hook(repo)
875 890 elif repo_type == 'svn':
876 891 self.install_svn_hooks(repo)
877 892
878 893 def get_server_info(self, environ=None):
879 894 server_info = get_system_info(environ)
880 895 return server_info
881 896
882 897
def _check_rhodecode_hook(hook_path):
    """
    Returns True when it is safe to (over)write the hook at `hook_path`:
    either no hook exists there yet, or the existing one was generated by
    RhodeCode (it carries an RC_HOOK_VER marker).
    """
    if not os.path.exists(hook_path):
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    hook_content = _read_hook(hook_path)
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if match:
        try:
            log.debug('got %s, it is rhodecode', match.group(1))
            return True
        except Exception:
            log.exception("Exception while reading the hook version.")

    return False
902 917
903 918
904 919 def _read_hook(hook_path):
905 920 with open(hook_path, 'rb') as f:
906 921 content = f.read()
907 922 return content
General Comments 0
You need to be logged in to leave comments. Login now