# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
"""
GIT repository module
"""
import logging
import os
import re
from zope.cachedescriptors.property import Lazy as LazyProperty
from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.datelib import (
    utcdate_fromtimestamp, makedate, date_astimestamp)
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, path as vcspath
from rhodecode.lib.vcs.backends.base import (
BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference)
from rhodecode.lib.vcs.backends.git.commit import GitCommit
from rhodecode.lib.vcs.backends.git.diff import GitDiff
from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    CommitDoesNotExistError, EmptyRepositoryError,
    RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)


SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)


class GitRepository(BaseRepository):
"""
Git repository backend.
"""
DEFAULT_BRANCH_NAME = 'master'
contact = BaseRepository.DEFAULT_CONTACT
def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):

        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}

@LazyProperty
    def _remote(self):
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)

    @LazyProperty
    def bare(self):
        return self._remote.bare()

    @LazyProperty
    def head(self):
        return self._remote.head()

    @CachedProperty
    def commit_ids(self):
"""
Returns list of commit ids, in ascending order. Being lazy
attribute allows external tools to inject commit ids from cache.
"""
commit_ids = self._get_all_commit_ids()
self._rebuild_cache(commit_ids)
return commit_ids
def _rebuild_cache(self, commit_ids):
self._commit_ids = dict((commit_id, index)
for index, commit_id in enumerate(commit_ids))
def run_git_command(self, cmd, **opts):
"""
Runs given ``cmd`` as git command and returns tuple
(stdout, stderr).
:param cmd: git command to be executed
:param opts: env options to pass into Subprocess command
"""
if not isinstance(cmd, list):
raise ValueError('cmd must be a list, got %s instead' % type(cmd))
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)

        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)

        return out, err
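    # NOTE: illustrative usage sketch (not part of the original module); the
    # command must be passed as a list of arguments, a plain string raises
    # ValueError (paths are hypothetical):
    #
    #   repo = GitRepository('/srv/repos/project.git')
    #   stdout, stderr = repo.run_git_command(
    #       ['rev-parse', '--abbrev-ref', 'HEAD'], skip_stderr_log=True)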
@staticmethod
def check_url(url, config):
"""
Function will check given url and try to verify if it's a valid
link. Sometimes it may happened that git will issue basic
auth request that can cause whole API to hang when used from python
or other external calls.
On failures it'll raise urllib2.HTTPError, exception is also thrown
when the return code is non 200
"""
# check first if it's not an url
if os.path.isdir(url) or url.startswith('file:'):
return True
if '+' in url.split('://', 1)[0]:
url = url.split('+', 1)[1]
# Request the _remote to verify the url
return connection.Git.check_url(url, config.serialize())
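    # NOTE: hypothetical examples of the contract described above; local paths
    # short-circuit to True, and scheme prefixes such as "git+https://" are
    # stripped before the remote verification:
    #
    #   GitRepository.check_url('/srv/repos/project.git', config)  # -> True
    #   GitRepository.check_url('git+https://host/repo', config)   # verifies https://host/repo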
@staticmethod
def is_valid_repository(path):
if os.path.isdir(os.path.join(path, '.git')):
return True
# check case of bare repository
try:
GitRepository(path)
return True
except VCSError:
pass
return False
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exists"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

if bare:
self._remote.init_bare()
else:
self._remote.init()
if src_url and bare:
# bare repository only allows a fetch and checkout is not allowed
self.fetch(src_url, commit_ids=None)
elif src_url:
self.pull(src_url, commit_ids=None,
update_after=do_workspace_checkout)
            else:
                if not self._remote.assert_correct_path():
raise RepositoryError(
'Path "%s" does not contain a Git repository' %
(self.path,))
        # TODO: johbo: check if we have to translate the OSError here
except OSError as err:
raise RepositoryError(err)
    def _get_all_commit_ids(self):
        return self._remote.get_all_commit_ids()

    def _get_commit_ids(self, filters=None):
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors

        head = self._remote.head(show_exc=False)

        if not head:
            return []
rev_filter = ['--branches', '--tags']
extra_filter = []
if filters:
if filters.get('since'):
extra_filter.append('--since=%s' % (filters['since']))
if filters.get('until'):
extra_filter.append('--until=%s' % (filters['until']))
if filters.get('branch_name'):
git: adjusted code for new libgit2 backend...
r3842 rev_filter = []
project: added all source files and assets
r1 extra_filter.append(filters['branch_name'])
rev_filter.extend(extra_filter)
# if filters.get('start') or filters.get('end'):
# # skip is offset, max-count is limit
# if filters.get('start'):
# extra_filter += ' --skip=%s' % filters['start']
# if filters.get('end'):
# extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
try:
output, __ = self.run_git_command(cmd)
except RepositoryError:
# Can be raised for empty repositories
return []
return output.splitlines()
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
        def is_null(value):
return len(value) == commit_id_or_idx.count('0')
if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
return self.commit_ids[-1]
        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

is_bstr = isinstance(commit_id_or_idx, (str, unicode))
if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
try:
commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
except Exception:
                raise CommitDoesNotExistError(commit_missing_err)

elif is_bstr:
            # Need to call remote to translate id for tagging scenario
try:
remote_data = self._remote.get_object(commit_id_or_idx)
commit_id_or_idx = remote_data["commit_id"]
except (CommitDoesNotExistError,):
raise CommitDoesNotExistError(commit_missing_err)
# Ensure we return full id
if not SHA_PATTERN.match(str(commit_id_or_idx)):
raise CommitDoesNotExistError(
"Given commit id %s not recognized" % commit_id_or_idx)
return commit_id_or_idx
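    # NOTE: sketch of the resolution rules implemented above (values are
    # hypothetical):
    #
    #   repo._lookup_commit('tip')      # -> last entry of self.commit_ids
    #   repo._lookup_commit(0)          # -> first commit id, resolved by index
    #   repo._lookup_commit('v1.0.0')   # tag/short id translated via self._remote.get_object()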
def get_hook_location(self):
"""
returns absolute path to location where hooks are stored
"""
loc = os.path.join(self.path, 'hooks')
if not self.bare:
loc = os.path.join(self.path, '.git', 'hooks')
return loc
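    # NOTE: illustrative values only (paths are hypothetical):
    #
    #   repo.get_hook_location()
    #   # -> '/srv/repos/foo/.git/hooks'  for a non-bare repo at /srv/repos/foo
    #   # -> '/srv/repos/foo.git/hooks'   for a bare repo at /srv/repos/foo.git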
@LazyProperty
def last_change(self):
"""
Returns last change made on this repository as
`datetime.datetime` object.
"""
try:
            return self.get_commit().date
        except RepositoryError:
            tzoffset = makedate()[1]
return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
def _get_fs_mtime(self):
idx_loc = '' if self.bare else '.git'
# fallback to filesystem
in_path = os.path.join(self.path, idx_loc, "index")
he_path = os.path.join(self.path, idx_loc, "HEAD")
if os.path.exists(in_path):
return os.stat(in_path).st_mtime
else:
return os.stat(he_path).st_mtime
@LazyProperty
def description(self):
description = self._remote.get_description()
return safe_unicode(description or self.DEFAULT_DESCRIPTION)
    def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
        if self.is_empty():
            return OrderedDict()

        result = []
        for ref, sha in self._refs.iteritems():
            if ref.startswith(prefix):
                ref_name = ref
                if strip_prefix:
                    ref_name = ref[len(prefix):]
                result.append((safe_unicode(ref_name), sha))

        def get_name(entry):
return entry[0]
return OrderedDict(sorted(result, key=get_name, reverse=reverse))
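    # NOTE: sketch of the expected result shape (ref names and shas are
    # placeholders):
    #
    #   repo._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
    #   # -> OrderedDict([(u'feature/x', 'sha..'), (u'master', 'sha..')])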

    def _get_branches(self):
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)

    @CachedProperty
    def branches(self):
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        return {}

    @CachedProperty
    def bookmarks(self):
        return {}

    @CachedProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @CachedProperty
    def tags(self):
        return self._get_tags()

    def _get_tags(self):
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)

def tag(self, name, user, commit_id=None, message=None, date=None,
**kwargs):
        # TODO: fix this method to apply annotated tags correctly with message
        """
Creates and returns a tag for the given ``commit_id``.
:param name: name for new tag
:param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
:param commit_id: commit id for which new tag would be created
:param message: message of the tag's commit
:param date: date of tag's commit
:raises TagAlreadyExistError: if tag with same name already exists
"""
if name in self.tags:
raise TagAlreadyExistError("Tag %s already exists" % name)
commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')
        return commit
def remove_tag(self, name, user, message=None, date=None):
"""
Removes tag with the given ``name``.
:param name: name of the tag to be removed
:param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
:param message: message of the tag's removal commit
:param date: date of tag's removal commit
        :raises TagDoesNotExistError: if tag with given name does not exist
"""
if name not in self.tags:
raise TagDoesNotExistError("Tag %s does not exist" % name)
self._remote.tag_remove(name)
self._invalidate_prop_cache('tags')
self._invalidate_prop_cache('_refs')

    def _get_refs(self):
        return self._remote.get_refs()

    @CachedProperty
    def _refs(self):
return self._get_refs()

    @property
def _ref_tree(self):
node = tree = {}
for ref, sha in self._refs.iteritems():
path = ref.split('/')
for bit in path[:-1]:
node = node.setdefault(bit, {})
node[path[-1]] = sha
node = tree
return tree
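    # NOTE: sketch of the nested mapping built above, assuming a repo with a
    # 'master' branch and a 'v1.0' tag (shas are placeholders):
    #
    #   {'refs': {'heads': {'master': 'sha..'},
    #             'tags': {'v1.0': 'sha..'}}}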

    def get_remote_ref(self, ref_name):
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        except Exception:
return
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.
        """

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            commit_id = self._lookup_commit(commit_id)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            idx = -1

return GitCommit(self, commit_id, idx, pre_load=pre_load)
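    # NOTE: hypothetical usage; either a commit id or a numeric index may be
    # given, never both (full_sha stands for a 40-char commit id):
    #
    #   tip = repo.get_commit()                  # most recent commit
    #   first = repo.get_commit(commit_idx=0)    # by index
    #   by_id = repo.get_commit(commit_id=full_sha, pre_load=['message'])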
def get_commits(
self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
Returns generator of `GitCommit` objects from start to end (both
are inclusive), in ascending date order.
:param start_id: None, str(commit_id)
:param end_id: None, str(commit_id)
:param start_date: if specified, commits with commit date less than
``start_date`` would be filtered out from returned set
:param end_date: if specified, commits with commit date greater than
``end_date`` would be filtered out from returned set
:param branch_name: if specified, commits not reachable from given
branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
exist.
:raise CommitDoesNotExistError: If commits for given `start` or
`end` could not be found.
"""
if self.is_empty():
raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
if None not in [start_id, end_id] and start_pos > end_pos:
raise RepositoryError(
"Start commit '%s' cannot be after end commit '%s'" %
(start_id, end_id))
if end_pos is not None:
end_pos += 1
filter_ = []
if branch_name:
filter_.append({'branch_name': branch_name})
if start_date and not end_date:
filter_.append({'since': start_date})
if end_date and not start_date:
filter_.append({'until': end_date})
if start_date and end_date:
filter_.append({'since': start_date})
filter_.append({'until': end_date})
# if start_pos or end_pos:
# filter_.append({'start': start_pos})
# filter_.append({'end': end_pos})
if filter_:
revfilters = {
'branch_name': branch_name,
'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
'start': start_pos,
'end': end_pos,
}
            commit_ids = self._get_commit_ids(filters=revfilters)

else:
commit_ids = self.commit_ids
if start_pos or end_pos:
commit_ids = commit_ids[start_pos: end_pos]
        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
translate_tag=translate_tags)
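    # NOTE: illustrative call (branch and dates are placeholders); the returned
    # CollectionGenerator yields GitCommit objects lazily:
    #
    #   import datetime
    #   for commit in repo.get_commits(
    #           branch_name='master',
    #           start_date=datetime.datetime(2019, 1, 1),
    #           end_date=datetime.datetime(2019, 12, 31)):
    #       print(commit.raw_id)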
def get_diff(
self, commit1, commit2, path='', ignore_whitespace=False,
context=3, path1=None):
"""
Returns (git like) *diff*, as plain text. Shows changes introduced by
``commit2`` since ``commit1``.
:param commit1: Entry point from which diff is shown. Can be
``self.EMPTY_COMMIT`` - in this case, patch showing all
the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commit the changes should be shown.
:param ignore_whitespace: If set to ``True``, would not show whitespace
changes. Defaults to ``False``.
:param context: How many lines before/after changed lines should be
shown. Defaults to ``3``.
"""
self._validate_diff_commits(commit1, commit2)
if path1 is not None and path1 != path:
raise ValueError("Diff of two different paths not supported.")
        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
commit1.raw_id, commit2.raw_id, file_filter=file_filter,
opt_ignorews=ignore_whitespace,
context=context)
return GitDiff(diff)
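    # NOTE: hypothetical example; ``commit1`` may be ``self.EMPTY_COMMIT`` to
    # diff against the empty repository state:
    #
    #   c1 = repo.get_commit(commit_idx=0)
    #   c2 = repo.get_commit()  # tip
    #   diff = repo.get_diff(c1, c2, path='setup.py', context=5)  # -> GitDiff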
def strip(self, commit_id, branch_name):
commit = self.get_commit(commit_id=commit_id)
if commit.merge:
raise Exception('Cannot reset to merge commit')
# parent is going to be the new head now
commit = commit.parents[0]
self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
        # clear cached properties
        self._invalidate_prop_cache('commit_ids')
        self._invalidate_prop_cache('_refs')
        self._invalidate_prop_cache('branches')

        return len(self.commit_ids)

def get_common_ancestor(self, commit_id1, commit_id2, repo2):
if commit_id1 == commit_id2:
return commit_id1
if self != repo2:
commits = self._remote.get_missing_revs(
commit_id1, commit_id2, repo2.path)
if commits:
commit = repo2.get_commit(commits[-1])
if commit.parents:
ancestor_id = commit.parents[0].raw_id
else:
ancestor_id = None
else:
# no commits from other repo, ancestor_id is the commit_id2
ancestor_id = commit_id2
else:
output, __ = self.run_git_command(
['merge-base', commit_id1, commit_id2])
ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
return ancestor_id
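    # NOTE: sketch of the two code paths above (ids are placeholders); within a
    # single repo the ancestor comes from `git merge-base`, across two repos it
    # is derived from the missing revisions reported by the remote:
    #
    #   ancestor_id = repo.get_common_ancestor(commit_id1, commit_id2, other_repo)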
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
repo1 = self
ancestor_id = None
if commit_id1 == commit_id2:
commits = []
elif repo1 != repo2:
missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
repo2.path)
commits = [
repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
for commit_id in reversed(missing_ids)]
else:
output, __ = repo1.run_git_command(
['log', '--reverse', '--pretty=format: %H', '-s',
'%s..%s' % (commit_id1, commit_id2)])
commits = [
repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
return commits
@LazyProperty
def in_memory_commit(self):
"""
Returns ``GitInMemoryCommit`` object for this repository.
"""
return GitInMemoryCommit(self)
    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from external location. In Git, pull is different
        from fetch, since it also performs a checkout.

        :param commit_ids: Optional. Can be set to a list of commit ids
            which shall be pulled from the other repository.
        """
        refs = None
if commit_ids is not None:
remote_refs = self._remote.get_remote_refs(url)
refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
self._remote.pull(url, refs=refs, update_after=update_after)
self._remote.invalidate_vcs_cache()
def fetch(self, url, commit_ids=None):
"""
        Fetch all git objects from external location.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()

    def push(self, url):
        refs = None
        self._remote.sync_push(url, refs=refs)

    def set_refs(self, ref_name, commit_id):
self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')

    def remove_ref(self, ref_name):
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')

    def _update_server_info(self):
        """
        runs git's update-server-info command in this repo instance
"""
self._remote.update_server_info()
def _current_branch(self):
"""
Return the name of the current branch.
It only works for non bare repositories (i.e. repositories with a
working copy)
"""
if self.bare:
raise RepositoryError('Bare git repos do not have active branches')
if self.is_empty():
return None
stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
return stdout.strip()
    def _checkout(self, branch_name, create=False, force=False):
        """
Checkout a branch in the working directory.
It tries to create the branch if create is True, failing if the branch
already exists.
It only works for non bare repositories (i.e. repositories with a
working copy)
"""
if self.bare:
raise RepositoryError('Cannot checkout branches in a bare git repo')
cmd = ['checkout']
        if force:
            cmd.append('-f')
        if create:
cmd.append('-b')
cmd.append(branch_name)
self.run_git_command(cmd, fail_on_stderr=False)
    def _create_branch(self, branch_name, commit_id):
        """
        creates a branch in a GIT repo
        """
        self._remote.create_branch(branch_name, commit_id)

    def _identify(self):
"""
Return the current state of the working directory.
"""
if self.bare:
raise RepositoryError('Bare git repos do not have active branches')
if self.is_empty():
return None
stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
return stdout.strip()
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get a different source branch, make sure we also fetch it for
# merge conditions
if source_branch and source_branch != branch_name:
# check if the ref exists.
shadow_repo = GitRepository(os.path.abspath(clone_path))
if shadow_repo.get_remote_ref(source_branch):
cmd = ['fetch', self.path, source_branch]
self.run_git_command(cmd, fail_on_stderr=False)
    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
branch_name = '+{branch}:refs/heads/{branch}'.format(
branch=branch_name)
cmd = ['fetch', '--no-tags', '--update-head-ok',
repository_path, branch_name]
self.run_git_command(cmd, fail_on_stderr=False)
def _local_reset(self, branch_name):
branch_name = '{}'.format(branch_name)
        cmd = ['reset', '--hard', branch_name, '--']
        self.run_git_command(cmd, fail_on_stderr=False)

def _last_fetch_heads(self):
"""
Return the last fetched heads that need merging.
The algorithm is defined at
https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
"""
if not self.bare:
fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
else:
fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
heads = []
with open(fetch_heads_path) as f:
for line in f:
if ' not-for-merge ' in line:
continue
line = re.sub('\t.*', '', line, flags=re.DOTALL)
heads.append(line)
return heads
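    # NOTE: example FETCH_HEAD content this parser expects (shas and paths are
    # placeholders); `not-for-merge` lines are skipped and the tab-separated
    # description is stripped from the remaining ones:
    #
    #   sha1\t\tbranch 'master' of /path/to/source
    #   sha2\tnot-for-merge\tbranch 'feature/x' of /path/to/source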
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
return GitRepository(shadow_repository_path, with_wire={"cache": cache})

    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
Pull a branch from a local repository.
"""
if self.bare:
raise RepositoryError('Cannot pull into a bare git repository')
# N.B.(skreft): The --ff-only option is to make sure this is a
# fast-forward (i.e., we are only pulling new changes and there are no
# conflicts with our current branch)
# Additionally, that option needs to go before --no-tags, otherwise git
# pull complains about it being an unknown flag.
        cmd = ['pull']
if ff_only:
cmd.append('--ff-only')
cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)

def _local_merge(self, merge_message, user_name, user_email, heads):
"""
Merge the given head into the checked out branch.
It will force a merge commit.
Currently it raises an error if the repo is empty, as it is not possible
to create a merge commit in an empty repo.
:param merge_message: The message to use for the merge commit.
:param heads: the heads to merge.
"""
if self.bare:
raise RepositoryError('Cannot merge into a bare git repository')
if not heads:
return
if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError(
                'Do not know how to merge into empty repositories yet')
        unresolved = None

# N.B.(skreft): the --no-ff option is used to enforce the creation of a
# commit message. We also specify the user who is doing the merge.
        cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
               '-c', 'user.email=%s' % safe_str(user_email),
'merge', '--no-ff', '-m', safe_str(merge_message)]
cmd.extend(heads)
try:
            output = self.run_git_command(cmd, fail_on_stderr=False)
        except RepositoryError:
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add U notation for consistency with HG backend output
            unresolved = ['U {}'.format(f) for f in files]
            # Cleanup any merge leftovers
self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                raise

def _local_push(
self, source_branch, repository_path, target_branch,
enable_hooks=False, rc_scm_data=None):
"""
Push the source_branch to the given repository and target_branch.
        Currently, if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).
It does not run the hooks in the target repo.
"""
# TODO(skreft): deal with the case in which the target repo is empty,
# and the target_branch is not master.
target_repo = GitRepository(repository_path)
if (not target_repo.bare and
target_repo._current_branch() == target_branch):
# Git prevents pushing to the checked out branch, so simulate it by
# pulling into the target repository.
target_repo._local_pull(self.path, source_branch)
else:
cmd = ['push', os.path.abspath(repository_path),
'%s:%s' % (source_branch, target_branch)]
gitenv = {}
if rc_scm_data:
gitenv.update({'RC_SCM_DATA': rc_scm_data})
if not enable_hooks:
gitenv['RC_SKIP_HOOKS'] = '1'
self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
def _get_new_pr_branch(self, source_branch, target_branch):
prefix = 'pr_%s-%s_' % (source_branch, target_branch)
pr_branches = []
for branch in self.branches:
if branch.startswith(prefix):
pr_branches.append(int(branch[len(prefix):]))
if not pr_branches:
branch_id = 0
else:
branch_id = max(pr_branches) + 1
return '%s%d' % (prefix, branch_id)
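    # NOTE: sketch of the naming scheme implemented above (branch names are
    # hypothetical); with existing branches pr_feature-master_0 and
    # pr_feature-master_1, the next call yields:
    #
    #   repo._get_new_pr_branch('feature', 'master')  # -> 'pr_feature-master_2'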
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
        log.debug('Prepared %s shadow repository in %s',
                  self.alias, shadow_repository_path)

return shadow_repository_path
def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
# fetch proper commits for merge testing
if source_ref.name != target_ref.name:
if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

# fetch/reset pull the target, in case it is changed
# this handles even force changes
shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
# retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
try:
shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
shadow_repo._local_merge(merge_message, merger_name, merger_email,
[source_ref.commit_id])
merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]
# Set a reference pointing to the merge commit. This reference may
# be used to easily identify the last successful merge commit in
# the shadow repository.
shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
merge_failure_reason = MergeFailureReason.MERGE_FAILED
if merge_possible and not dry_run:
try:
shadow_repo._local_push(
pr_branch, self.path, target_ref.name, enable_hooks=True,
rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
metadata=metadata)