git.py
1192 lines
| 43.2 KiB
| text/x-python
|
PythonLexer
/ vcsserver / git.py
r0 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
r850 | # Copyright (C) 2014-2020 RhodeCode GmbH | |||
r0 | # | |||
# This program is free software; you can redistribute it and/or modify | ||||
# it under the terms of the GNU General Public License as published by | ||||
# the Free Software Foundation; either version 3 of the License, or | ||||
# (at your option) any later version. | ||||
# | ||||
# This program is distributed in the hope that it will be useful, | ||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
# | ||||
# You should have received a copy of the GNU General Public License | ||||
# along with this program; if not, write to the Free Software Foundation, | ||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||||
r739 | ||||
r549 | import collections | |||
r0 | import logging | |||
import os | ||||
import posixpath as vcspath | ||||
import re | ||||
import stat | ||||
r346 | import traceback | |||
r0 | import urllib | |||
import urllib2 | ||||
from functools import wraps | ||||
r622 | import more_itertools | |||
r725 | import pygit2 | |||
from pygit2 import Repository as LibGit2Repo | ||||
r0 | from dulwich import index, objects | |||
from dulwich.client import HttpGitClient, LocalGitClient | ||||
from dulwich.errors import ( | ||||
NotGitRepository, ChecksumMismatch, WrongObjectException, | ||||
MissingCommitError, ObjectMissing, HangupException, | ||||
UnexpectedCommandError) | ||||
r725 | from dulwich.repo import Repo as DulwichRepo | |||
r0 | from dulwich.server import update_server_info | |||
from vcsserver import exceptions, settings, subprocessio | ||||
r825 | from vcsserver.utils import safe_str, safe_int, safe_unicode | |||
r739 | from vcsserver.base import RepoFactory, obfuscate_qs | |||
r0 | from vcsserver.hgcompat import ( | |||
r105 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler) | |||
r182 | from vcsserver.git_lfs.lib import LFSOidStore | |||
r749 | from vcsserver.vcs_base import RemoteBase | |||
r0 | ||||
DIR_STAT = stat.S_IFDIR | ||||
FILE_MODE = stat.S_IFMT | ||||
GIT_LINK = objects.S_IFGITLINK | ||||
r739 | PEELED_REF_MARKER = '^{}' | |||
r0 | ||||
log = logging.getLogger(__name__) | ||||
def str_to_dulwich(value):
    """
    Decode *value* to the unicode object dulwich expects.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    encoding = settings.WIRE_ENCODING
    return value.decode(encoding)
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        lookup_errors = (
            ChecksumMismatch, WrongObjectException, MissingCommitError,
            ObjectMissing)
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            raise
    return wrapper
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """

    def __del__(self):
        # only close when __init__ got far enough to create the object store;
        # close() releases any .idx/pack file descriptors dulwich holds
        if hasattr(self, 'object_store'):
            self.close()
class Repository(LibGit2Repo):
    """Context-manager wrapper over pygit2's Repository; frees the underlying
    libgit2 resources on exit."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
class GitFactory(RepoFactory):
    """Factory producing either pygit2 or dulwich repository objects."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # pygit2 takes the path as-is; dulwich wants a decoded (unicode) path
        if use_libgit2:
            return Repository(wire['path'])
        return Repo(str_to_dulwich(wire['path']))

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        return self.repo(wire, use_libgit2=True)
r0 | ||||
r749 | class GitRemote(RemoteBase): | |||
r0 | ||||
    def __init__(self, factory):
        """
        :param factory: GitFactory used to open repository objects per call.
        """
        self._factory = factory
        # Mapping of bulk-loadable attribute name -> resolver method; used by
        # bulk_request() to satisfy `pre_load` attribute lists in one call.
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }
r182 | def _wire_to_config(self, wire): | |||
if 'config' in wire: | ||||
return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']]) | ||||
return {} | ||||
r607 | def _remote_conf(self, config): | |||
params = [ | ||||
'-c', 'core.askpass=""', | ||||
] | ||||
ssl_cert_dir = config.get('vcs_ssl_dir') | ||||
if ssl_cert_dir: | ||||
params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)]) | ||||
return params | ||||
r739 | @reraise_safe_exceptions | |||
def discover_git_version(self): | ||||
stdout, _ = self.run_git_command( | ||||
{}, ['--version'], _bare=True, _safe=True) | ||||
prefix = 'git version' | ||||
if stdout.startswith(prefix): | ||||
stdout = stdout[len(prefix):] | ||||
return stdout.strip() | ||||
r0 | @reraise_safe_exceptions | |||
r698 | def is_empty(self, wire): | |||
r731 | repo_init = self._factory.repo_libgit2(wire) | |||
with repo_init as repo: | ||||
r730 | ||||
r731 | try: | |||
has_head = repo.head.name | ||||
if has_head: | ||||
return False | ||||
r730 | ||||
r731 | # NOTE(marcink): check again using more expensive method | |||
return repo.is_empty | ||||
except Exception: | ||||
pass | ||||
return True | ||||
r698 | ||||
@reraise_safe_exceptions | ||||
r0 | def assert_correct_path(self, wire): | |||
r739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _assert_correct_path(_context_uid, _repo_id): | ||||
try: | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
pass | ||||
except pygit2.GitError: | ||||
path = wire.get('path') | ||||
tb = traceback.format_exc() | ||||
log.debug("Invalid Git path `%s`, tb: %s", path, tb) | ||||
return False | ||||
r346 | ||||
r739 | return True | |||
return _assert_correct_path(context_uid, repo_id) | ||||
r0 | ||||
@reraise_safe_exceptions | ||||
def bare(self, wire): | ||||
r731 | repo_init = self._factory.repo_libgit2(wire) | |||
with repo_init as repo: | ||||
return repo.is_bare | ||||
r0 | ||||
@reraise_safe_exceptions | ||||
def blob_as_pretty_string(self, wire, sha): | ||||
r725 | repo_init = self._factory.repo_libgit2(wire) | |||
with repo_init as repo: | ||||
blob_obj = repo[sha] | ||||
blob = blob_obj.data | ||||
return blob | ||||
r0 | ||||
@reraise_safe_exceptions | ||||
def blob_raw_length(self, wire, sha): | ||||
r739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
r746 | def _blob_raw_length(_repo_id, _sha): | |||
r739 | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
blob = repo[sha] | ||||
return blob.size | ||||
r746 | return _blob_raw_length(repo_id, sha) | |||
r0 | ||||
r182 | def _parse_lfs_pointer(self, raw_content): | |||
spec_string = 'version https://git-lfs.github.com/spec' | ||||
if raw_content and raw_content.startswith(spec_string): | ||||
pattern = re.compile(r""" | ||||
(?:\n)? | ||||
^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n | ||||
^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n | ||||
^size[ ](?P<oid_size>[0-9]+)\n | ||||
(?:\n)? | ||||
""", re.VERBOSE | re.MULTILINE) | ||||
match = pattern.match(raw_content) | ||||
if match: | ||||
return match.groupdict() | ||||
return {} | ||||
@reraise_safe_exceptions | ||||
r746 | def is_large_file(self, wire, commit_id): | |||
r769 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
r726 | ||||
r739 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |||
r746 | def _is_large_file(_repo_id, _sha): | |||
r739 | repo_init = self._factory.repo_libgit2(wire) | |||
with repo_init as repo: | ||||
r746 | blob = repo[commit_id] | |||
r739 | if blob.is_binary: | |||
return {} | ||||
return self._parse_lfs_pointer(blob.data) | ||||
r746 | return _is_large_file(repo_id, commit_id) | |||
r182 | ||||
@reraise_safe_exceptions | ||||
r769 | def is_binary(self, wire, tree_id): | |||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _is_binary(_repo_id, _tree_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
blob_obj = repo[tree_id] | ||||
return blob_obj.is_binary | ||||
return _is_binary(repo_id, tree_id) | ||||
@reraise_safe_exceptions | ||||
r182 | def in_largefiles_store(self, wire, oid): | |||
conf = self._wire_to_config(wire) | ||||
r731 | repo_init = self._factory.repo_libgit2(wire) | |||
with repo_init as repo: | ||||
repo_name = repo.path | ||||
r182 | ||||
store_location = conf.get('vcs_git_lfs_store_location') | ||||
if store_location: | ||||
r731 | ||||
r182 | store = LFSOidStore( | |||
oid=oid, repo=repo_name, store_location=store_location) | ||||
return store.has_oid() | ||||
return False | ||||
@reraise_safe_exceptions | ||||
def store_path(self, wire, oid): | ||||
conf = self._wire_to_config(wire) | ||||
r731 | repo_init = self._factory.repo_libgit2(wire) | |||
with repo_init as repo: | ||||
repo_name = repo.path | ||||
r182 | ||||
store_location = conf.get('vcs_git_lfs_store_location') | ||||
if store_location: | ||||
store = LFSOidStore( | ||||
oid=oid, repo=repo_name, store_location=store_location) | ||||
return store.oid_path | ||||
raise ValueError('Unable to fetch oid with path {}'.format(oid)) | ||||
r0 | @reraise_safe_exceptions | |||
def bulk_request(self, wire, rev, pre_load): | ||||
r739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
r746 | def _bulk_request(_repo_id, _rev, _pre_load): | |||
r739 | result = {} | |||
for attr in pre_load: | ||||
try: | ||||
method = self._bulk_methods[attr] | ||||
args = [wire, rev] | ||||
result[attr] = method(*args) | ||||
except KeyError as e: | ||||
raise exceptions.VcsException(e)( | ||||
"Unknown bulk attribute: %s" % attr) | ||||
return result | ||||
r746 | return _bulk_request(repo_id, rev, sorted(pre_load)) | |||
r0 | ||||
def _build_opener(self, url): | ||||
handlers = [] | ||||
r105 | url_obj = url_parser(url) | |||
r0 | _, authinfo = url_obj.authinfo() | |||
if authinfo: | ||||
# create a password manager | ||||
passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() | ||||
passmgr.add_password(*authinfo) | ||||
handlers.extend((httpbasicauthhandler(passmgr), | ||||
httpdigestauthhandler(passmgr))) | ||||
return urllib2.build_opener(*handlers) | ||||
r725 | def _type_id_to_name(self, type_id): | |||
return { | ||||
1: b'commit', | ||||
2: b'tree', | ||||
3: b'blob', | ||||
4: b'tag' | ||||
}[type_id] | ||||
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Verify that `url` points at a reachable git repository by probing its
        smart-HTTP `info/refs` endpoint.

        :param url: remote URL; credentials in the URL are used for auth but
            masked in all log output
        :param config: unused here; kept for API symmetry with callers
        :return: True on success
        :raises exceptions.URLError: when the URL cannot be opened, returns a
            non-200 code, or does not look like a git repository
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # mask credentials before the URL reaches the logs
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        # smart-HTTP discovery request: GET .../info/refs?service=git-upload-pack
        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """
        Clone by pulling all refs from `url` and applying the ones matching
        `valid_refs` (skipping refs ending with `deferred`).

        :param update_after_clone: when True, also set HEAD and build the
            working-tree index from it.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith wants a tuple for multiple prefixes
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
r739 | @reraise_safe_exceptions | |||
def branch(self, wire, commit_id): | ||||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _branch(_context_uid, _repo_id, _commit_id): | ||||
regex = re.compile('^refs/heads') | ||||
def filter_with(ref): | ||||
return regex.match(ref[0]) and ref[1] == _commit_id | ||||
branches = filter(filter_with, self.get_refs(wire).items()) | ||||
return [x[0].split('refs/heads/')[-1] for x in branches] | ||||
return _branch(context_uid, repo_id, commit_id) | ||||
@reraise_safe_exceptions | ||||
def commit_branches(self, wire, commit_id): | ||||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _commit_branches(_context_uid, _repo_id, _commit_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
branches = [x for x in repo.branches.with_commit(_commit_id)] | ||||
return branches | ||||
return _commit_branches(context_uid, repo_id, commit_id) | ||||
    @reraise_safe_exceptions
    def add_object(self, wire, content):
        """
        Store `content` as a new loose blob object and return its sha.

        NOTE(review): this opens the repo via the libgit2 factory but then
        uses dulwich-style APIs (`objects.Blob`, `repo.object_store`) on it —
        confirm the returned repo object actually exposes `object_store`.
        """
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob = objects.Blob()
            blob.set_raw_string(content)
            repo.object_store.add_object(blob)
            return blob.id
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Create a commit on `branch` applying `updated` nodes and `removed`
        paths on top of `commit_tree`.

        :param commit_data: attribute dict applied onto the new Commit object
            (author, message, etc.)
        :param commit_tree: sha of the base tree, or falsy for an empty tree
        :param updated: list of dicts with keys path/node_path/content/mode
        :param removed: list of path strings to delete
        :return: sha of the newly created commit
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from `url` into the local repository using dulwich clients.

        :param apply_refs: when True, write the fetched refs into the repo
        :param refs: optional list of ref names restricting what is fetched
        :param update_after: when True, set HEAD and rebuild the index from it
        :return: dict of all remote refs reported by the client
        :raises exceptions.AbortException: when `url` is not a git repository
        """
        # local paths use the local client; anything with a scheme goes HTTP
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
r0 | ||||
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from remote `url` via the git binary.

        Lists the remote with `ls-remote`, filters out peeled refs, HEAD and
        duplicates, then fetches the remaining refs in chunks.

        :param refs: optional sha (or list of shas) to restrict fetching
        :param all_refs: when False only heads and tags are listed
        :return: OrderedDict of remote ref name -> sha
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            # ls-remote output: "<sha>\t<ref-name>" per line
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # fetch in chunks to keep the command line within limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        """
        Mirror-push all local refs to `url` via the git binary.

        :param refs: accepted for API symmetry; `--mirror` always pushes
            everything, so this parameter is not used here
        """
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        # opens the local repo; presumably validates the path — TODO confirm
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})
r351 | ||||
@reraise_safe_exceptions | ||||
r0 | def get_remote_refs(self, wire, url): | |||
repo = Repo(url) | ||||
return repo.get_refs() | ||||
@reraise_safe_exceptions | ||||
def get_description(self, wire): | ||||
repo = self._factory.repo(wire) | ||||
return repo.get_description() | ||||
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """
        Return commit ids reachable from `rev2` but not from `rev1`, after
        syncing objects between this repo and the repo at `path2`.

        Both repositories are fetched into each other first so the walker has
        all objects available locally.
        """
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """
        Resolve `sha` (any rev-parse expression) to an object descriptor.

        :param maybe_unreachable: when True, skip the dangling-commit check so
            objects not reachable from any branch can still be returned
        :return: dict with id, type, commit_id and idx keys
        :raises exceptions.LookupException: when the sha does not resolve, or
            resolves to a commit not on any branch (dangling)
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # annotated tag: dereference to the tagged object
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
r0 | ||||
@reraise_safe_exceptions | ||||
r725 | def get_refs(self, wire): | |||
r739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _get_refs(_context_uid, _repo_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
regex = re.compile('^refs/(heads|tags)/') | ||||
return {x.name: x.target.hex for x in | ||||
filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())} | ||||
return _get_refs(context_uid, repo_id) | ||||
r0 | ||||
r739 | @reraise_safe_exceptions | |||
def get_branch_pointers(self, wire): | ||||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _get_branch_pointers(_context_uid, _repo_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
regex = re.compile('^refs/heads') | ||||
with repo_init as repo: | ||||
branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects()) | ||||
return {x.target.hex: x.shorthand for x in branches} | ||||
return _get_branch_pointers(context_uid, repo_id) | ||||
r0 | ||||
@reraise_safe_exceptions | ||||
r504 | def head(self, wire, show_exc=True): | |||
r739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _head(_context_uid, _repo_id, _show_exc): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
try: | ||||
return repo.head.peel().hex | ||||
except Exception: | ||||
if show_exc: | ||||
raise | ||||
return _head(context_uid, repo_id, show_exc) | ||||
r0 | ||||
@reraise_safe_exceptions | ||||
def init(self, wire): | ||||
repo_path = str_to_dulwich(wire['path']) | ||||
self.repo = Repo.init(repo_path) | ||||
@reraise_safe_exceptions | ||||
def init_bare(self, wire): | ||||
repo_path = str_to_dulwich(wire['path']) | ||||
self.repo = Repo.init_bare(repo_path) | ||||
@reraise_safe_exceptions | ||||
def revision(self, wire, rev): | ||||
r725 | ||||
r739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _revision(_context_uid, _repo_id, _rev): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
commit = repo[rev] | ||||
obj_data = { | ||||
'id': commit.id.hex, | ||||
} | ||||
# tree objects itself don't have tree_id attribute | ||||
if hasattr(commit, 'tree_id'): | ||||
obj_data['tree'] = commit.tree_id.hex | ||||
return obj_data | ||||
return _revision(context_uid, repo_id, rev) | ||||
r0 | ||||
@reraise_safe_exceptions | ||||
r746 | def date(self, wire, commit_id): | |||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _date(_repo_id, _commit_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
commit = repo[commit_id] | ||||
r763 | ||||
if hasattr(commit, 'commit_time'): | ||||
commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset | ||||
else: | ||||
commit = commit.get_object() | ||||
commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset | ||||
r746 | # TODO(marcink): check dulwich difference of offset vs timezone | |||
r763 | return [commit_time, commit_time_offset] | |||
r746 | return _date(repo_id, commit_id) | |||
@reraise_safe_exceptions | ||||
def author(self, wire, commit_id): | ||||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _author(_repo_id, _commit_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
commit = repo[commit_id] | ||||
r763 | if hasattr(commit, 'author'): | |||
author = commit.author | ||||
else: | ||||
author = commit.get_object().author | ||||
if author.email: | ||||
return u"{} <{}>".format(author.name, author.email) | ||||
r825 | try: | |||
return u"{}".format(author.name) | ||||
except Exception: | ||||
return u"{}".format(safe_unicode(author.raw_name)) | ||||
r746 | return _author(repo_id, commit_id) | |||
r725 | ||||
@reraise_safe_exceptions | ||||
r746 | def message(self, wire, commit_id): | |||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _message(_repo_id, _commit_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
commit = repo[commit_id] | ||||
return commit.message | ||||
return _message(repo_id, commit_id) | ||||
r725 | ||||
@reraise_safe_exceptions | ||||
r746 | def parents(self, wire, commit_id): | |||
cache_on, context_uid, repo_id = self._cache_on(wire) | ||||
@self.region.conditional_cache_on_arguments(condition=cache_on) | ||||
def _parents(_repo_id, _commit_id): | ||||
repo_init = self._factory.repo_libgit2(wire) | ||||
with repo_init as repo: | ||||
commit = repo[commit_id] | ||||
r763 | if hasattr(commit, 'parent_ids'): | |||
parent_ids = commit.parent_ids | ||||
else: | ||||
parent_ids = commit.get_object().parent_ids | ||||
return [x.hex for x in parent_ids] | ||||
r746 | return _parents(repo_id, commit_id) | |||
r725 | ||||
@reraise_safe_exceptions
def children(self, wire, commit_id):
    """Return ids of commits that have *commit_id* as a parent (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)

    @self.region.conditional_cache_on_arguments(condition=cache_on)
    def _children(_repo_id, _commit_id):
        output, __ = self.run_git_command(
            wire, ['rev-list', '--all', '--children'])

        # each output line reads: "<sha> <child-sha> <child-sha> ..."
        pat = re.compile(r'^%s' % commit_id)
        child_ids = []
        for line in output.splitlines():
            if not pat.match(line):
                continue
            child_ids.extend(line.split(' ')[1:])
        return child_ids

    return _children(repo_id, commit_id)
r0 | ||||
@reraise_safe_exceptions
def set_refs(self, wire, key, value):
    """Create or overwrite the reference *key*, pointing it at *value*."""
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        # force=True replaces an existing ref of the same name
        repo.references.create(key, value, force=True)
r725 | ||||
@reraise_safe_exceptions
def create_branch(self, wire, branch_name, commit_id, force=False):
    """Create local branch *branch_name* at *commit_id*.

    Without *force*, an already-existing branch of that name is
    left untouched.
    """
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        commit = repo[commit_id]
        # create when forcing, or when the branch does not exist yet
        if force or not repo.branches.get(branch_name):
            repo.branches.local.create(branch_name, commit, force=force)
r0 | ||||
@reraise_safe_exceptions
def remove_ref(self, wire, key):
    """Delete the reference named *key* from the repository."""
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        repo.references.delete(key)
r725 | ||||
@reraise_safe_exceptions
def tag_remove(self, wire, tag_name):
    """Delete the tag *tag_name* by removing its ``refs/tags/...`` ref."""
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        repo.references.delete('refs/tags/{}'.format(tag_name))
r0 | ||||
@reraise_safe_exceptions
def tree_changes(self, wire, source_id, target_id):
    """Return dulwich tree-change entries between two commits' trees."""
    # TODO(marcink): remove this seems it's only used by tests
    repo = self._factory.repo(wire)
    # source may be absent (e.g. diff against an initial commit)
    source = repo[source_id].tree if source_id else None
    target = repo[target_id].tree
    return list(repo.object_store.tree_changes(source, target))
@reraise_safe_exceptions
def tree_and_type_for_path(self, wire, commit_id, path):
    """Resolve *path* inside *commit_id*'s tree (cached).

    Returns ``(hex_id, type, filemode)`` of the tree entry, or
    ``(None, None, None)`` when the path does not exist in that commit.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)

    @self.region.conditional_cache_on_arguments(condition=cache_on)
    def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            try:
                entry = repo[commit_id].tree[path]
            except KeyError:
                # path not present in this commit
                return None, None, None
            return entry.id.hex, entry.type, entry.filemode

    return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
r726 | ||||
@reraise_safe_exceptions
def tree_items(self, wire, tree_id):
    """List entries of tree *tree_id* as ``(name, mode, sha, type)`` (cached).

    Raises ``ObjectMissing`` when no object with *tree_id* exists.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)

    @self.region.conditional_cache_on_arguments(condition=cache_on)
    def _tree_items(_repo_id, _tree_id):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            try:
                tree = repo[tree_id]
            except KeyError:
                raise ObjectMissing('No tree with id: {}'.format(tree_id))

            result = []
            for entry in tree:
                entry_type = entry.type
                if entry_type == 'commit':
                    # NOTE(marcink): submodules we translate to 'link' for backward compat
                    entry_type = 'link'
                result.append((entry.name, entry.filemode, entry.hex, entry_type))
            return result

    return _tree_items(repo_id, tree_id)
@reraise_safe_exceptions
def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
    """
    Old version that uses subprocess to call diff
    """
    flags = [
        '-U%s' % context, '--patch',
        '--binary',
        '--find-renames',
        '--no-indent-heuristic',
        # '--indent-heuristic',
        #'--full-index',
        #'--abbrev=40'
    ]
    if opt_ignorews:
        flags.append('--ignore-all-space')

    against_empty = commit_id_1 == self.EMPTY_COMMIT
    if against_empty:
        # no first commit to diff against -> show the second commit whole
        cmd = ['show'] + flags + [commit_id_2]
    else:
        cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

    if file_filter:
        cmd.extend(['--', file_filter])

    diff, __ = self.run_git_command(wire, cmd)

    if against_empty:
        # 'show' prints commit metadata first; drop everything before the
        # first 'diff' line, then re-append a trailing newline like 'diff'
        lines = diff.splitlines()
        skip = 0
        for line in lines:
            if line.startswith('diff'):
                break
            skip += 1
        diff = '\n'.join(lines[skip:]) + '\n'

    return diff
r754 | ||||
@reraise_safe_exceptions
def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
    """Produce a unified diff between two commits via libgit2.

    When *file_filter* is given, only the patch for that single path is
    returned (empty string when the path did not change).
    """
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        swap = True
        flags = pygit2.GIT_DIFF_SHOW_BINARY
        if opt_ignorews:
            flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

        if commit_id_1 == self.EMPTY_COMMIT:
            # diff the commit against the empty tree
            comm1 = repo[commit_id_2]
            diff_obj = comm1.tree.diff_to_tree(
                flags=flags, context_lines=context, swap=swap)
        else:
            comm1 = repo[commit_id_2]
            comm2 = repo[commit_id_1]
            diff_obj = comm1.tree.diff_to_tree(
                comm2.tree, flags=flags, context_lines=context, swap=swap)

        # post-process the diff so renamed files are detected
        diff_obj.find_similar(flags=pygit2.GIT_DIFF_FIND_RENAMES)

        if file_filter:
            for patch in diff_obj:
                if patch.delta.old_file.path == file_filter:
                    return patch.patch or ''
            # no matching path == no diff
            return ''
        return diff_obj.patch or ''
r746 | ||||
@reraise_safe_exceptions
def node_history(self, wire, commit_id, path, limit):
    """Return up to *limit* commit ids that touched *path* (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)

    @self.region.conditional_cache_on_arguments(condition=cache_on)
    def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
        if limit == 1:
            # optimize for n==1, rev-list is much faster for that use-case
            cmd = ['rev-list', '-1', commit_id, '--', path]
        else:
            cmd = ['log']
            if limit:
                cmd.extend(['-n', str(safe_int(limit, 0))])
            cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

        output, __ = self.run_git_command(wire, cmd)
        # pull the full 40-char shas out of the formatted output
        return re.findall(r'[0-9a-fA-F]{40}', output)

    return _node_history(context_uid, repo_id, commit_id, path, limit)
@reraise_safe_exceptions
def node_annotate(self, wire, commit_id, path):
    """Annotate (blame) *path* as of *commit_id*.

    Returns a list of ``(line_no, commit_id, line_content)`` tuples,
    one per line of the file, with 1-based line numbers.
    """
    # -l     ==> outputs long shas (and we need all 40 characters)
    # --root ==> doesn't put '^' character for boundaries
    # -r commit_id ==> blames for the given commit
    cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
    output, __ = self.run_git_command(wire, cmd)

    result = []
    # split('\n') leaves a trailing empty element; drop it with [:-1].
    # Bind the per-line sha to its own name instead of rebinding (and
    # shadowing) the `commit_id` parameter as the old code did.
    for line_no, blame_line in enumerate(output.split('\n')[:-1], 1):
        blame_commit_id, line = re.split(r' ', blame_line, 1)
        result.append((line_no, blame_commit_id, line))
    return result
r0 | ||||
@reraise_safe_exceptions
def update_server_info(self, wire):
    """Run dulwich's ``update_server_info`` on the repository."""
    update_server_info(self._factory.repo(wire))
@reraise_safe_exceptions
def get_all_commit_ids(self, wire):
    """Return all commit ids reachable from branches/tags, oldest first (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)

    @self.region.conditional_cache_on_arguments(condition=cache_on)
    def _get_all_commit_ids(_context_uid, _repo_id):
        cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
        try:
            output, __ = self.run_git_command(wire, cmd)
        except Exception:
            # Can be raised for empty repositories
            return []
        return output.splitlines()

    return _get_all_commit_ids(context_uid, repo_id)
r725 | ||||
@reraise_safe_exceptions
def run_git_command(self, wire, cmd, **opts):
    """Execute *cmd* via the ``git`` binary and return ``(stdout, stderr)``.

    Special keys recognised in **opts (and removed before they reach the
    subprocess layer):
      * ``_bare``  -- skip the default ``-c core.quotepath=false`` options
      * ``_safe``  -- on failure return ``('', err)`` instead of raising
      * ``_copts`` -- extra ``git -c`` style options prepended to *cmd*
      * ``extra_env`` -- mapping merged into the subprocess environment

    Raises ``exceptions.VcsException`` on execution failure unless
    ``_safe`` was given.
    """
    # run git inside the repository when the wire path exists on disk
    path = wire.get('path', None)
    if path and os.path.isdir(path):
        opts['cwd'] = path

    if '_bare' in opts:
        _copts = []
        del opts['_bare']
    else:
        _copts = ['-c', 'core.quotepath=false', ]
    safe_call = False
    if '_safe' in opts:
        # no exc on failure
        del opts['_safe']
        safe_call = True

    if '_copts' in opts:
        _copts.extend(opts['_copts'] or [])
        del opts['_copts']

    gitenv = os.environ.copy()
    gitenv.update(opts.pop('extra_env', {}))
    # need to clean fix GIT_DIR !
    if 'GIT_DIR' in gitenv:
        del gitenv['GIT_DIR']
    gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
    gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

    cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
    # shell=False: cmd is a list, nothing is interpreted by a shell
    _opts = {'env': gitenv, 'shell': False}

    proc = None
    try:
        # remaining opts (e.g. 'cwd') go straight to the chunker
        _opts.update(opts)
        proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

        # joining the chunker iterators drains stdout/stderr fully
        return ''.join(proc), ''.join(proc.error)
    except (EnvironmentError, OSError) as err:
        cmd = ' '.join(cmd)  # human friendly CMD
        tb_err = ("Couldn't run git command (%s).\n"
                  "Original error was:%s\n"
                  "Call options:%s\n"
                  % (cmd, err, _opts))
        log.exception(tb_err)
        if safe_call:
            return '', err
        else:
            raise exceptions.VcsException()(tb_err)
    finally:
        # always release the subprocess, even on the error path
        if proc:
            proc.close()
r0 | ||||
@reraise_safe_exceptions
def install_hooks(self, wire, force=False):
    """Install the RhodeCode git hooks into this repository."""
    from vcsserver.hook_utils import install_git_hooks
    repo_path = wire['path']
    return install_git_hooks(repo_path, self.bare(wire), force_create=force)
r407 | ||||
@reraise_safe_exceptions
def get_hooks_info(self, wire):
    """Report the installed pre/post hook versions for this repository."""
    from vcsserver.hook_utils import (
        get_git_pre_hook_version, get_git_post_hook_version)
    repo_path, is_bare = wire['path'], self.bare(wire)
    return {
        'pre_version': get_git_pre_hook_version(repo_path, is_bare),
        'post_version': get_git_post_hook_version(repo_path, is_bare),
    }