##// END OF EJS Templates
py3: import/code fixes
marcink -
r987:1d5adfe8 python3
parent child Browse files
Show More
@@ -1,99 +1,96 b''
1 self: super: {
1 self: super: {
2
2
3 # change GIT version
3 # change GIT version
4 # latest supported are in: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
4 # latest supported are in: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
5 git = super.lib.overrideDerivation super.git (oldAttrs: {
5 git = super.lib.overrideDerivation super.git (oldAttrs: {
6 name = "git-2.25.3";
6 name = "git-2.25.3";
7 src = self.fetchurl {
7 src = self.fetchurl {
8 url = "https://www.kernel.org/pub/software/scm/git/git-2.25.3.tar.xz";
8 url = "https://www.kernel.org/pub/software/scm/git/git-2.25.3.tar.xz";
9 sha256 = "0yvr97cl0dvj3fwblq1mb0cp97v8hrn9l98p8b1jx8815mbsnz9h";
9 sha256 = "0yvr97cl0dvj3fwblq1mb0cp97v8hrn9l98p8b1jx8815mbsnz9h";
10 };
10 };
11
11
12 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
12 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
13 patches = [
13 patches = [
14 ./patches/git/docbook2texi.patch
14 ./patches/git/docbook2texi.patch
15 ./patches/git/git-sh-i18n.patch
15 ./patches/git/git-sh-i18n.patch
16 ./patches/git/ssh-path.patch
16 ./patches/git/ssh-path.patch
17 ./patches/git/git-send-email-honor-PATH.patch
17 ./patches/git/git-send-email-honor-PATH.patch
18 ./patches/git/installCheck-path.patch
18 ./patches/git/installCheck-path.patch
19 ];
19 ];
20
20
21 });
21 });
22
22
23 libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: {
23 libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: {
24 name = "libgit2-1.0.1";
24 name = "libgit2-1.0.1";
25 version = "1.0.1";
25 version = "1.0.1";
26
26
27 src = self.fetchFromGitHub {
27 src = self.fetchFromGitHub {
28 owner = "libgit2";
28 owner = "libgit2";
29 repo = "libgit2";
29 repo = "libgit2";
30 rev = "v1.0.1";
30 rev = "v1.0.1";
31 sha256 = "1cm8fvs05rj0baigs2133q5a0sm3pa234y8h6hmwhl2bz9xq3k4b";
31 sha256 = "0xqdnvrq1bnf8hxh9xjw25y2cg91agvd9jr5qwd30z2a0dzll22v";
32 };
32 };
33
33
34 cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no"];
34 cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no"];
35
35
36 buildInputs = [
36 buildInputs = [
37 super.zlib
37 super.zlib
38 super.libssh2
38 super.libssh2
39 super.openssl
39 super.openssl
40 super.curl
40 super.curl
41 ];
41 ];
42
42
43
44 });
43 });
45
44
46
47
48 # Override subversion derivation to
45 # Override subversion derivation to
49 # - activate python bindings
46 # - activate special python bindings
50 subversionrc =
47 subversionrc =
51 let
48 let
52 py3c = self.python37Packages.buildPythonPackage rec {
49 py3c = self.python37Packages.buildPythonPackage rec {
53 pname = "py3c";
50 pname = "py3c";
54 version = "1.0";
51 version = "1.0";
55 src = self.fetchurl {
52 src = self.fetchurl {
56 url = "https://files.pythonhosted.org/packages/6a/aa/9f1a69a8c71e72553b281603633e42501de932aa4d9912bccbf9a2884093/py3c-1.0.tar.gz";
53 url = "https://files.pythonhosted.org/packages/6a/aa/9f1a69a8c71e72553b281603633e42501de932aa4d9912bccbf9a2884093/py3c-1.0.tar.gz";
57 sha256 = "1h80jqi6r64kppxb4kshsiadrgc5hwk5arp3zcki01jf4ahknjz9";
54 sha256 = "1h80jqi6r64kppxb4kshsiadrgc5hwk5arp3zcki01jf4ahknjz9";
58 };
55 };
59 format = "setuptools";
56 format = "setuptools";
60 doCheck = false;
57 doCheck = false;
61 buildInputs = [];
58 buildInputs = [];
62 checkInputs = [];
59 checkInputs = [];
63 nativeBuildInputs = [];
60 nativeBuildInputs = [];
64 propagatedBuildInputs = [];
61 propagatedBuildInputs = [];
65 meta = {
62 meta = {
66 license = [ ];
63 license = [ ];
67 };
64 };
68 };
65 };
69 in
66 in
70 let
67 let
71 pythonWithEnv = self.python37Packages.python.buildEnv.override {
68 pythonWithEnv = self.python37Packages.python.buildEnv.override {
72 extraLibs = [ py3c ];
69 extraLibs = [ py3c ];
73 };
70 };
74 in
71 in
75 let
72 let
76 subversionWithPython = super.subversion.override {
73 subversionWithPython = super.subversion.override {
77 httpSupport = true; # client must support http
74 httpSupport = true; # client must support http
78 pythonBindings = true;
75 pythonBindings = true;
79 python = pythonWithEnv;
76 python = pythonWithEnv;
80 };
77 };
81
78
82 in
79 in
83 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
80 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
84 name = "subversion-1.14.0";
81 name = "subversion-1.14.0";
85 src = self.fetchurl {
82 src = self.fetchurl {
86 url = "https://archive.apache.org/dist/subversion/subversion-1.14.0.tar.gz";
83 url = "https://archive.apache.org/dist/subversion/subversion-1.14.0.tar.gz";
87 sha256 = "1l1px5kva5a13pi2rkxfgxfvypvl6bmbkdag6168fhayad3i2ggg";
84 sha256 = "1l1px5kva5a13pi2rkxfgxfvypvl6bmbkdag6168fhayad3i2ggg";
88 };
85 };
89
86
90 ## use internal lz4/utf8proc because it is stable and shipped with SVN
87 ## use internal lz4/utf8proc because it is stable and shipped with SVN
91 configureFlags = oldAttrs.configureFlags ++ [
88 configureFlags = oldAttrs.configureFlags ++ [
92 " --with-lz4=internal"
89 " --with-lz4=internal"
93 " --with-utf8proc=internal"
90 " --with-utf8proc=internal"
94 ];
91 ];
95 });
92 });
96
93
97
94
98
95
99 }
96 }
@@ -1,76 +1,76 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urllib.parse
22
22
23 from vcsserver.lib.rc_cache import region_meta
23 from vcsserver.lib.rc_cache import region_meta
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class RepoFactory(object):
27 class RepoFactory(object):
28 """
28 """
29 Utility to create instances of repository
29 Utility to create instances of repository
30
30
31 It provides internal caching of the `repo` object based on
31 It provides internal caching of the `repo` object based on
32 the :term:`call context`.
32 the :term:`call context`.
33 """
33 """
34 repo_type = None
34 repo_type = None
35
35
36 def __init__(self):
36 def __init__(self):
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38
38
39 def _create_config(self, path, config):
39 def _create_config(self, path, config):
40 config = {}
40 config = {}
41 return config
41 return config
42
42
43 def _create_repo(self, wire, create):
43 def _create_repo(self, wire, create):
44 raise NotImplementedError()
44 raise NotImplementedError()
45
45
46 def repo(self, wire, create=False):
46 def repo(self, wire, create=False):
47 raise NotImplementedError()
47 raise NotImplementedError()
48
48
49
49
50 def obfuscate_qs(query_string):
50 def obfuscate_qs(query_string):
51 if query_string is None:
51 if query_string is None:
52 return None
52 return None
53
53
54 parsed = []
54 parsed = []
55 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
55 for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
56 if k in ['auth_token', 'api_key']:
56 if k in ['auth_token', 'api_key']:
57 v = "*****"
57 v = "*****"
58 parsed.append((k, v))
58 parsed.append((k, v))
59
59
60 return '&'.join('{}{}'.format(
60 return '&'.join('{}{}'.format(
61 k, '={}'.format(v) if v else '') for k, v in parsed)
61 k, '={}'.format(v) if v else '') for k, v in parsed)
62
62
63
63
64 def raise_from_original(new_type):
64 def raise_from_original(new_type):
65 """
65 """
66 Raise a new exception type with original args and traceback.
66 Raise a new exception type with original args and traceback.
67 """
67 """
68 exc_type, exc_value, exc_traceback = sys.exc_info()
68 exc_type, exc_value, exc_traceback = sys.exc_info()
69 new_exc = new_type(*exc_value.args)
69 new_exc = new_type(*exc_value.args)
70 # store the original traceback into the new exc
70 # store the original traceback into the new exc
71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
72
72
73 try:
73 try:
74 raise new_exc, None, exc_traceback
74 raise new_exc.with_traceback(exc_traceback)
75 finally:
75 finally:
76 del exc_traceback
76 del exc_traceback
@@ -1,1192 +1,1192 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib.request, urllib.parse, urllib.error
26 import urllib2
26 import urllib.request, urllib.error, urllib.parse
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from dulwich import index, objects
32 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
34 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
36 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
37 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
39 from dulwich.server import update_server_info
40
40
41 from vcsserver import exceptions, settings, subprocessio
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.hgcompat import (
44 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
47 from vcsserver.vcs_base import RemoteBase
48
48
49 DIR_STAT = stat.S_IFDIR
49 DIR_STAT = stat.S_IFDIR
50 FILE_MODE = stat.S_IFMT
50 FILE_MODE = stat.S_IFMT
51 GIT_LINK = objects.S_IFGITLINK
51 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
52 PEELED_REF_MARKER = '^{}'
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 def str_to_dulwich(value):
58 def str_to_dulwich(value):
59 """
59 """
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 """
61 """
62 return value.decode(settings.WIRE_ENCODING)
62 return value.decode(settings.WIRE_ENCODING)
63
63
64
64
65 def reraise_safe_exceptions(func):
65 def reraise_safe_exceptions(func):
66 """Converts Dulwich exceptions to something neutral."""
66 """Converts Dulwich exceptions to something neutral."""
67
67
68 @wraps(func)
68 @wraps(func)
69 def wrapper(*args, **kwargs):
69 def wrapper(*args, **kwargs):
70 try:
70 try:
71 return func(*args, **kwargs)
71 return func(*args, **kwargs)
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 exc = exceptions.LookupException(org_exc=e)
73 exc = exceptions.LookupException(org_exc=e)
74 raise exc(safe_str(e))
74 raise exc(safe_str(e))
75 except (HangupException, UnexpectedCommandError) as e:
75 except (HangupException, UnexpectedCommandError) as e:
76 exc = exceptions.VcsException(org_exc=e)
76 exc = exceptions.VcsException(org_exc=e)
77 raise exc(safe_str(e))
77 raise exc(safe_str(e))
78 except Exception as e:
78 except Exception as e:
79 # NOTE(marcink): becuase of how dulwich handles some exceptions
79 # NOTE(marcink): becuase of how dulwich handles some exceptions
80 # (KeyError on empty repos), we cannot track this and catch all
80 # (KeyError on empty repos), we cannot track this and catch all
81 # exceptions, it's an exceptions from other handlers
81 # exceptions, it's an exceptions from other handlers
82 #if not hasattr(e, '_vcs_kind'):
82 #if not hasattr(e, '_vcs_kind'):
83 #log.exception("Unhandled exception in git remote call")
83 #log.exception("Unhandled exception in git remote call")
84 #raise_from_original(exceptions.UnhandledException)
84 #raise_from_original(exceptions.UnhandledException)
85 raise
85 raise
86 return wrapper
86 return wrapper
87
87
88
88
89 class Repo(DulwichRepo):
89 class Repo(DulwichRepo):
90 """
90 """
91 A wrapper for dulwich Repo class.
91 A wrapper for dulwich Repo class.
92
92
93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 "Too many open files" error. We need to close all opened file descriptors
94 "Too many open files" error. We need to close all opened file descriptors
95 once the repo object is destroyed.
95 once the repo object is destroyed.
96 """
96 """
97 def __del__(self):
97 def __del__(self):
98 if hasattr(self, 'object_store'):
98 if hasattr(self, 'object_store'):
99 self.close()
99 self.close()
100
100
101
101
102 class Repository(LibGit2Repo):
102 class Repository(LibGit2Repo):
103
103
104 def __enter__(self):
104 def __enter__(self):
105 return self
105 return self
106
106
107 def __exit__(self, exc_type, exc_val, exc_tb):
107 def __exit__(self, exc_type, exc_val, exc_tb):
108 self.free()
108 self.free()
109
109
110
110
111 class GitFactory(RepoFactory):
111 class GitFactory(RepoFactory):
112 repo_type = 'git'
112 repo_type = 'git'
113
113
114 def _create_repo(self, wire, create, use_libgit2=False):
114 def _create_repo(self, wire, create, use_libgit2=False):
115 if use_libgit2:
115 if use_libgit2:
116 return Repository(wire['path'])
116 return Repository(wire['path'])
117 else:
117 else:
118 repo_path = str_to_dulwich(wire['path'])
118 repo_path = str_to_dulwich(wire['path'])
119 return Repo(repo_path)
119 return Repo(repo_path)
120
120
121 def repo(self, wire, create=False, use_libgit2=False):
121 def repo(self, wire, create=False, use_libgit2=False):
122 """
122 """
123 Get a repository instance for the given path.
123 Get a repository instance for the given path.
124 """
124 """
125 return self._create_repo(wire, create, use_libgit2)
125 return self._create_repo(wire, create, use_libgit2)
126
126
127 def repo_libgit2(self, wire):
127 def repo_libgit2(self, wire):
128 return self.repo(wire, use_libgit2=True)
128 return self.repo(wire, use_libgit2=True)
129
129
130
130
131 class GitRemote(RemoteBase):
131 class GitRemote(RemoteBase):
132
132
133 def __init__(self, factory):
133 def __init__(self, factory):
134 self._factory = factory
134 self._factory = factory
135 self._bulk_methods = {
135 self._bulk_methods = {
136 "date": self.date,
136 "date": self.date,
137 "author": self.author,
137 "author": self.author,
138 "branch": self.branch,
138 "branch": self.branch,
139 "message": self.message,
139 "message": self.message,
140 "parents": self.parents,
140 "parents": self.parents,
141 "_commit": self.revision,
141 "_commit": self.revision,
142 }
142 }
143
143
144 def _wire_to_config(self, wire):
144 def _wire_to_config(self, wire):
145 if 'config' in wire:
145 if 'config' in wire:
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return {}
147 return {}
148
148
149 def _remote_conf(self, config):
149 def _remote_conf(self, config):
150 params = [
150 params = [
151 '-c', 'core.askpass=""',
151 '-c', 'core.askpass=""',
152 ]
152 ]
153 ssl_cert_dir = config.get('vcs_ssl_dir')
153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 if ssl_cert_dir:
154 if ssl_cert_dir:
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 return params
156 return params
157
157
158 @reraise_safe_exceptions
158 @reraise_safe_exceptions
159 def discover_git_version(self):
159 def discover_git_version(self):
160 stdout, _ = self.run_git_command(
160 stdout, _ = self.run_git_command(
161 {}, ['--version'], _bare=True, _safe=True)
161 {}, ['--version'], _bare=True, _safe=True)
162 prefix = 'git version'
162 prefix = 'git version'
163 if stdout.startswith(prefix):
163 if stdout.startswith(prefix):
164 stdout = stdout[len(prefix):]
164 stdout = stdout[len(prefix):]
165 return stdout.strip()
165 return stdout.strip()
166
166
167 @reraise_safe_exceptions
167 @reraise_safe_exceptions
168 def is_empty(self, wire):
168 def is_empty(self, wire):
169 repo_init = self._factory.repo_libgit2(wire)
169 repo_init = self._factory.repo_libgit2(wire)
170 with repo_init as repo:
170 with repo_init as repo:
171
171
172 try:
172 try:
173 has_head = repo.head.name
173 has_head = repo.head.name
174 if has_head:
174 if has_head:
175 return False
175 return False
176
176
177 # NOTE(marcink): check again using more expensive method
177 # NOTE(marcink): check again using more expensive method
178 return repo.is_empty
178 return repo.is_empty
179 except Exception:
179 except Exception:
180 pass
180 pass
181
181
182 return True
182 return True
183
183
184 @reraise_safe_exceptions
184 @reraise_safe_exceptions
185 def assert_correct_path(self, wire):
185 def assert_correct_path(self, wire):
186 cache_on, context_uid, repo_id = self._cache_on(wire)
186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 def _assert_correct_path(_context_uid, _repo_id):
188 def _assert_correct_path(_context_uid, _repo_id):
189 try:
189 try:
190 repo_init = self._factory.repo_libgit2(wire)
190 repo_init = self._factory.repo_libgit2(wire)
191 with repo_init as repo:
191 with repo_init as repo:
192 pass
192 pass
193 except pygit2.GitError:
193 except pygit2.GitError:
194 path = wire.get('path')
194 path = wire.get('path')
195 tb = traceback.format_exc()
195 tb = traceback.format_exc()
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 return False
197 return False
198
198
199 return True
199 return True
200 return _assert_correct_path(context_uid, repo_id)
200 return _assert_correct_path(context_uid, repo_id)
201
201
202 @reraise_safe_exceptions
202 @reraise_safe_exceptions
203 def bare(self, wire):
203 def bare(self, wire):
204 repo_init = self._factory.repo_libgit2(wire)
204 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
205 with repo_init as repo:
206 return repo.is_bare
206 return repo.is_bare
207
207
208 @reraise_safe_exceptions
208 @reraise_safe_exceptions
209 def blob_as_pretty_string(self, wire, sha):
209 def blob_as_pretty_string(self, wire, sha):
210 repo_init = self._factory.repo_libgit2(wire)
210 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
211 with repo_init as repo:
212 blob_obj = repo[sha]
212 blob_obj = repo[sha]
213 blob = blob_obj.data
213 blob = blob_obj.data
214 return blob
214 return blob
215
215
216 @reraise_safe_exceptions
216 @reraise_safe_exceptions
217 def blob_raw_length(self, wire, sha):
217 def blob_raw_length(self, wire, sha):
218 cache_on, context_uid, repo_id = self._cache_on(wire)
218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 def _blob_raw_length(_repo_id, _sha):
220 def _blob_raw_length(_repo_id, _sha):
221
221
222 repo_init = self._factory.repo_libgit2(wire)
222 repo_init = self._factory.repo_libgit2(wire)
223 with repo_init as repo:
223 with repo_init as repo:
224 blob = repo[sha]
224 blob = repo[sha]
225 return blob.size
225 return blob.size
226
226
227 return _blob_raw_length(repo_id, sha)
227 return _blob_raw_length(repo_id, sha)
228
228
229 def _parse_lfs_pointer(self, raw_content):
229 def _parse_lfs_pointer(self, raw_content):
230
230
231 spec_string = 'version https://git-lfs.github.com/spec'
231 spec_string = 'version https://git-lfs.github.com/spec'
232 if raw_content and raw_content.startswith(spec_string):
232 if raw_content and raw_content.startswith(spec_string):
233 pattern = re.compile(r"""
233 pattern = re.compile(r"""
234 (?:\n)?
234 (?:\n)?
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
238 (?:\n)?
238 (?:\n)?
239 """, re.VERBOSE | re.MULTILINE)
239 """, re.VERBOSE | re.MULTILINE)
240 match = pattern.match(raw_content)
240 match = pattern.match(raw_content)
241 if match:
241 if match:
242 return match.groupdict()
242 return match.groupdict()
243
243
244 return {}
244 return {}
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def is_large_file(self, wire, commit_id):
247 def is_large_file(self, wire, commit_id):
248 cache_on, context_uid, repo_id = self._cache_on(wire)
248 cache_on, context_uid, repo_id = self._cache_on(wire)
249
249
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
251 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
252 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
253 with repo_init as repo:
254 blob = repo[commit_id]
254 blob = repo[commit_id]
255 if blob.is_binary:
255 if blob.is_binary:
256 return {}
256 return {}
257
257
258 return self._parse_lfs_pointer(blob.data)
258 return self._parse_lfs_pointer(blob.data)
259
259
260 return _is_large_file(repo_id, commit_id)
260 return _is_large_file(repo_id, commit_id)
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
263 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265
265
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
271 return blob_obj.is_binary
272
272
273 return _is_binary(repo_id, tree_id)
273 return _is_binary(repo_id, tree_id)
274
274
275 @reraise_safe_exceptions
275 @reraise_safe_exceptions
276 def in_largefiles_store(self, wire, oid):
276 def in_largefiles_store(self, wire, oid):
277 conf = self._wire_to_config(wire)
277 conf = self._wire_to_config(wire)
278 repo_init = self._factory.repo_libgit2(wire)
278 repo_init = self._factory.repo_libgit2(wire)
279 with repo_init as repo:
279 with repo_init as repo:
280 repo_name = repo.path
280 repo_name = repo.path
281
281
282 store_location = conf.get('vcs_git_lfs_store_location')
282 store_location = conf.get('vcs_git_lfs_store_location')
283 if store_location:
283 if store_location:
284
284
285 store = LFSOidStore(
285 store = LFSOidStore(
286 oid=oid, repo=repo_name, store_location=store_location)
286 oid=oid, repo=repo_name, store_location=store_location)
287 return store.has_oid()
287 return store.has_oid()
288
288
289 return False
289 return False
290
290
291 @reraise_safe_exceptions
291 @reraise_safe_exceptions
292 def store_path(self, wire, oid):
292 def store_path(self, wire, oid):
293 conf = self._wire_to_config(wire)
293 conf = self._wire_to_config(wire)
294 repo_init = self._factory.repo_libgit2(wire)
294 repo_init = self._factory.repo_libgit2(wire)
295 with repo_init as repo:
295 with repo_init as repo:
296 repo_name = repo.path
296 repo_name = repo.path
297
297
298 store_location = conf.get('vcs_git_lfs_store_location')
298 store_location = conf.get('vcs_git_lfs_store_location')
299 if store_location:
299 if store_location:
300 store = LFSOidStore(
300 store = LFSOidStore(
301 oid=oid, repo=repo_name, store_location=store_location)
301 oid=oid, repo=repo_name, store_location=store_location)
302 return store.oid_path
302 return store.oid_path
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def bulk_request(self, wire, rev, pre_load):
306 def bulk_request(self, wire, rev, pre_load):
307 cache_on, context_uid, repo_id = self._cache_on(wire)
307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 def _bulk_request(_repo_id, _rev, _pre_load):
309 def _bulk_request(_repo_id, _rev, _pre_load):
310 result = {}
310 result = {}
311 for attr in pre_load:
311 for attr in pre_load:
312 try:
312 try:
313 method = self._bulk_methods[attr]
313 method = self._bulk_methods[attr]
314 args = [wire, rev]
314 args = [wire, rev]
315 result[attr] = method(*args)
315 result[attr] = method(*args)
316 except KeyError as e:
316 except KeyError as e:
317 raise exceptions.VcsException(e)(
317 raise exceptions.VcsException(e)(
318 "Unknown bulk attribute: %s" % attr)
318 "Unknown bulk attribute: %s" % attr)
319 return result
319 return result
320
320
321 return _bulk_request(repo_id, rev, sorted(pre_load))
321 return _bulk_request(repo_id, rev, sorted(pre_load))
322
322
323 def _build_opener(self, url):
323 def _build_opener(self, url):
324 handlers = []
324 handlers = []
325 url_obj = url_parser(url)
325 url_obj = url_parser(url)
326 _, authinfo = url_obj.authinfo()
326 _, authinfo = url_obj.authinfo()
327
327
328 if authinfo:
328 if authinfo:
329 # create a password manager
329 # create a password manager
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
330 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
331 passmgr.add_password(*authinfo)
331 passmgr.add_password(*authinfo)
332
332
333 handlers.extend((httpbasicauthhandler(passmgr),
333 handlers.extend((httpbasicauthhandler(passmgr),
334 httpdigestauthhandler(passmgr)))
334 httpdigestauthhandler(passmgr)))
335
335
336 return urllib2.build_opener(*handlers)
336 return urllib.request.build_opener(*handlers)
337
337
338 def _type_id_to_name(self, type_id):
338 def _type_id_to_name(self, type_id):
339 return {
339 return {
340 1: b'commit',
340 1: b'commit',
341 2: b'tree',
341 2: b'tree',
342 3: b'blob',
342 3: b'blob',
343 4: b'tag'
343 4: b'tag'
344 }[type_id]
344 }[type_id]
345
345
346 @reraise_safe_exceptions
346 @reraise_safe_exceptions
347 def check_url(self, url, config):
347 def check_url(self, url, config):
348 url_obj = url_parser(url)
348 url_obj = url_parser(url)
349 test_uri, _ = url_obj.authinfo()
349 test_uri, _ = url_obj.authinfo()
350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 url_obj.query = obfuscate_qs(url_obj.query)
351 url_obj.query = obfuscate_qs(url_obj.query)
352 cleaned_uri = str(url_obj)
352 cleaned_uri = str(url_obj)
353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354
354
355 if not test_uri.endswith('info/refs'):
355 if not test_uri.endswith('info/refs'):
356 test_uri = test_uri.rstrip('/') + '/info/refs'
356 test_uri = test_uri.rstrip('/') + '/info/refs'
357
357
358 o = self._build_opener(url)
358 o = self._build_opener(url)
359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
360
360
361 q = {"service": 'git-upload-pack'}
361 q = {"service": 'git-upload-pack'}
362 qs = '?%s' % urllib.urlencode(q)
362 qs = '?%s' % urllib.parse.urlencode(q)
363 cu = "%s%s" % (test_uri, qs)
363 cu = "%s%s" % (test_uri, qs)
364 req = urllib2.Request(cu, None, {})
364 req = urllib.request.Request(cu, None, {})
365
365
366 try:
366 try:
367 log.debug("Trying to open URL %s", cleaned_uri)
367 log.debug("Trying to open URL %s", cleaned_uri)
368 resp = o.open(req)
368 resp = o.open(req)
369 if resp.code != 200:
369 if resp.code != 200:
370 raise exceptions.URLError()('Return Code is not 200')
370 raise exceptions.URLError()('Return Code is not 200')
371 except Exception as e:
371 except Exception as e:
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 # means it cannot be cloned
373 # means it cannot be cloned
374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375
375
376 # now detect if it's proper git repo
376 # now detect if it's proper git repo
377 gitdata = resp.read()
377 gitdata = resp.read()
378 if 'service=git-upload-pack' in gitdata:
378 if 'service=git-upload-pack' in gitdata:
379 pass
379 pass
380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 # old style git can return some other format !
381 # old style git can return some other format !
382 pass
382 pass
383 else:
383 else:
384 raise exceptions.URLError()(
384 raise exceptions.URLError()(
385 "url [%s] does not look like an git" % (cleaned_uri,))
385 "url [%s] does not look like an git" % (cleaned_uri,))
386
386
387 return True
387 return True
388
388
389 @reraise_safe_exceptions
389 @reraise_safe_exceptions
390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
392 remote_refs = self.pull(wire, url, apply_refs=False)
392 remote_refs = self.pull(wire, url, apply_refs=False)
393 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
394 if isinstance(valid_refs, list):
394 if isinstance(valid_refs, list):
395 valid_refs = tuple(valid_refs)
395 valid_refs = tuple(valid_refs)
396
396
397 for k in remote_refs:
397 for k in remote_refs:
398 # only parse heads/tags and skip so called deferred tags
398 # only parse heads/tags and skip so called deferred tags
399 if k.startswith(valid_refs) and not k.endswith(deferred):
399 if k.startswith(valid_refs) and not k.endswith(deferred):
400 repo[k] = remote_refs[k]
400 repo[k] = remote_refs[k]
401
401
402 if update_after_clone:
402 if update_after_clone:
403 # we want to checkout HEAD
403 # we want to checkout HEAD
404 repo["HEAD"] = remote_refs["HEAD"]
404 repo["HEAD"] = remote_refs["HEAD"]
405 index.build_index_from_tree(repo.path, repo.index_path(),
405 index.build_index_from_tree(repo.path, repo.index_path(),
406 repo.object_store, repo["HEAD"].tree)
406 repo.object_store, repo["HEAD"].tree)
407
407
408 @reraise_safe_exceptions
408 @reraise_safe_exceptions
409 def branch(self, wire, commit_id):
409 def branch(self, wire, commit_id):
410 cache_on, context_uid, repo_id = self._cache_on(wire)
410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 def _branch(_context_uid, _repo_id, _commit_id):
412 def _branch(_context_uid, _repo_id, _commit_id):
413 regex = re.compile('^refs/heads')
413 regex = re.compile('^refs/heads')
414
414
415 def filter_with(ref):
415 def filter_with(ref):
416 return regex.match(ref[0]) and ref[1] == _commit_id
416 return regex.match(ref[0]) and ref[1] == _commit_id
417
417
418 branches = filter(filter_with, self.get_refs(wire).items())
418 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
419 return [x[0].split('refs/heads/')[-1] for x in branches]
419 return [x[0].split('refs/heads/')[-1] for x in branches]
420
420
421 return _branch(context_uid, repo_id, commit_id)
421 return _branch(context_uid, repo_id, commit_id)
422
422
423 @reraise_safe_exceptions
423 @reraise_safe_exceptions
424 def commit_branches(self, wire, commit_id):
424 def commit_branches(self, wire, commit_id):
425 cache_on, context_uid, repo_id = self._cache_on(wire)
425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 repo_init = self._factory.repo_libgit2(wire)
428 repo_init = self._factory.repo_libgit2(wire)
429 with repo_init as repo:
429 with repo_init as repo:
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 return branches
431 return branches
432
432
433 return _commit_branches(context_uid, repo_id, commit_id)
433 return _commit_branches(context_uid, repo_id, commit_id)
434
434
435 @reraise_safe_exceptions
435 @reraise_safe_exceptions
436 def add_object(self, wire, content):
436 def add_object(self, wire, content):
437 repo_init = self._factory.repo_libgit2(wire)
437 repo_init = self._factory.repo_libgit2(wire)
438 with repo_init as repo:
438 with repo_init as repo:
439 blob = objects.Blob()
439 blob = objects.Blob()
440 blob.set_raw_string(content)
440 blob.set_raw_string(content)
441 repo.object_store.add_object(blob)
441 repo.object_store.add_object(blob)
442 return blob.id
442 return blob.id
443
443
444 # TODO: this is quite complex, check if that can be simplified
444 # TODO: this is quite complex, check if that can be simplified
445 @reraise_safe_exceptions
445 @reraise_safe_exceptions
446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
447 repo = self._factory.repo(wire)
447 repo = self._factory.repo(wire)
448 object_store = repo.object_store
448 object_store = repo.object_store
449
449
450 # Create tree and populates it with blobs
450 # Create tree and populates it with blobs
451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
452
452
453 for node in updated:
453 for node in updated:
454 # Compute subdirs if needed
454 # Compute subdirs if needed
455 dirpath, nodename = vcspath.split(node['path'])
455 dirpath, nodename = vcspath.split(node['path'])
456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
456 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
457 parent = commit_tree
457 parent = commit_tree
458 ancestors = [('', parent)]
458 ancestors = [('', parent)]
459
459
460 # Tries to dig for the deepest existing tree
460 # Tries to dig for the deepest existing tree
461 while dirnames:
461 while dirnames:
462 curdir = dirnames.pop(0)
462 curdir = dirnames.pop(0)
463 try:
463 try:
464 dir_id = parent[curdir][1]
464 dir_id = parent[curdir][1]
465 except KeyError:
465 except KeyError:
466 # put curdir back into dirnames and stops
466 # put curdir back into dirnames and stops
467 dirnames.insert(0, curdir)
467 dirnames.insert(0, curdir)
468 break
468 break
469 else:
469 else:
470 # If found, updates parent
470 # If found, updates parent
471 parent = repo[dir_id]
471 parent = repo[dir_id]
472 ancestors.append((curdir, parent))
472 ancestors.append((curdir, parent))
473 # Now parent is deepest existing tree and we need to create
473 # Now parent is deepest existing tree and we need to create
474 # subtrees for dirnames (in reverse order)
474 # subtrees for dirnames (in reverse order)
475 # [this only applies for nodes from added]
475 # [this only applies for nodes from added]
476 new_trees = []
476 new_trees = []
477
477
478 blob = objects.Blob.from_string(node['content'])
478 blob = objects.Blob.from_string(node['content'])
479
479
480 if dirnames:
480 if dirnames:
481 # If there are trees which should be created we need to build
481 # If there are trees which should be created we need to build
482 # them now (in reverse order)
482 # them now (in reverse order)
483 reversed_dirnames = list(reversed(dirnames))
483 reversed_dirnames = list(reversed(dirnames))
484 curtree = objects.Tree()
484 curtree = objects.Tree()
485 curtree[node['node_path']] = node['mode'], blob.id
485 curtree[node['node_path']] = node['mode'], blob.id
486 new_trees.append(curtree)
486 new_trees.append(curtree)
487 for dirname in reversed_dirnames[:-1]:
487 for dirname in reversed_dirnames[:-1]:
488 newtree = objects.Tree()
488 newtree = objects.Tree()
489 newtree[dirname] = (DIR_STAT, curtree.id)
489 newtree[dirname] = (DIR_STAT, curtree.id)
490 new_trees.append(newtree)
490 new_trees.append(newtree)
491 curtree = newtree
491 curtree = newtree
492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 else:
493 else:
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495
495
496 new_trees.append(parent)
496 new_trees.append(parent)
497 # Update ancestors
497 # Update ancestors
498 reversed_ancestors = reversed(
498 reversed_ancestors = reversed(
499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 for parent, tree, path in reversed_ancestors:
500 for parent, tree, path in reversed_ancestors:
501 parent[path] = (DIR_STAT, tree.id)
501 parent[path] = (DIR_STAT, tree.id)
502 object_store.add_object(tree)
502 object_store.add_object(tree)
503
503
504 object_store.add_object(blob)
504 object_store.add_object(blob)
505 for tree in new_trees:
505 for tree in new_trees:
506 object_store.add_object(tree)
506 object_store.add_object(tree)
507
507
508 for node_path in removed:
508 for node_path in removed:
509 paths = node_path.split('/')
509 paths = node_path.split('/')
510 tree = commit_tree
510 tree = commit_tree
511 trees = [tree]
511 trees = [tree]
512 # Traverse deep into the forest...
512 # Traverse deep into the forest...
513 for path in paths:
513 for path in paths:
514 try:
514 try:
515 obj = repo[tree[path][1]]
515 obj = repo[tree[path][1]]
516 if isinstance(obj, objects.Tree):
516 if isinstance(obj, objects.Tree):
517 trees.append(obj)
517 trees.append(obj)
518 tree = obj
518 tree = obj
519 except KeyError:
519 except KeyError:
520 break
520 break
521 # Cut down the blob and all rotten trees on the way back...
521 # Cut down the blob and all rotten trees on the way back...
522 for path, tree in reversed(zip(paths, trees)):
522 for path, tree in reversed(list(zip(paths, trees))):
523 del tree[path]
523 del tree[path]
524 if tree:
524 if tree:
525 # This tree still has elements - don't remove it or any
525 # This tree still has elements - don't remove it or any
526 # of it's parents
526 # of it's parents
527 break
527 break
528
528
529 object_store.add_object(commit_tree)
529 object_store.add_object(commit_tree)
530
530
531 # Create commit
531 # Create commit
532 commit = objects.Commit()
532 commit = objects.Commit()
533 commit.tree = commit_tree.id
533 commit.tree = commit_tree.id
534 for k, v in commit_data.iteritems():
534 for k, v in commit_data.items():
535 setattr(commit, k, v)
535 setattr(commit, k, v)
536 object_store.add_object(commit)
536 object_store.add_object(commit)
537
537
538 self.create_branch(wire, branch, commit.id)
538 self.create_branch(wire, branch, commit.id)
539
539
540 # dulwich set-ref
540 # dulwich set-ref
541 ref = 'refs/heads/%s' % branch
541 ref = 'refs/heads/%s' % branch
542 repo.refs[ref] = commit.id
542 repo.refs[ref] = commit.id
543
543
544 return commit.id
544 return commit.id
545
545
546 @reraise_safe_exceptions
546 @reraise_safe_exceptions
547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 if url != 'default' and '://' not in url:
548 if url != 'default' and '://' not in url:
549 client = LocalGitClient(url)
549 client = LocalGitClient(url)
550 else:
550 else:
551 url_obj = url_parser(url)
551 url_obj = url_parser(url)
552 o = self._build_opener(url)
552 o = self._build_opener(url)
553 url, _ = url_obj.authinfo()
553 url, _ = url_obj.authinfo()
554 client = HttpGitClient(base_url=url, opener=o)
554 client = HttpGitClient(base_url=url, opener=o)
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556
556
557 determine_wants = repo.object_store.determine_wants_all
557 determine_wants = repo.object_store.determine_wants_all
558 if refs:
558 if refs:
559 def determine_wants_requested(references):
559 def determine_wants_requested(references):
560 return [references[r] for r in references if r in refs]
560 return [references[r] for r in references if r in refs]
561 determine_wants = determine_wants_requested
561 determine_wants = determine_wants_requested
562
562
563 try:
563 try:
564 remote_refs = client.fetch(
564 remote_refs = client.fetch(
565 path=url, target=repo, determine_wants=determine_wants)
565 path=url, target=repo, determine_wants=determine_wants)
566 except NotGitRepository as e:
566 except NotGitRepository as e:
567 log.warning(
567 log.warning(
568 'Trying to fetch from "%s" failed, not a Git repository.', url)
568 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 # Exception can contain unicode which we convert
569 # Exception can contain unicode which we convert
570 raise exceptions.AbortException(e)(repr(e))
570 raise exceptions.AbortException(e)(repr(e))
571
571
572 # mikhail: client.fetch() returns all the remote refs, but fetches only
572 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 # refs filtered by `determine_wants` function. We need to filter result
573 # refs filtered by `determine_wants` function. We need to filter result
574 # as well
574 # as well
575 if refs:
575 if refs:
576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577
577
578 if apply_refs:
578 if apply_refs:
579 # TODO: johbo: Needs proper test coverage with a git repository
579 # TODO: johbo: Needs proper test coverage with a git repository
580 # that contains a tag object, so that we would end up with
580 # that contains a tag object, so that we would end up with
581 # a peeled ref at this point.
581 # a peeled ref at this point.
582 for k in remote_refs:
582 for k in remote_refs:
583 if k.endswith(PEELED_REF_MARKER):
583 if k.endswith(PEELED_REF_MARKER):
584 log.debug("Skipping peeled reference %s", k)
584 log.debug("Skipping peeled reference %s", k)
585 continue
585 continue
586 repo[k] = remote_refs[k]
586 repo[k] = remote_refs[k]
587
587
588 if refs and not update_after:
588 if refs and not update_after:
589 # mikhail: explicitly set the head to the last ref.
589 # mikhail: explicitly set the head to the last ref.
590 repo['HEAD'] = remote_refs[refs[-1]]
590 repo['HEAD'] = remote_refs[refs[-1]]
591
591
592 if update_after:
592 if update_after:
593 # we want to checkout HEAD
593 # we want to checkout HEAD
594 repo["HEAD"] = remote_refs["HEAD"]
594 repo["HEAD"] = remote_refs["HEAD"]
595 index.build_index_from_tree(repo.path, repo.index_path(),
595 index.build_index_from_tree(repo.path, repo.index_path(),
596 repo.object_store, repo["HEAD"].tree)
596 repo.object_store, repo["HEAD"].tree)
597 return remote_refs
597 return remote_refs
598
598
599 @reraise_safe_exceptions
599 @reraise_safe_exceptions
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
602 if refs and not isinstance(refs, (list, tuple)):
602 if refs and not isinstance(refs, (list, tuple)):
603 refs = [refs]
603 refs = [refs]
604
604
605 config = self._wire_to_config(wire)
605 config = self._wire_to_config(wire)
606 # get all remote refs we'll use to fetch later
606 # get all remote refs we'll use to fetch later
607 cmd = ['ls-remote']
607 cmd = ['ls-remote']
608 if not all_refs:
608 if not all_refs:
609 cmd += ['--heads', '--tags']
609 cmd += ['--heads', '--tags']
610 cmd += [url]
610 cmd += [url]
611 output, __ = self.run_git_command(
611 output, __ = self.run_git_command(
612 wire, cmd, fail_on_stderr=False,
612 wire, cmd, fail_on_stderr=False,
613 _copts=self._remote_conf(config),
613 _copts=self._remote_conf(config),
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615
615
616 remote_refs = collections.OrderedDict()
616 remote_refs = collections.OrderedDict()
617 fetch_refs = []
617 fetch_refs = []
618
618
619 for ref_line in output.splitlines():
619 for ref_line in output.splitlines():
620 sha, ref = ref_line.split('\t')
620 sha, ref = ref_line.split('\t')
621 sha = sha.strip()
621 sha = sha.strip()
622 if ref in remote_refs:
622 if ref in remote_refs:
623 # duplicate, skip
623 # duplicate, skip
624 continue
624 continue
625 if ref.endswith(PEELED_REF_MARKER):
625 if ref.endswith(PEELED_REF_MARKER):
626 log.debug("Skipping peeled reference %s", ref)
626 log.debug("Skipping peeled reference %s", ref)
627 continue
627 continue
628 # don't sync HEAD
628 # don't sync HEAD
629 if ref in ['HEAD']:
629 if ref in ['HEAD']:
630 continue
630 continue
631
631
632 remote_refs[ref] = sha
632 remote_refs[ref] = sha
633
633
634 if refs and sha in refs:
634 if refs and sha in refs:
635 # we filter fetch using our specified refs
635 # we filter fetch using our specified refs
636 fetch_refs.append('{}:{}'.format(ref, ref))
636 fetch_refs.append('{}:{}'.format(ref, ref))
637 elif not refs:
637 elif not refs:
638 fetch_refs.append('{}:{}'.format(ref, ref))
638 fetch_refs.append('{}:{}'.format(ref, ref))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640
640
641 if fetch_refs:
641 if fetch_refs:
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 fetch_refs_chunks = list(chunk)
643 fetch_refs_chunks = list(chunk)
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 _out, _err = self.run_git_command(
645 _out, _err = self.run_git_command(
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 fail_on_stderr=False,
647 fail_on_stderr=False,
648 _copts=self._remote_conf(config),
648 _copts=self._remote_conf(config),
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650
650
651 return remote_refs
651 return remote_refs
652
652
653 @reraise_safe_exceptions
653 @reraise_safe_exceptions
654 def sync_push(self, wire, url, refs=None):
654 def sync_push(self, wire, url, refs=None):
655 if not self.check_url(url, wire):
655 if not self.check_url(url, wire):
656 return
656 return
657 config = self._wire_to_config(wire)
657 config = self._wire_to_config(wire)
658 self._factory.repo(wire)
658 self._factory.repo(wire)
659 self.run_git_command(
659 self.run_git_command(
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 _copts=self._remote_conf(config),
661 _copts=self._remote_conf(config),
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663
663
664 @reraise_safe_exceptions
664 @reraise_safe_exceptions
665 def get_remote_refs(self, wire, url):
665 def get_remote_refs(self, wire, url):
666 repo = Repo(url)
666 repo = Repo(url)
667 return repo.get_refs()
667 return repo.get_refs()
668
668
669 @reraise_safe_exceptions
669 @reraise_safe_exceptions
670 def get_description(self, wire):
670 def get_description(self, wire):
671 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
672 return repo.get_description()
672 return repo.get_description()
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def get_missing_revs(self, wire, rev1, rev2, path2):
675 def get_missing_revs(self, wire, rev1, rev2, path2):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
678
678
679 wire_remote = wire.copy()
679 wire_remote = wire.copy()
680 wire_remote['path'] = path2
680 wire_remote['path'] = path2
681 repo_remote = self._factory.repo(wire_remote)
681 repo_remote = self._factory.repo(wire_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683
683
684 revs = [
684 revs = [
685 x.commit.id
685 x.commit.id
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 return revs
687 return revs
688
688
689 @reraise_safe_exceptions
689 @reraise_safe_exceptions
690 def get_object(self, wire, sha, maybe_unreachable=False):
690 def get_object(self, wire, sha, maybe_unreachable=False):
691 cache_on, context_uid, repo_id = self._cache_on(wire)
691 cache_on, context_uid, repo_id = self._cache_on(wire)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
693 def _get_object(_context_uid, _repo_id, _sha):
693 def _get_object(_context_uid, _repo_id, _sha):
694 repo_init = self._factory.repo_libgit2(wire)
694 repo_init = self._factory.repo_libgit2(wire)
695 with repo_init as repo:
695 with repo_init as repo:
696
696
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
698 try:
698 try:
699 commit = repo.revparse_single(sha)
699 commit = repo.revparse_single(sha)
700 except KeyError:
700 except KeyError:
701 # NOTE(marcink): KeyError doesn't give us any meaningful information
701 # NOTE(marcink): KeyError doesn't give us any meaningful information
702 # here, we instead give something more explicit
702 # here, we instead give something more explicit
703 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
703 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
704 raise exceptions.LookupException(e)(missing_commit_err)
704 raise exceptions.LookupException(e)(missing_commit_err)
705 except ValueError as e:
705 except ValueError as e:
706 raise exceptions.LookupException(e)(missing_commit_err)
706 raise exceptions.LookupException(e)(missing_commit_err)
707
707
708 is_tag = False
708 is_tag = False
709 if isinstance(commit, pygit2.Tag):
709 if isinstance(commit, pygit2.Tag):
710 commit = repo.get(commit.target)
710 commit = repo.get(commit.target)
711 is_tag = True
711 is_tag = True
712
712
713 check_dangling = True
713 check_dangling = True
714 if is_tag:
714 if is_tag:
715 check_dangling = False
715 check_dangling = False
716
716
717 if check_dangling and maybe_unreachable:
717 if check_dangling and maybe_unreachable:
718 check_dangling = False
718 check_dangling = False
719
719
720 # we used a reference and it parsed means we're not having a dangling commit
720 # we used a reference and it parsed means we're not having a dangling commit
721 if sha != commit.hex:
721 if sha != commit.hex:
722 check_dangling = False
722 check_dangling = False
723
723
724 if check_dangling:
724 if check_dangling:
725 # check for dangling commit
725 # check for dangling commit
726 for branch in repo.branches.with_commit(commit.hex):
726 for branch in repo.branches.with_commit(commit.hex):
727 if branch:
727 if branch:
728 break
728 break
729 else:
729 else:
730 # NOTE(marcink): Empty error doesn't give us any meaningful information
730 # NOTE(marcink): Empty error doesn't give us any meaningful information
731 # here, we instead give something more explicit
731 # here, we instead give something more explicit
732 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
732 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
733 raise exceptions.LookupException(e)(missing_commit_err)
733 raise exceptions.LookupException(e)(missing_commit_err)
734
734
735 commit_id = commit.hex
735 commit_id = commit.hex
736 type_id = commit.type
736 type_id = commit.type_str
737
737
738 return {
738 return {
739 'id': commit_id,
739 'id': commit_id,
740 'type': self._type_id_to_name(type_id),
740 'type': self._type_id_to_name(type_id),
741 'commit_id': commit_id,
741 'commit_id': commit_id,
742 'idx': 0
742 'idx': 0
743 }
743 }
744
744
745 return _get_object(context_uid, repo_id, sha)
745 return _get_object(context_uid, repo_id, sha)
746
746
747 @reraise_safe_exceptions
747 @reraise_safe_exceptions
748 def get_refs(self, wire):
748 def get_refs(self, wire):
749 cache_on, context_uid, repo_id = self._cache_on(wire)
749 cache_on, context_uid, repo_id = self._cache_on(wire)
750 @self.region.conditional_cache_on_arguments(condition=cache_on)
750 @self.region.conditional_cache_on_arguments(condition=cache_on)
751 def _get_refs(_context_uid, _repo_id):
751 def _get_refs(_context_uid, _repo_id):
752
752
753 repo_init = self._factory.repo_libgit2(wire)
753 repo_init = self._factory.repo_libgit2(wire)
754 with repo_init as repo:
754 with repo_init as repo:
755 regex = re.compile('^refs/(heads|tags)/')
755 regex = re.compile('^refs/(heads|tags)/')
756 return {x.name: x.target.hex for x in
756 return {x.name: x.target.hex for x in
757 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
757 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
758
758
759 return _get_refs(context_uid, repo_id)
759 return _get_refs(context_uid, repo_id)
760
760
761 @reraise_safe_exceptions
761 @reraise_safe_exceptions
762 def get_branch_pointers(self, wire):
762 def get_branch_pointers(self, wire):
763 cache_on, context_uid, repo_id = self._cache_on(wire)
763 cache_on, context_uid, repo_id = self._cache_on(wire)
764 @self.region.conditional_cache_on_arguments(condition=cache_on)
764 @self.region.conditional_cache_on_arguments(condition=cache_on)
765 def _get_branch_pointers(_context_uid, _repo_id):
765 def _get_branch_pointers(_context_uid, _repo_id):
766
766
767 repo_init = self._factory.repo_libgit2(wire)
767 repo_init = self._factory.repo_libgit2(wire)
768 regex = re.compile('^refs/heads')
768 regex = re.compile('^refs/heads')
769 with repo_init as repo:
769 with repo_init as repo:
770 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
770 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
771 return {x.target.hex: x.shorthand for x in branches}
771 return {x.target.hex: x.shorthand for x in branches}
772
772
773 return _get_branch_pointers(context_uid, repo_id)
773 return _get_branch_pointers(context_uid, repo_id)
774
774
775 @reraise_safe_exceptions
775 @reraise_safe_exceptions
776 def head(self, wire, show_exc=True):
776 def head(self, wire, show_exc=True):
777 cache_on, context_uid, repo_id = self._cache_on(wire)
777 cache_on, context_uid, repo_id = self._cache_on(wire)
778 @self.region.conditional_cache_on_arguments(condition=cache_on)
778 @self.region.conditional_cache_on_arguments(condition=cache_on)
779 def _head(_context_uid, _repo_id, _show_exc):
779 def _head(_context_uid, _repo_id, _show_exc):
780 repo_init = self._factory.repo_libgit2(wire)
780 repo_init = self._factory.repo_libgit2(wire)
781 with repo_init as repo:
781 with repo_init as repo:
782 try:
782 try:
783 return repo.head.peel().hex
783 return repo.head.peel().hex
784 except Exception:
784 except Exception:
785 if show_exc:
785 if show_exc:
786 raise
786 raise
787 return _head(context_uid, repo_id, show_exc)
787 return _head(context_uid, repo_id, show_exc)
788
788
789 @reraise_safe_exceptions
789 @reraise_safe_exceptions
790 def init(self, wire):
790 def init(self, wire):
791 repo_path = str_to_dulwich(wire['path'])
791 repo_path = str_to_dulwich(wire['path'])
792 self.repo = Repo.init(repo_path)
792 self.repo = Repo.init(repo_path)
793
793
794 @reraise_safe_exceptions
794 @reraise_safe_exceptions
795 def init_bare(self, wire):
795 def init_bare(self, wire):
796 repo_path = str_to_dulwich(wire['path'])
796 repo_path = str_to_dulwich(wire['path'])
797 self.repo = Repo.init_bare(repo_path)
797 self.repo = Repo.init_bare(repo_path)
798
798
799 @reraise_safe_exceptions
799 @reraise_safe_exceptions
800 def revision(self, wire, rev):
800 def revision(self, wire, rev):
801
801
802 cache_on, context_uid, repo_id = self._cache_on(wire)
802 cache_on, context_uid, repo_id = self._cache_on(wire)
803 @self.region.conditional_cache_on_arguments(condition=cache_on)
803 @self.region.conditional_cache_on_arguments(condition=cache_on)
804 def _revision(_context_uid, _repo_id, _rev):
804 def _revision(_context_uid, _repo_id, _rev):
805 repo_init = self._factory.repo_libgit2(wire)
805 repo_init = self._factory.repo_libgit2(wire)
806 with repo_init as repo:
806 with repo_init as repo:
807 commit = repo[rev]
807 commit = repo[rev]
808 obj_data = {
808 obj_data = {
809 'id': commit.id.hex,
809 'id': commit.id.hex,
810 }
810 }
811 # tree objects itself don't have tree_id attribute
811 # tree objects itself don't have tree_id attribute
812 if hasattr(commit, 'tree_id'):
812 if hasattr(commit, 'tree_id'):
813 obj_data['tree'] = commit.tree_id.hex
813 obj_data['tree'] = commit.tree_id.hex
814
814
815 return obj_data
815 return obj_data
816 return _revision(context_uid, repo_id, rev)
816 return _revision(context_uid, repo_id, rev)
817
817
818 @reraise_safe_exceptions
818 @reraise_safe_exceptions
819 def date(self, wire, commit_id):
819 def date(self, wire, commit_id):
820 cache_on, context_uid, repo_id = self._cache_on(wire)
820 cache_on, context_uid, repo_id = self._cache_on(wire)
821 @self.region.conditional_cache_on_arguments(condition=cache_on)
821 @self.region.conditional_cache_on_arguments(condition=cache_on)
822 def _date(_repo_id, _commit_id):
822 def _date(_repo_id, _commit_id):
823 repo_init = self._factory.repo_libgit2(wire)
823 repo_init = self._factory.repo_libgit2(wire)
824 with repo_init as repo:
824 with repo_init as repo:
825 commit = repo[commit_id]
825 commit = repo[commit_id]
826
826
827 if hasattr(commit, 'commit_time'):
827 if hasattr(commit, 'commit_time'):
828 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
828 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
829 else:
829 else:
830 commit = commit.get_object()
830 commit = commit.get_object()
831 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
831 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
832
832
833 # TODO(marcink): check dulwich difference of offset vs timezone
833 # TODO(marcink): check dulwich difference of offset vs timezone
834 return [commit_time, commit_time_offset]
834 return [commit_time, commit_time_offset]
835 return _date(repo_id, commit_id)
835 return _date(repo_id, commit_id)
836
836
837 @reraise_safe_exceptions
837 @reraise_safe_exceptions
838 def author(self, wire, commit_id):
838 def author(self, wire, commit_id):
839 cache_on, context_uid, repo_id = self._cache_on(wire)
839 cache_on, context_uid, repo_id = self._cache_on(wire)
840 @self.region.conditional_cache_on_arguments(condition=cache_on)
840 @self.region.conditional_cache_on_arguments(condition=cache_on)
841 def _author(_repo_id, _commit_id):
841 def _author(_repo_id, _commit_id):
842 repo_init = self._factory.repo_libgit2(wire)
842 repo_init = self._factory.repo_libgit2(wire)
843 with repo_init as repo:
843 with repo_init as repo:
844 commit = repo[commit_id]
844 commit = repo[commit_id]
845
845
846 if hasattr(commit, 'author'):
846 if hasattr(commit, 'author'):
847 author = commit.author
847 author = commit.author
848 else:
848 else:
849 author = commit.get_object().author
849 author = commit.get_object().author
850
850
851 if author.email:
851 if author.email:
852 return u"{} <{}>".format(author.name, author.email)
852 return "{} <{}>".format(author.name, author.email)
853
853
854 try:
854 try:
855 return u"{}".format(author.name)
855 return "{}".format(author.name)
856 except Exception:
856 except Exception:
857 return u"{}".format(safe_unicode(author.raw_name))
857 return "{}".format(safe_unicode(author.raw_name))
858
858
859 return _author(repo_id, commit_id)
859 return _author(repo_id, commit_id)
860
860
861 @reraise_safe_exceptions
861 @reraise_safe_exceptions
862 def message(self, wire, commit_id):
862 def message(self, wire, commit_id):
863 cache_on, context_uid, repo_id = self._cache_on(wire)
863 cache_on, context_uid, repo_id = self._cache_on(wire)
864 @self.region.conditional_cache_on_arguments(condition=cache_on)
864 @self.region.conditional_cache_on_arguments(condition=cache_on)
865 def _message(_repo_id, _commit_id):
865 def _message(_repo_id, _commit_id):
866 repo_init = self._factory.repo_libgit2(wire)
866 repo_init = self._factory.repo_libgit2(wire)
867 with repo_init as repo:
867 with repo_init as repo:
868 commit = repo[commit_id]
868 commit = repo[commit_id]
869 return commit.message
869 return commit.message
870 return _message(repo_id, commit_id)
870 return _message(repo_id, commit_id)
871
871
872 @reraise_safe_exceptions
872 @reraise_safe_exceptions
873 def parents(self, wire, commit_id):
873 def parents(self, wire, commit_id):
874 cache_on, context_uid, repo_id = self._cache_on(wire)
874 cache_on, context_uid, repo_id = self._cache_on(wire)
875 @self.region.conditional_cache_on_arguments(condition=cache_on)
875 @self.region.conditional_cache_on_arguments(condition=cache_on)
876 def _parents(_repo_id, _commit_id):
876 def _parents(_repo_id, _commit_id):
877 repo_init = self._factory.repo_libgit2(wire)
877 repo_init = self._factory.repo_libgit2(wire)
878 with repo_init as repo:
878 with repo_init as repo:
879 commit = repo[commit_id]
879 commit = repo[commit_id]
880 if hasattr(commit, 'parent_ids'):
880 if hasattr(commit, 'parent_ids'):
881 parent_ids = commit.parent_ids
881 parent_ids = commit.parent_ids
882 else:
882 else:
883 parent_ids = commit.get_object().parent_ids
883 parent_ids = commit.get_object().parent_ids
884
884
885 return [x.hex for x in parent_ids]
885 return [x.hex for x in parent_ids]
886 return _parents(repo_id, commit_id)
886 return _parents(repo_id, commit_id)
887
887
888 @reraise_safe_exceptions
888 @reraise_safe_exceptions
889 def children(self, wire, commit_id):
889 def children(self, wire, commit_id):
890 cache_on, context_uid, repo_id = self._cache_on(wire)
890 cache_on, context_uid, repo_id = self._cache_on(wire)
891 @self.region.conditional_cache_on_arguments(condition=cache_on)
891 @self.region.conditional_cache_on_arguments(condition=cache_on)
892 def _children(_repo_id, _commit_id):
892 def _children(_repo_id, _commit_id):
893 output, __ = self.run_git_command(
893 output, __ = self.run_git_command(
894 wire, ['rev-list', '--all', '--children'])
894 wire, ['rev-list', '--all', '--children'])
895
895
896 child_ids = []
896 child_ids = []
897 pat = re.compile(r'^%s' % commit_id)
897 pat = re.compile(r'^%s' % commit_id)
898 for l in output.splitlines():
898 for l in output.splitlines():
899 if pat.match(l):
899 if pat.match(l):
900 found_ids = l.split(' ')[1:]
900 found_ids = l.split(' ')[1:]
901 child_ids.extend(found_ids)
901 child_ids.extend(found_ids)
902
902
903 return child_ids
903 return child_ids
904 return _children(repo_id, commit_id)
904 return _children(repo_id, commit_id)
905
905
906 @reraise_safe_exceptions
906 @reraise_safe_exceptions
907 def set_refs(self, wire, key, value):
907 def set_refs(self, wire, key, value):
908 repo_init = self._factory.repo_libgit2(wire)
908 repo_init = self._factory.repo_libgit2(wire)
909 with repo_init as repo:
909 with repo_init as repo:
910 repo.references.create(key, value, force=True)
910 repo.references.create(key, value, force=True)
911
911
912 @reraise_safe_exceptions
912 @reraise_safe_exceptions
913 def create_branch(self, wire, branch_name, commit_id, force=False):
913 def create_branch(self, wire, branch_name, commit_id, force=False):
914 repo_init = self._factory.repo_libgit2(wire)
914 repo_init = self._factory.repo_libgit2(wire)
915 with repo_init as repo:
915 with repo_init as repo:
916 commit = repo[commit_id]
916 commit = repo[commit_id]
917
917
918 if force:
918 if force:
919 repo.branches.local.create(branch_name, commit, force=force)
919 repo.branches.local.create(branch_name, commit, force=force)
920 elif not repo.branches.get(branch_name):
920 elif not repo.branches.get(branch_name):
921 # create only if that branch isn't existing
921 # create only if that branch isn't existing
922 repo.branches.local.create(branch_name, commit, force=force)
922 repo.branches.local.create(branch_name, commit, force=force)
923
923
924 @reraise_safe_exceptions
924 @reraise_safe_exceptions
925 def remove_ref(self, wire, key):
925 def remove_ref(self, wire, key):
926 repo_init = self._factory.repo_libgit2(wire)
926 repo_init = self._factory.repo_libgit2(wire)
927 with repo_init as repo:
927 with repo_init as repo:
928 repo.references.delete(key)
928 repo.references.delete(key)
929
929
930 @reraise_safe_exceptions
930 @reraise_safe_exceptions
931 def tag_remove(self, wire, tag_name):
931 def tag_remove(self, wire, tag_name):
932 repo_init = self._factory.repo_libgit2(wire)
932 repo_init = self._factory.repo_libgit2(wire)
933 with repo_init as repo:
933 with repo_init as repo:
934 key = 'refs/tags/{}'.format(tag_name)
934 key = 'refs/tags/{}'.format(tag_name)
935 repo.references.delete(key)
935 repo.references.delete(key)
936
936
937 @reraise_safe_exceptions
937 @reraise_safe_exceptions
938 def tree_changes(self, wire, source_id, target_id):
938 def tree_changes(self, wire, source_id, target_id):
939 # TODO(marcink): remove this seems it's only used by tests
939 # TODO(marcink): remove this seems it's only used by tests
940 repo = self._factory.repo(wire)
940 repo = self._factory.repo(wire)
941 source = repo[source_id].tree if source_id else None
941 source = repo[source_id].tree if source_id else None
942 target = repo[target_id].tree
942 target = repo[target_id].tree
943 result = repo.object_store.tree_changes(source, target)
943 result = repo.object_store.tree_changes(source, target)
944 return list(result)
944 return list(result)
945
945
946 @reraise_safe_exceptions
946 @reraise_safe_exceptions
947 def tree_and_type_for_path(self, wire, commit_id, path):
947 def tree_and_type_for_path(self, wire, commit_id, path):
948
948
949 cache_on, context_uid, repo_id = self._cache_on(wire)
949 cache_on, context_uid, repo_id = self._cache_on(wire)
950 @self.region.conditional_cache_on_arguments(condition=cache_on)
950 @self.region.conditional_cache_on_arguments(condition=cache_on)
951 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
951 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
952 repo_init = self._factory.repo_libgit2(wire)
952 repo_init = self._factory.repo_libgit2(wire)
953
953
954 with repo_init as repo:
954 with repo_init as repo:
955 commit = repo[commit_id]
955 commit = repo[commit_id]
956 try:
956 try:
957 tree = commit.tree[path]
957 tree = commit.tree[path]
958 except KeyError:
958 except KeyError:
959 return None, None, None
959 return None, None, None
960
960
961 return tree.id.hex, tree.type, tree.filemode
961 return tree.id.hex, tree.type_str, tree.filemode
962 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
962 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
963
963
964 @reraise_safe_exceptions
964 @reraise_safe_exceptions
965 def tree_items(self, wire, tree_id):
965 def tree_items(self, wire, tree_id):
966 cache_on, context_uid, repo_id = self._cache_on(wire)
966 cache_on, context_uid, repo_id = self._cache_on(wire)
967 @self.region.conditional_cache_on_arguments(condition=cache_on)
967 @self.region.conditional_cache_on_arguments(condition=cache_on)
968 def _tree_items(_repo_id, _tree_id):
968 def _tree_items(_repo_id, _tree_id):
969
969
970 repo_init = self._factory.repo_libgit2(wire)
970 repo_init = self._factory.repo_libgit2(wire)
971 with repo_init as repo:
971 with repo_init as repo:
972 try:
972 try:
973 tree = repo[tree_id]
973 tree = repo[tree_id]
974 except KeyError:
974 except KeyError:
975 raise ObjectMissing('No tree with id: {}'.format(tree_id))
975 raise ObjectMissing('No tree with id: {}'.format(tree_id))
976
976
977 result = []
977 result = []
978 for item in tree:
978 for item in tree:
979 item_sha = item.hex
979 item_sha = item.hex
980 item_mode = item.filemode
980 item_mode = item.filemode
981 item_type = item.type
981 item_type = item.type_str
982
982
983 if item_type == 'commit':
983 if item_type == 'commit':
984 # NOTE(marcink): submodules we translate to 'link' for backward compat
984 # NOTE(marcink): submodules we translate to 'link' for backward compat
985 item_type = 'link'
985 item_type = 'link'
986
986
987 result.append((item.name, item_mode, item_sha, item_type))
987 result.append((item.name, item_mode, item_sha, item_type))
988 return result
988 return result
989 return _tree_items(repo_id, tree_id)
989 return _tree_items(repo_id, tree_id)
990
990
991 @reraise_safe_exceptions
991 @reraise_safe_exceptions
992 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
992 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
993 """
993 """
994 Old version that uses subprocess to call diff
994 Old version that uses subprocess to call diff
995 """
995 """
996
996
997 flags = [
997 flags = [
998 '-U%s' % context, '--patch',
998 '-U%s' % context, '--patch',
999 '--binary',
999 '--binary',
1000 '--find-renames',
1000 '--find-renames',
1001 '--no-indent-heuristic',
1001 '--no-indent-heuristic',
1002 # '--indent-heuristic',
1002 # '--indent-heuristic',
1003 #'--full-index',
1003 #'--full-index',
1004 #'--abbrev=40'
1004 #'--abbrev=40'
1005 ]
1005 ]
1006
1006
1007 if opt_ignorews:
1007 if opt_ignorews:
1008 flags.append('--ignore-all-space')
1008 flags.append('--ignore-all-space')
1009
1009
1010 if commit_id_1 == self.EMPTY_COMMIT:
1010 if commit_id_1 == self.EMPTY_COMMIT:
1011 cmd = ['show'] + flags + [commit_id_2]
1011 cmd = ['show'] + flags + [commit_id_2]
1012 else:
1012 else:
1013 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1013 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1014
1014
1015 if file_filter:
1015 if file_filter:
1016 cmd.extend(['--', file_filter])
1016 cmd.extend(['--', file_filter])
1017
1017
1018 diff, __ = self.run_git_command(wire, cmd)
1018 diff, __ = self.run_git_command(wire, cmd)
1019 # If we used 'show' command, strip first few lines (until actual diff
1019 # If we used 'show' command, strip first few lines (until actual diff
1020 # starts)
1020 # starts)
1021 if commit_id_1 == self.EMPTY_COMMIT:
1021 if commit_id_1 == self.EMPTY_COMMIT:
1022 lines = diff.splitlines()
1022 lines = diff.splitlines()
1023 x = 0
1023 x = 0
1024 for line in lines:
1024 for line in lines:
1025 if line.startswith('diff'):
1025 if line.startswith('diff'):
1026 break
1026 break
1027 x += 1
1027 x += 1
1028 # Append new line just like 'diff' command do
1028 # Append new line just like 'diff' command do
1029 diff = '\n'.join(lines[x:]) + '\n'
1029 diff = '\n'.join(lines[x:]) + '\n'
1030 return diff
1030 return diff
1031
1031
1032 @reraise_safe_exceptions
1032 @reraise_safe_exceptions
1033 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1033 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1034 repo_init = self._factory.repo_libgit2(wire)
1034 repo_init = self._factory.repo_libgit2(wire)
1035 with repo_init as repo:
1035 with repo_init as repo:
1036 swap = True
1036 swap = True
1037 flags = 0
1037 flags = 0
1038 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1038 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1039
1039
1040 if opt_ignorews:
1040 if opt_ignorews:
1041 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1041 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1042
1042
1043 if commit_id_1 == self.EMPTY_COMMIT:
1043 if commit_id_1 == self.EMPTY_COMMIT:
1044 comm1 = repo[commit_id_2]
1044 comm1 = repo[commit_id_2]
1045 diff_obj = comm1.tree.diff_to_tree(
1045 diff_obj = comm1.tree.diff_to_tree(
1046 flags=flags, context_lines=context, swap=swap)
1046 flags=flags, context_lines=context, swap=swap)
1047
1047
1048 else:
1048 else:
1049 comm1 = repo[commit_id_2]
1049 comm1 = repo[commit_id_2]
1050 comm2 = repo[commit_id_1]
1050 comm2 = repo[commit_id_1]
1051 diff_obj = comm1.tree.diff_to_tree(
1051 diff_obj = comm1.tree.diff_to_tree(
1052 comm2.tree, flags=flags, context_lines=context, swap=swap)
1052 comm2.tree, flags=flags, context_lines=context, swap=swap)
1053 similar_flags = 0
1053 similar_flags = 0
1054 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1054 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1055 diff_obj.find_similar(flags=similar_flags)
1055 diff_obj.find_similar(flags=similar_flags)
1056
1056
1057 if file_filter:
1057 if file_filter:
1058 for p in diff_obj:
1058 for p in diff_obj:
1059 if p.delta.old_file.path == file_filter:
1059 if p.delta.old_file.path == file_filter:
1060 return p.patch or ''
1060 return p.patch or ''
1061 # fo matching path == no diff
1061 # fo matching path == no diff
1062 return ''
1062 return ''
1063 return diff_obj.patch or ''
1063 return diff_obj.patch or ''
1064
1064
1065 @reraise_safe_exceptions
1065 @reraise_safe_exceptions
1066 def node_history(self, wire, commit_id, path, limit):
1066 def node_history(self, wire, commit_id, path, limit):
1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1070 # optimize for n==1, rev-list is much faster for that use-case
1070 # optimize for n==1, rev-list is much faster for that use-case
1071 if limit == 1:
1071 if limit == 1:
1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1073 else:
1073 else:
1074 cmd = ['log']
1074 cmd = ['log']
1075 if limit:
1075 if limit:
1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1078
1078
1079 output, __ = self.run_git_command(wire, cmd)
1079 output, __ = self.run_git_command(wire, cmd)
1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1081
1081
1082 return [x for x in commit_ids]
1082 return [x for x in commit_ids]
1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1084
1084
1085 @reraise_safe_exceptions
1085 @reraise_safe_exceptions
1086 def node_annotate(self, wire, commit_id, path):
1086 def node_annotate(self, wire, commit_id, path):
1087
1087
1088 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1088 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1089 # -l ==> outputs long shas (and we need all 40 characters)
1089 # -l ==> outputs long shas (and we need all 40 characters)
1090 # --root ==> doesn't put '^' character for boundaries
1090 # --root ==> doesn't put '^' character for boundaries
1091 # -r commit_id ==> blames for the given commit
1091 # -r commit_id ==> blames for the given commit
1092 output, __ = self.run_git_command(wire, cmd)
1092 output, __ = self.run_git_command(wire, cmd)
1093
1093
1094 result = []
1094 result = []
1095 for i, blame_line in enumerate(output.split('\n')[:-1]):
1095 for i, blame_line in enumerate(output.split('\n')[:-1]):
1096 line_no = i + 1
1096 line_no = i + 1
1097 commit_id, line = re.split(r' ', blame_line, 1)
1097 commit_id, line = re.split(r' ', blame_line, 1)
1098 result.append((line_no, commit_id, line))
1098 result.append((line_no, commit_id, line))
1099 return result
1099 return result
1100
1100
1101 @reraise_safe_exceptions
1101 @reraise_safe_exceptions
1102 def update_server_info(self, wire):
1102 def update_server_info(self, wire):
1103 repo = self._factory.repo(wire)
1103 repo = self._factory.repo(wire)
1104 update_server_info(repo)
1104 update_server_info(repo)
1105
1105
1106 @reraise_safe_exceptions
1106 @reraise_safe_exceptions
1107 def get_all_commit_ids(self, wire):
1107 def get_all_commit_ids(self, wire):
1108
1108
1109 cache_on, context_uid, repo_id = self._cache_on(wire)
1109 cache_on, context_uid, repo_id = self._cache_on(wire)
1110 @self.region.conditional_cache_on_arguments(condition=cache_on)
1110 @self.region.conditional_cache_on_arguments(condition=cache_on)
1111 def _get_all_commit_ids(_context_uid, _repo_id):
1111 def _get_all_commit_ids(_context_uid, _repo_id):
1112
1112
1113 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1113 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1114 try:
1114 try:
1115 output, __ = self.run_git_command(wire, cmd)
1115 output, __ = self.run_git_command(wire, cmd)
1116 return output.splitlines()
1116 return output.splitlines()
1117 except Exception:
1117 except Exception:
1118 # Can be raised for empty repositories
1118 # Can be raised for empty repositories
1119 return []
1119 return []
1120 return _get_all_commit_ids(context_uid, repo_id)
1120 return _get_all_commit_ids(context_uid, repo_id)
1121
1121
1122 @reraise_safe_exceptions
1122 @reraise_safe_exceptions
1123 def run_git_command(self, wire, cmd, **opts):
1123 def run_git_command(self, wire, cmd, **opts):
1124 path = wire.get('path', None)
1124 path = wire.get('path', None)
1125
1125
1126 if path and os.path.isdir(path):
1126 if path and os.path.isdir(path):
1127 opts['cwd'] = path
1127 opts['cwd'] = path
1128
1128
1129 if '_bare' in opts:
1129 if '_bare' in opts:
1130 _copts = []
1130 _copts = []
1131 del opts['_bare']
1131 del opts['_bare']
1132 else:
1132 else:
1133 _copts = ['-c', 'core.quotepath=false', ]
1133 _copts = ['-c', 'core.quotepath=false', ]
1134 safe_call = False
1134 safe_call = False
1135 if '_safe' in opts:
1135 if '_safe' in opts:
1136 # no exc on failure
1136 # no exc on failure
1137 del opts['_safe']
1137 del opts['_safe']
1138 safe_call = True
1138 safe_call = True
1139
1139
1140 if '_copts' in opts:
1140 if '_copts' in opts:
1141 _copts.extend(opts['_copts'] or [])
1141 _copts.extend(opts['_copts'] or [])
1142 del opts['_copts']
1142 del opts['_copts']
1143
1143
1144 gitenv = os.environ.copy()
1144 gitenv = os.environ.copy()
1145 gitenv.update(opts.pop('extra_env', {}))
1145 gitenv.update(opts.pop('extra_env', {}))
1146 # need to clean fix GIT_DIR !
1146 # need to clean fix GIT_DIR !
1147 if 'GIT_DIR' in gitenv:
1147 if 'GIT_DIR' in gitenv:
1148 del gitenv['GIT_DIR']
1148 del gitenv['GIT_DIR']
1149 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1149 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1150 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1150 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1151
1151
1152 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1152 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1153 _opts = {'env': gitenv, 'shell': False}
1153 _opts = {'env': gitenv, 'shell': False}
1154
1154
1155 proc = None
1155 proc = None
1156 try:
1156 try:
1157 _opts.update(opts)
1157 _opts.update(opts)
1158 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1158 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1159
1159
1160 return ''.join(proc), ''.join(proc.error)
1160 return ''.join(proc), ''.join(proc.error)
1161 except (EnvironmentError, OSError) as err:
1161 except (EnvironmentError, OSError) as err:
1162 cmd = ' '.join(cmd) # human friendly CMD
1162 cmd = ' '.join(cmd) # human friendly CMD
1163 tb_err = ("Couldn't run git command (%s).\n"
1163 tb_err = ("Couldn't run git command (%s).\n"
1164 "Original error was:%s\n"
1164 "Original error was:%s\n"
1165 "Call options:%s\n"
1165 "Call options:%s\n"
1166 % (cmd, err, _opts))
1166 % (cmd, err, _opts))
1167 log.exception(tb_err)
1167 log.exception(tb_err)
1168 if safe_call:
1168 if safe_call:
1169 return '', err
1169 return '', err
1170 else:
1170 else:
1171 raise exceptions.VcsException()(tb_err)
1171 raise exceptions.VcsException()(tb_err)
1172 finally:
1172 finally:
1173 if proc:
1173 if proc:
1174 proc.close()
1174 proc.close()
1175
1175
1176 @reraise_safe_exceptions
1176 @reraise_safe_exceptions
1177 def install_hooks(self, wire, force=False):
1177 def install_hooks(self, wire, force=False):
1178 from vcsserver.hook_utils import install_git_hooks
1178 from vcsserver.hook_utils import install_git_hooks
1179 bare = self.bare(wire)
1179 bare = self.bare(wire)
1180 path = wire['path']
1180 path = wire['path']
1181 return install_git_hooks(path, bare, force_create=force)
1181 return install_git_hooks(path, bare, force_create=force)
1182
1182
1183 @reraise_safe_exceptions
1183 @reraise_safe_exceptions
1184 def get_hooks_info(self, wire):
1184 def get_hooks_info(self, wire):
1185 from vcsserver.hook_utils import (
1185 from vcsserver.hook_utils import (
1186 get_git_pre_hook_version, get_git_post_hook_version)
1186 get_git_pre_hook_version, get_git_post_hook_version)
1187 bare = self.bare(wire)
1187 bare = self.bare(wire)
1188 path = wire['path']
1188 path = wire['path']
1189 return {
1189 return {
1190 'pre_version': get_git_pre_hook_version(path, bare),
1190 'pre_version': get_git_pre_hook_version(path, bare),
1191 'post_version': get_git_post_hook_version(path, bare),
1191 'post_version': get_git_post_hook_version(path, bare),
1192 }
1192 }
@@ -1,19 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from app import create_app
19 from .app import create_app
@@ -1,1009 +1,1009 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib.request, urllib.parse, urllib.error
22 import urllib2
22 import urllib.request, urllib.error, urllib.parse
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase, purge
25 from hgext import largefiles, rebase, purge
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30 from mercurial import repair
30 from mercurial import repair
31
31
32 import vcsserver
32 import vcsserver
33 from vcsserver import exceptions
33 from vcsserver import exceptions
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 from vcsserver.hgcompat import (
35 from vcsserver.hgcompat import (
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 RepoLookupError, InterventionRequired, RequirementError)
40 RepoLookupError, InterventionRequired, RequirementError)
41 from vcsserver.vcs_base import RemoteBase
41 from vcsserver.vcs_base import RemoteBase
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def make_ui_from_config(repo_config):
46 def make_ui_from_config(repo_config):
47
47
48 class LoggingUI(ui.ui):
48 class LoggingUI(ui.ui):
49 def status(self, *msg, **opts):
49 def status(self, *msg, **opts):
50 log.info(' '.join(msg).rstrip('\n'))
50 log.info(' '.join(msg).rstrip('\n'))
51 super(LoggingUI, self).status(*msg, **opts)
51 super(LoggingUI, self).status(*msg, **opts)
52
52
53 def warn(self, *msg, **opts):
53 def warn(self, *msg, **opts):
54 log.warn(' '.join(msg).rstrip('\n'))
54 log.warn(' '.join(msg).rstrip('\n'))
55 super(LoggingUI, self).warn(*msg, **opts)
55 super(LoggingUI, self).warn(*msg, **opts)
56
56
57 def error(self, *msg, **opts):
57 def error(self, *msg, **opts):
58 log.error(' '.join(msg).rstrip('\n'))
58 log.error(' '.join(msg).rstrip('\n'))
59 super(LoggingUI, self).error(*msg, **opts)
59 super(LoggingUI, self).error(*msg, **opts)
60
60
61 def note(self, *msg, **opts):
61 def note(self, *msg, **opts):
62 log.info(' '.join(msg).rstrip('\n'))
62 log.info(' '.join(msg).rstrip('\n'))
63 super(LoggingUI, self).note(*msg, **opts)
63 super(LoggingUI, self).note(*msg, **opts)
64
64
65 def debug(self, *msg, **opts):
65 def debug(self, *msg, **opts):
66 log.debug(' '.join(msg).rstrip('\n'))
66 log.debug(' '.join(msg).rstrip('\n'))
67 super(LoggingUI, self).debug(*msg, **opts)
67 super(LoggingUI, self).debug(*msg, **opts)
68
68
69 baseui = LoggingUI()
69 baseui = LoggingUI()
70
70
71 # clean the baseui object
71 # clean the baseui object
72 baseui._ocfg = hgconfig.config()
72 baseui._ocfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
75
75
76 for section, option, value in repo_config:
76 for section, option, value in repo_config:
77 baseui.setconfig(section, option, value)
77 baseui.setconfig(section, option, value)
78
78
79 # make our hgweb quiet so it doesn't print output
79 # make our hgweb quiet so it doesn't print output
80 baseui.setconfig('ui', 'quiet', 'true')
80 baseui.setconfig('ui', 'quiet', 'true')
81
81
82 baseui.setconfig('ui', 'paginate', 'never')
82 baseui.setconfig('ui', 'paginate', 'never')
83 # for better Error reporting of Mercurial
83 # for better Error reporting of Mercurial
84 baseui.setconfig('ui', 'message-output', 'stderr')
84 baseui.setconfig('ui', 'message-output', 'stderr')
85
85
86 # force mercurial to only use 1 thread, otherwise it may try to set a
86 # force mercurial to only use 1 thread, otherwise it may try to set a
87 # signal in a non-main thread, thus generating a ValueError.
87 # signal in a non-main thread, thus generating a ValueError.
88 baseui.setconfig('worker', 'numcpus', 1)
88 baseui.setconfig('worker', 'numcpus', 1)
89
89
90 # If there is no config for the largefiles extension, we explicitly disable
90 # If there is no config for the largefiles extension, we explicitly disable
91 # it here. This overrides settings from repositories hgrc file. Recent
91 # it here. This overrides settings from repositories hgrc file. Recent
92 # mercurial versions enable largefiles in hgrc on clone from largefile
92 # mercurial versions enable largefiles in hgrc on clone from largefile
93 # repo.
93 # repo.
94 if not baseui.hasconfig('extensions', 'largefiles'):
94 if not baseui.hasconfig('extensions', 'largefiles'):
95 log.debug('Explicitly disable largefiles extension for repo.')
95 log.debug('Explicitly disable largefiles extension for repo.')
96 baseui.setconfig('extensions', 'largefiles', '!')
96 baseui.setconfig('extensions', 'largefiles', '!')
97
97
98 return baseui
98 return baseui
99
99
100
100
101 def reraise_safe_exceptions(func):
101 def reraise_safe_exceptions(func):
102 """Decorator for converting mercurial exceptions to something neutral."""
102 """Decorator for converting mercurial exceptions to something neutral."""
103
103
104 def wrapper(*args, **kwargs):
104 def wrapper(*args, **kwargs):
105 try:
105 try:
106 return func(*args, **kwargs)
106 return func(*args, **kwargs)
107 except (Abort, InterventionRequired) as e:
107 except (Abort, InterventionRequired) as e:
108 raise_from_original(exceptions.AbortException(e))
108 raise_from_original(exceptions.AbortException(e))
109 except RepoLookupError as e:
109 except RepoLookupError as e:
110 raise_from_original(exceptions.LookupException(e))
110 raise_from_original(exceptions.LookupException(e))
111 except RequirementError as e:
111 except RequirementError as e:
112 raise_from_original(exceptions.RequirementException(e))
112 raise_from_original(exceptions.RequirementException(e))
113 except RepoError as e:
113 except RepoError as e:
114 raise_from_original(exceptions.VcsException(e))
114 raise_from_original(exceptions.VcsException(e))
115 except LookupError as e:
115 except LookupError as e:
116 raise_from_original(exceptions.LookupException(e))
116 raise_from_original(exceptions.LookupException(e))
117 except Exception as e:
117 except Exception as e:
118 if not hasattr(e, '_vcs_kind'):
118 if not hasattr(e, '_vcs_kind'):
119 log.exception("Unhandled exception in hg remote call")
119 log.exception("Unhandled exception in hg remote call")
120 raise_from_original(exceptions.UnhandledException(e))
120 raise_from_original(exceptions.UnhandledException(e))
121
121
122 raise
122 raise
123 return wrapper
123 return wrapper
124
124
125
125
126 class MercurialFactory(RepoFactory):
126 class MercurialFactory(RepoFactory):
127 repo_type = 'hg'
127 repo_type = 'hg'
128
128
129 def _create_config(self, config, hooks=True):
129 def _create_config(self, config, hooks=True):
130 if not hooks:
130 if not hooks:
131 hooks_to_clean = frozenset((
131 hooks_to_clean = frozenset((
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
134 new_config = []
134 new_config = []
135 for section, option, value in config:
135 for section, option, value in config:
136 if section == 'hooks' and option in hooks_to_clean:
136 if section == 'hooks' and option in hooks_to_clean:
137 continue
137 continue
138 new_config.append((section, option, value))
138 new_config.append((section, option, value))
139 config = new_config
139 config = new_config
140
140
141 baseui = make_ui_from_config(config)
141 baseui = make_ui_from_config(config)
142 return baseui
142 return baseui
143
143
144 def _create_repo(self, wire, create):
144 def _create_repo(self, wire, create):
145 baseui = self._create_config(wire["config"])
145 baseui = self._create_config(wire["config"])
146 return instance(baseui, wire["path"], create)
146 return instance(baseui, wire["path"], create)
147
147
148 def repo(self, wire, create=False):
148 def repo(self, wire, create=False):
149 """
149 """
150 Get a repository instance for the given path.
150 Get a repository instance for the given path.
151 """
151 """
152 return self._create_repo(wire, create)
152 return self._create_repo(wire, create)
153
153
154
154
155 def patch_ui_message_output(baseui):
155 def patch_ui_message_output(baseui):
156 baseui.setconfig('ui', 'quiet', 'false')
156 baseui.setconfig('ui', 'quiet', 'false')
157 output = io.BytesIO()
157 output = io.BytesIO()
158
158
159 def write(data, **unused_kwargs):
159 def write(data, **unused_kwargs):
160 output.write(data)
160 output.write(data)
161
161
162 baseui.status = write
162 baseui.status = write
163 baseui.write = write
163 baseui.write = write
164 baseui.warn = write
164 baseui.warn = write
165 baseui.debug = write
165 baseui.debug = write
166
166
167 return baseui, output
167 return baseui, output
168
168
169
169
170 class HgRemote(RemoteBase):
170 class HgRemote(RemoteBase):
171
171
172 def __init__(self, factory):
172 def __init__(self, factory):
173 self._factory = factory
173 self._factory = factory
174 self._bulk_methods = {
174 self._bulk_methods = {
175 "affected_files": self.ctx_files,
175 "affected_files": self.ctx_files,
176 "author": self.ctx_user,
176 "author": self.ctx_user,
177 "branch": self.ctx_branch,
177 "branch": self.ctx_branch,
178 "children": self.ctx_children,
178 "children": self.ctx_children,
179 "date": self.ctx_date,
179 "date": self.ctx_date,
180 "message": self.ctx_description,
180 "message": self.ctx_description,
181 "parents": self.ctx_parents,
181 "parents": self.ctx_parents,
182 "status": self.ctx_status,
182 "status": self.ctx_status,
183 "obsolete": self.ctx_obsolete,
183 "obsolete": self.ctx_obsolete,
184 "phase": self.ctx_phase,
184 "phase": self.ctx_phase,
185 "hidden": self.ctx_hidden,
185 "hidden": self.ctx_hidden,
186 "_file_paths": self.ctx_list,
186 "_file_paths": self.ctx_list,
187 }
187 }
188
188
189 def _get_ctx(self, repo, ref):
189 def _get_ctx(self, repo, ref):
190 return get_ctx(repo, ref)
190 return get_ctx(repo, ref)
191
191
192 @reraise_safe_exceptions
192 @reraise_safe_exceptions
193 def discover_hg_version(self):
193 def discover_hg_version(self):
194 from mercurial import util
194 from mercurial import util
195 return util.version()
195 return util.version()
196
196
197 @reraise_safe_exceptions
197 @reraise_safe_exceptions
198 def is_empty(self, wire):
198 def is_empty(self, wire):
199 repo = self._factory.repo(wire)
199 repo = self._factory.repo(wire)
200
200
201 try:
201 try:
202 return len(repo) == 0
202 return len(repo) == 0
203 except Exception:
203 except Exception:
204 log.exception("failed to read object_store")
204 log.exception("failed to read object_store")
205 return False
205 return False
206
206
207 @reraise_safe_exceptions
207 @reraise_safe_exceptions
208 def archive_repo(self, archive_path, mtime, file_info, kind):
208 def archive_repo(self, archive_path, mtime, file_info, kind):
209 if kind == "tgz":
209 if kind == "tgz":
210 archiver = archival.tarit(archive_path, mtime, "gz")
210 archiver = archival.tarit(archive_path, mtime, "gz")
211 elif kind == "tbz2":
211 elif kind == "tbz2":
212 archiver = archival.tarit(archive_path, mtime, "bz2")
212 archiver = archival.tarit(archive_path, mtime, "bz2")
213 elif kind == 'zip':
213 elif kind == 'zip':
214 archiver = archival.zipit(archive_path, mtime)
214 archiver = archival.zipit(archive_path, mtime)
215 else:
215 else:
216 raise exceptions.ArchiveException()(
216 raise exceptions.ArchiveException()(
217 'Remote does not support: "%s".' % kind)
217 'Remote does not support: "%s".' % kind)
218
218
219 for f_path, f_mode, f_is_link, f_content in file_info:
219 for f_path, f_mode, f_is_link, f_content in file_info:
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
221 archiver.done()
221 archiver.done()
222
222
223 @reraise_safe_exceptions
223 @reraise_safe_exceptions
224 def bookmarks(self, wire):
224 def bookmarks(self, wire):
225 cache_on, context_uid, repo_id = self._cache_on(wire)
225 cache_on, context_uid, repo_id = self._cache_on(wire)
226 @self.region.conditional_cache_on_arguments(condition=cache_on)
226 @self.region.conditional_cache_on_arguments(condition=cache_on)
227 def _bookmarks(_context_uid, _repo_id):
227 def _bookmarks(_context_uid, _repo_id):
228 repo = self._factory.repo(wire)
228 repo = self._factory.repo(wire)
229 return dict(repo._bookmarks)
229 return dict(repo._bookmarks)
230
230
231 return _bookmarks(context_uid, repo_id)
231 return _bookmarks(context_uid, repo_id)
232
232
233 @reraise_safe_exceptions
233 @reraise_safe_exceptions
234 def branches(self, wire, normal, closed):
234 def branches(self, wire, normal, closed):
235 cache_on, context_uid, repo_id = self._cache_on(wire)
235 cache_on, context_uid, repo_id = self._cache_on(wire)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 def _branches(_context_uid, _repo_id, _normal, _closed):
237 def _branches(_context_uid, _repo_id, _normal, _closed):
238 repo = self._factory.repo(wire)
238 repo = self._factory.repo(wire)
239 iter_branches = repo.branchmap().iterbranches()
239 iter_branches = repo.branchmap().iterbranches()
240 bt = {}
240 bt = {}
241 for branch_name, _heads, tip, is_closed in iter_branches:
241 for branch_name, _heads, tip, is_closed in iter_branches:
242 if normal and not is_closed:
242 if normal and not is_closed:
243 bt[branch_name] = tip
243 bt[branch_name] = tip
244 if closed and is_closed:
244 if closed and is_closed:
245 bt[branch_name] = tip
245 bt[branch_name] = tip
246
246
247 return bt
247 return bt
248
248
249 return _branches(context_uid, repo_id, normal, closed)
249 return _branches(context_uid, repo_id, normal, closed)
250
250
251 @reraise_safe_exceptions
251 @reraise_safe_exceptions
252 def bulk_request(self, wire, commit_id, pre_load):
252 def bulk_request(self, wire, commit_id, pre_load):
253 cache_on, context_uid, repo_id = self._cache_on(wire)
253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 @self.region.conditional_cache_on_arguments(condition=cache_on)
254 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 def _bulk_request(_repo_id, _commit_id, _pre_load):
255 def _bulk_request(_repo_id, _commit_id, _pre_load):
256 result = {}
256 result = {}
257 for attr in pre_load:
257 for attr in pre_load:
258 try:
258 try:
259 method = self._bulk_methods[attr]
259 method = self._bulk_methods[attr]
260 result[attr] = method(wire, commit_id)
260 result[attr] = method(wire, commit_id)
261 except KeyError as e:
261 except KeyError as e:
262 raise exceptions.VcsException(e)(
262 raise exceptions.VcsException(e)(
263 'Unknown bulk attribute: "%s"' % attr)
263 'Unknown bulk attribute: "%s"' % attr)
264 return result
264 return result
265
265
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
267
267
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_branch(self, wire, commit_id):
269 def ctx_branch(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 def _ctx_branch(_repo_id, _commit_id):
272 def _ctx_branch(_repo_id, _commit_id):
273 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
274 ctx = self._get_ctx(repo, commit_id)
274 ctx = self._get_ctx(repo, commit_id)
275 return ctx.branch()
275 return ctx.branch()
276 return _ctx_branch(repo_id, commit_id)
276 return _ctx_branch(repo_id, commit_id)
277
277
278 @reraise_safe_exceptions
278 @reraise_safe_exceptions
279 def ctx_date(self, wire, commit_id):
279 def ctx_date(self, wire, commit_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 def _ctx_date(_repo_id, _commit_id):
282 def _ctx_date(_repo_id, _commit_id):
283 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
284 ctx = self._get_ctx(repo, commit_id)
284 ctx = self._get_ctx(repo, commit_id)
285 return ctx.date()
285 return ctx.date()
286 return _ctx_date(repo_id, commit_id)
286 return _ctx_date(repo_id, commit_id)
287
287
288 @reraise_safe_exceptions
288 @reraise_safe_exceptions
289 def ctx_description(self, wire, revision):
289 def ctx_description(self, wire, revision):
290 repo = self._factory.repo(wire)
290 repo = self._factory.repo(wire)
291 ctx = self._get_ctx(repo, revision)
291 ctx = self._get_ctx(repo, revision)
292 return ctx.description()
292 return ctx.description()
293
293
294 @reraise_safe_exceptions
294 @reraise_safe_exceptions
295 def ctx_files(self, wire, commit_id):
295 def ctx_files(self, wire, commit_id):
296 cache_on, context_uid, repo_id = self._cache_on(wire)
296 cache_on, context_uid, repo_id = self._cache_on(wire)
297 @self.region.conditional_cache_on_arguments(condition=cache_on)
297 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 def _ctx_files(_repo_id, _commit_id):
298 def _ctx_files(_repo_id, _commit_id):
299 repo = self._factory.repo(wire)
299 repo = self._factory.repo(wire)
300 ctx = self._get_ctx(repo, commit_id)
300 ctx = self._get_ctx(repo, commit_id)
301 return ctx.files()
301 return ctx.files()
302
302
303 return _ctx_files(repo_id, commit_id)
303 return _ctx_files(repo_id, commit_id)
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def ctx_list(self, path, revision):
306 def ctx_list(self, path, revision):
307 repo = self._factory.repo(path)
307 repo = self._factory.repo(path)
308 ctx = self._get_ctx(repo, revision)
308 ctx = self._get_ctx(repo, revision)
309 return list(ctx)
309 return list(ctx)
310
310
311 @reraise_safe_exceptions
311 @reraise_safe_exceptions
312 def ctx_parents(self, wire, commit_id):
312 def ctx_parents(self, wire, commit_id):
313 cache_on, context_uid, repo_id = self._cache_on(wire)
313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 def _ctx_parents(_repo_id, _commit_id):
315 def _ctx_parents(_repo_id, _commit_id):
316 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
317 ctx = self._get_ctx(repo, commit_id)
317 ctx = self._get_ctx(repo, commit_id)
318 return [parent.hex() for parent in ctx.parents()
318 return [parent.hex() for parent in ctx.parents()
319 if not (parent.hidden() or parent.obsolete())]
319 if not (parent.hidden() or parent.obsolete())]
320
320
321 return _ctx_parents(repo_id, commit_id)
321 return _ctx_parents(repo_id, commit_id)
322
322
323 @reraise_safe_exceptions
323 @reraise_safe_exceptions
324 def ctx_children(self, wire, commit_id):
324 def ctx_children(self, wire, commit_id):
325 cache_on, context_uid, repo_id = self._cache_on(wire)
325 cache_on, context_uid, repo_id = self._cache_on(wire)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 def _ctx_children(_repo_id, _commit_id):
327 def _ctx_children(_repo_id, _commit_id):
328 repo = self._factory.repo(wire)
328 repo = self._factory.repo(wire)
329 ctx = self._get_ctx(repo, commit_id)
329 ctx = self._get_ctx(repo, commit_id)
330 return [child.hex() for child in ctx.children()
330 return [child.hex() for child in ctx.children()
331 if not (child.hidden() or child.obsolete())]
331 if not (child.hidden() or child.obsolete())]
332
332
333 return _ctx_children(repo_id, commit_id)
333 return _ctx_children(repo_id, commit_id)
334
334
335 @reraise_safe_exceptions
335 @reraise_safe_exceptions
336 def ctx_phase(self, wire, commit_id):
336 def ctx_phase(self, wire, commit_id):
337 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 @self.region.conditional_cache_on_arguments(condition=cache_on)
338 @self.region.conditional_cache_on_arguments(condition=cache_on)
339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
340 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
341 ctx = self._get_ctx(repo, commit_id)
341 ctx = self._get_ctx(repo, commit_id)
342 # public=0, draft=1, secret=3
342 # public=0, draft=1, secret=3
343 return ctx.phase()
343 return ctx.phase()
344 return _ctx_phase(context_uid, repo_id, commit_id)
344 return _ctx_phase(context_uid, repo_id, commit_id)
345
345
346 @reraise_safe_exceptions
346 @reraise_safe_exceptions
347 def ctx_obsolete(self, wire, commit_id):
347 def ctx_obsolete(self, wire, commit_id):
348 cache_on, context_uid, repo_id = self._cache_on(wire)
348 cache_on, context_uid, repo_id = self._cache_on(wire)
349 @self.region.conditional_cache_on_arguments(condition=cache_on)
349 @self.region.conditional_cache_on_arguments(condition=cache_on)
350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
351 repo = self._factory.repo(wire)
351 repo = self._factory.repo(wire)
352 ctx = self._get_ctx(repo, commit_id)
352 ctx = self._get_ctx(repo, commit_id)
353 return ctx.obsolete()
353 return ctx.obsolete()
354 return _ctx_obsolete(context_uid, repo_id, commit_id)
354 return _ctx_obsolete(context_uid, repo_id, commit_id)
355
355
356 @reraise_safe_exceptions
356 @reraise_safe_exceptions
357 def ctx_hidden(self, wire, commit_id):
357 def ctx_hidden(self, wire, commit_id):
358 cache_on, context_uid, repo_id = self._cache_on(wire)
358 cache_on, context_uid, repo_id = self._cache_on(wire)
359 @self.region.conditional_cache_on_arguments(condition=cache_on)
359 @self.region.conditional_cache_on_arguments(condition=cache_on)
360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
361 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, commit_id)
362 ctx = self._get_ctx(repo, commit_id)
363 return ctx.hidden()
363 return ctx.hidden()
364 return _ctx_hidden(context_uid, repo_id, commit_id)
364 return _ctx_hidden(context_uid, repo_id, commit_id)
365
365
366 @reraise_safe_exceptions
366 @reraise_safe_exceptions
367 def ctx_substate(self, wire, revision):
367 def ctx_substate(self, wire, revision):
368 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
369 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
370 return ctx.substate
370 return ctx.substate
371
371
372 @reraise_safe_exceptions
372 @reraise_safe_exceptions
373 def ctx_status(self, wire, revision):
373 def ctx_status(self, wire, revision):
374 repo = self._factory.repo(wire)
374 repo = self._factory.repo(wire)
375 ctx = self._get_ctx(repo, revision)
375 ctx = self._get_ctx(repo, revision)
376 status = repo[ctx.p1().node()].status(other=ctx.node())
376 status = repo[ctx.p1().node()].status(other=ctx.node())
377 # object of status (odd, custom named tuple in mercurial) is not
377 # object of status (odd, custom named tuple in mercurial) is not
378 # correctly serializable, we make it a list, as the underling
378 # correctly serializable, we make it a list, as the underling
379 # API expects this to be a list
379 # API expects this to be a list
380 return list(status)
380 return list(status)
381
381
382 @reraise_safe_exceptions
382 @reraise_safe_exceptions
383 def ctx_user(self, wire, revision):
383 def ctx_user(self, wire, revision):
384 repo = self._factory.repo(wire)
384 repo = self._factory.repo(wire)
385 ctx = self._get_ctx(repo, revision)
385 ctx = self._get_ctx(repo, revision)
386 return ctx.user()
386 return ctx.user()
387
387
388 @reraise_safe_exceptions
388 @reraise_safe_exceptions
389 def check_url(self, url, config):
389 def check_url(self, url, config):
390 _proto = None
390 _proto = None
391 if '+' in url[:url.find('://')]:
391 if '+' in url[:url.find('://')]:
392 _proto = url[0:url.find('+')]
392 _proto = url[0:url.find('+')]
393 url = url[url.find('+') + 1:]
393 url = url[url.find('+') + 1:]
394 handlers = []
394 handlers = []
395 url_obj = url_parser(url)
395 url_obj = url_parser(url)
396 test_uri, authinfo = url_obj.authinfo()
396 test_uri, authinfo = url_obj.authinfo()
397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
398 url_obj.query = obfuscate_qs(url_obj.query)
398 url_obj.query = obfuscate_qs(url_obj.query)
399
399
400 cleaned_uri = str(url_obj)
400 cleaned_uri = str(url_obj)
401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
402
402
403 if authinfo:
403 if authinfo:
404 # create a password manager
404 # create a password manager
405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
405 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
406 passmgr.add_password(*authinfo)
406 passmgr.add_password(*authinfo)
407
407
408 handlers.extend((httpbasicauthhandler(passmgr),
408 handlers.extend((httpbasicauthhandler(passmgr),
409 httpdigestauthhandler(passmgr)))
409 httpdigestauthhandler(passmgr)))
410
410
411 o = urllib2.build_opener(*handlers)
411 o = urllib.request.build_opener(*handlers)
412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
413 ('Accept', 'application/mercurial-0.1')]
413 ('Accept', 'application/mercurial-0.1')]
414
414
415 q = {"cmd": 'between'}
415 q = {"cmd": 'between'}
416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
417 qs = '?%s' % urllib.urlencode(q)
417 qs = '?%s' % urllib.parse.urlencode(q)
418 cu = "%s%s" % (test_uri, qs)
418 cu = "%s%s" % (test_uri, qs)
419 req = urllib2.Request(cu, None, {})
419 req = urllib.request.Request(cu, None, {})
420
420
421 try:
421 try:
422 log.debug("Trying to open URL %s", cleaned_uri)
422 log.debug("Trying to open URL %s", cleaned_uri)
423 resp = o.open(req)
423 resp = o.open(req)
424 if resp.code != 200:
424 if resp.code != 200:
425 raise exceptions.URLError()('Return Code is not 200')
425 raise exceptions.URLError()('Return Code is not 200')
426 except Exception as e:
426 except Exception as e:
427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
428 # means it cannot be cloned
428 # means it cannot be cloned
429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
430
430
431 # now check if it's a proper hg repo, but don't do it for svn
431 # now check if it's a proper hg repo, but don't do it for svn
432 try:
432 try:
433 if _proto == 'svn':
433 if _proto == 'svn':
434 pass
434 pass
435 else:
435 else:
436 # check for pure hg repos
436 # check for pure hg repos
437 log.debug(
437 log.debug(
438 "Verifying if URL is a Mercurial repository: %s",
438 "Verifying if URL is a Mercurial repository: %s",
439 cleaned_uri)
439 cleaned_uri)
440 ui = make_ui_from_config(config)
440 ui = make_ui_from_config(config)
441 peer_checker = makepeer(ui, url)
441 peer_checker = makepeer(ui, url)
442 peer_checker.lookup('tip')
442 peer_checker.lookup('tip')
443 except Exception as e:
443 except Exception as e:
444 log.warning("URL is not a valid Mercurial repository: %s",
444 log.warning("URL is not a valid Mercurial repository: %s",
445 cleaned_uri)
445 cleaned_uri)
446 raise exceptions.URLError(e)(
446 raise exceptions.URLError(e)(
447 "url [%s] does not look like an hg repo org_exc: %s"
447 "url [%s] does not look like an hg repo org_exc: %s"
448 % (cleaned_uri, e))
448 % (cleaned_uri, e))
449
449
450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
451 return True
451 return True
452
452
453 @reraise_safe_exceptions
453 @reraise_safe_exceptions
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
455 repo = self._factory.repo(wire)
455 repo = self._factory.repo(wire)
456
456
457 if file_filter:
457 if file_filter:
458 match_filter = match(file_filter[0], '', [file_filter[1]])
458 match_filter = match(file_filter[0], '', [file_filter[1]])
459 else:
459 else:
460 match_filter = file_filter
460 match_filter = file_filter
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
462
462
463 try:
463 try:
464 return "".join(patch.diff(
464 return "".join(patch.diff(
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
466 except RepoLookupError as e:
466 except RepoLookupError as e:
467 raise exceptions.LookupException(e)()
467 raise exceptions.LookupException(e)()
468
468
469 @reraise_safe_exceptions
469 @reraise_safe_exceptions
470 def node_history(self, wire, revision, path, limit):
470 def node_history(self, wire, revision, path, limit):
471 cache_on, context_uid, repo_id = self._cache_on(wire)
471 cache_on, context_uid, repo_id = self._cache_on(wire)
472 @self.region.conditional_cache_on_arguments(condition=cache_on)
472 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
474 repo = self._factory.repo(wire)
474 repo = self._factory.repo(wire)
475
475
476 ctx = self._get_ctx(repo, revision)
476 ctx = self._get_ctx(repo, revision)
477 fctx = ctx.filectx(path)
477 fctx = ctx.filectx(path)
478
478
479 def history_iter():
479 def history_iter():
480 limit_rev = fctx.rev()
480 limit_rev = fctx.rev()
481 for obj in reversed(list(fctx.filelog())):
481 for obj in reversed(list(fctx.filelog())):
482 obj = fctx.filectx(obj)
482 obj = fctx.filectx(obj)
483 ctx = obj.changectx()
483 ctx = obj.changectx()
484 if ctx.hidden() or ctx.obsolete():
484 if ctx.hidden() or ctx.obsolete():
485 continue
485 continue
486
486
487 if limit_rev >= obj.rev():
487 if limit_rev >= obj.rev():
488 yield obj
488 yield obj
489
489
490 history = []
490 history = []
491 for cnt, obj in enumerate(history_iter()):
491 for cnt, obj in enumerate(history_iter()):
492 if limit and cnt >= limit:
492 if limit and cnt >= limit:
493 break
493 break
494 history.append(hex(obj.node()))
494 history.append(hex(obj.node()))
495
495
496 return [x for x in history]
496 return [x for x in history]
497 return _node_history(context_uid, repo_id, revision, path, limit)
497 return _node_history(context_uid, repo_id, revision, path, limit)
498
498
499 @reraise_safe_exceptions
499 @reraise_safe_exceptions
500 def node_history_untill(self, wire, revision, path, limit):
500 def node_history_untill(self, wire, revision, path, limit):
501 cache_on, context_uid, repo_id = self._cache_on(wire)
501 cache_on, context_uid, repo_id = self._cache_on(wire)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
503 def _node_history_until(_context_uid, _repo_id):
503 def _node_history_until(_context_uid, _repo_id):
504 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
505 ctx = self._get_ctx(repo, revision)
505 ctx = self._get_ctx(repo, revision)
506 fctx = ctx.filectx(path)
506 fctx = ctx.filectx(path)
507
507
508 file_log = list(fctx.filelog())
508 file_log = list(fctx.filelog())
509 if limit:
509 if limit:
510 # Limit to the last n items
510 # Limit to the last n items
511 file_log = file_log[-limit:]
511 file_log = file_log[-limit:]
512
512
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
515
515
516 @reraise_safe_exceptions
516 @reraise_safe_exceptions
517 def fctx_annotate(self, wire, revision, path):
517 def fctx_annotate(self, wire, revision, path):
518 repo = self._factory.repo(wire)
518 repo = self._factory.repo(wire)
519 ctx = self._get_ctx(repo, revision)
519 ctx = self._get_ctx(repo, revision)
520 fctx = ctx.filectx(path)
520 fctx = ctx.filectx(path)
521
521
522 result = []
522 result = []
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
524 ln_no = i
524 ln_no = i
525 sha = hex(annotate_obj.fctx.node())
525 sha = hex(annotate_obj.fctx.node())
526 content = annotate_obj.text
526 content = annotate_obj.text
527 result.append((ln_no, sha, content))
527 result.append((ln_no, sha, content))
528 return result
528 return result
529
529
530 @reraise_safe_exceptions
530 @reraise_safe_exceptions
531 def fctx_node_data(self, wire, revision, path):
531 def fctx_node_data(self, wire, revision, path):
532 repo = self._factory.repo(wire)
532 repo = self._factory.repo(wire)
533 ctx = self._get_ctx(repo, revision)
533 ctx = self._get_ctx(repo, revision)
534 fctx = ctx.filectx(path)
534 fctx = ctx.filectx(path)
535 return fctx.data()
535 return fctx.data()
536
536
537 @reraise_safe_exceptions
537 @reraise_safe_exceptions
538 def fctx_flags(self, wire, commit_id, path):
538 def fctx_flags(self, wire, commit_id, path):
539 cache_on, context_uid, repo_id = self._cache_on(wire)
539 cache_on, context_uid, repo_id = self._cache_on(wire)
540 @self.region.conditional_cache_on_arguments(condition=cache_on)
540 @self.region.conditional_cache_on_arguments(condition=cache_on)
541 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
542 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
544 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
545 return fctx.flags()
545 return fctx.flags()
546
546
547 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
548
548
549 @reraise_safe_exceptions
549 @reraise_safe_exceptions
550 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
551 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 @self.region.conditional_cache_on_arguments(condition=cache_on)
552 @self.region.conditional_cache_on_arguments(condition=cache_on)
553 def _fctx_size(_repo_id, _revision, _path):
553 def _fctx_size(_repo_id, _revision, _path):
554 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
555 ctx = self._get_ctx(repo, commit_id)
555 ctx = self._get_ctx(repo, commit_id)
556 fctx = ctx.filectx(path)
556 fctx = ctx.filectx(path)
557 return fctx.size()
557 return fctx.size()
558 return _fctx_size(repo_id, commit_id, path)
558 return _fctx_size(repo_id, commit_id, path)
559
559
560 @reraise_safe_exceptions
560 @reraise_safe_exceptions
561 def get_all_commit_ids(self, wire, name):
561 def get_all_commit_ids(self, wire, name):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
565 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
566 repo = repo.filtered(name)
566 repo = repo.filtered(name)
567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
567 revs = [hex(x[7]) for x in repo.changelog.index]
568 return revs
568 return revs
569 return _get_all_commit_ids(context_uid, repo_id, name)
569 return _get_all_commit_ids(context_uid, repo_id, name)
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def get_config_value(self, wire, section, name, untrusted=False):
572 def get_config_value(self, wire, section, name, untrusted=False):
573 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
574 return repo.ui.config(section, name, untrusted=untrusted)
574 return repo.ui.config(section, name, untrusted=untrusted)
575
575
576 @reraise_safe_exceptions
576 @reraise_safe_exceptions
577 def is_large_file(self, wire, commit_id, path):
577 def is_large_file(self, wire, commit_id, path):
578 cache_on, context_uid, repo_id = self._cache_on(wire)
578 cache_on, context_uid, repo_id = self._cache_on(wire)
579 @self.region.conditional_cache_on_arguments(condition=cache_on)
579 @self.region.conditional_cache_on_arguments(condition=cache_on)
580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
581 return largefiles.lfutil.isstandin(path)
581 return largefiles.lfutil.isstandin(path)
582
582
583 return _is_large_file(context_uid, repo_id, commit_id, path)
583 return _is_large_file(context_uid, repo_id, commit_id, path)
584
584
585 @reraise_safe_exceptions
585 @reraise_safe_exceptions
586 def is_binary(self, wire, revision, path):
586 def is_binary(self, wire, revision, path):
587 cache_on, context_uid, repo_id = self._cache_on(wire)
587 cache_on, context_uid, repo_id = self._cache_on(wire)
588
588
589 @self.region.conditional_cache_on_arguments(condition=cache_on)
589 @self.region.conditional_cache_on_arguments(condition=cache_on)
590 def _is_binary(_repo_id, _sha, _path):
590 def _is_binary(_repo_id, _sha, _path):
591 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
592 ctx = self._get_ctx(repo, revision)
592 ctx = self._get_ctx(repo, revision)
593 fctx = ctx.filectx(path)
593 fctx = ctx.filectx(path)
594 return fctx.isbinary()
594 return fctx.isbinary()
595
595
596 return _is_binary(repo_id, revision, path)
596 return _is_binary(repo_id, revision, path)
597
597
598 @reraise_safe_exceptions
598 @reraise_safe_exceptions
599 def in_largefiles_store(self, wire, sha):
599 def in_largefiles_store(self, wire, sha):
600 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
601 return largefiles.lfutil.instore(repo, sha)
601 return largefiles.lfutil.instore(repo, sha)
602
602
603 @reraise_safe_exceptions
603 @reraise_safe_exceptions
604 def in_user_cache(self, wire, sha):
604 def in_user_cache(self, wire, sha):
605 repo = self._factory.repo(wire)
605 repo = self._factory.repo(wire)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
607
607
608 @reraise_safe_exceptions
608 @reraise_safe_exceptions
609 def store_path(self, wire, sha):
609 def store_path(self, wire, sha):
610 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
611 return largefiles.lfutil.storepath(repo, sha)
611 return largefiles.lfutil.storepath(repo, sha)
612
612
613 @reraise_safe_exceptions
613 @reraise_safe_exceptions
614 def link(self, wire, sha, path):
614 def link(self, wire, sha, path):
615 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
616 largefiles.lfutil.link(
616 largefiles.lfutil.link(
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
618
618
619 @reraise_safe_exceptions
619 @reraise_safe_exceptions
620 def localrepository(self, wire, create=False):
620 def localrepository(self, wire, create=False):
621 self._factory.repo(wire, create=create)
621 self._factory.repo(wire, create=create)
622
622
623 @reraise_safe_exceptions
623 @reraise_safe_exceptions
624 def lookup(self, wire, revision, both):
624 def lookup(self, wire, revision, both):
625 cache_on, context_uid, repo_id = self._cache_on(wire)
625 cache_on, context_uid, repo_id = self._cache_on(wire)
626 @self.region.conditional_cache_on_arguments(condition=cache_on)
626 @self.region.conditional_cache_on_arguments(condition=cache_on)
627 def _lookup(_context_uid, _repo_id, _revision, _both):
627 def _lookup(_context_uid, _repo_id, _revision, _both):
628
628
629 repo = self._factory.repo(wire)
629 repo = self._factory.repo(wire)
630 rev = _revision
630 rev = _revision
631 if isinstance(rev, int):
631 if isinstance(rev, int):
632 # NOTE(marcink):
632 # NOTE(marcink):
633 # since Mercurial doesn't support negative indexes properly
633 # since Mercurial doesn't support negative indexes properly
634 # we need to shift accordingly by one to get proper index, e.g
634 # we need to shift accordingly by one to get proper index, e.g
635 # repo[-1] => repo[-2]
635 # repo[-1] => repo[-2]
636 # repo[0] => repo[-1]
636 # repo[0] => repo[-1]
637 if rev <= 0:
637 if rev <= 0:
638 rev = rev + -1
638 rev = rev + -1
639 try:
639 try:
640 ctx = self._get_ctx(repo, rev)
640 ctx = self._get_ctx(repo, rev)
641 except (TypeError, RepoLookupError) as e:
641 except (TypeError, RepoLookupError) as e:
642 e._org_exc_tb = traceback.format_exc()
642 e._org_exc_tb = traceback.format_exc()
643 raise exceptions.LookupException(e)(rev)
643 raise exceptions.LookupException(e)(rev)
644 except LookupError as e:
644 except LookupError as e:
645 e._org_exc_tb = traceback.format_exc()
645 e._org_exc_tb = traceback.format_exc()
646 raise exceptions.LookupException(e)(e.name)
646 raise exceptions.LookupException(e)(e.name)
647
647
648 if not both:
648 if not both:
649 return ctx.hex()
649 return ctx.hex()
650
650
651 ctx = repo[ctx.hex()]
651 ctx = repo[ctx.hex()]
652 return ctx.hex(), ctx.rev()
652 return ctx.hex(), ctx.rev()
653
653
654 return _lookup(context_uid, repo_id, revision, both)
654 return _lookup(context_uid, repo_id, revision, both)
655
655
656 @reraise_safe_exceptions
656 @reraise_safe_exceptions
657 def sync_push(self, wire, url):
657 def sync_push(self, wire, url):
658 if not self.check_url(url, wire['config']):
658 if not self.check_url(url, wire['config']):
659 return
659 return
660
660
661 repo = self._factory.repo(wire)
661 repo = self._factory.repo(wire)
662
662
663 # Disable any prompts for this repo
663 # Disable any prompts for this repo
664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
665
665
666 bookmarks = dict(repo._bookmarks).keys()
666 bookmarks = list(dict(repo._bookmarks).keys())
667 remote = peer(repo, {}, url)
667 remote = peer(repo, {}, url)
668 # Disable any prompts for this remote
668 # Disable any prompts for this remote
669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
670
670
671 return exchange.push(
671 return exchange.push(
672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def revision(self, wire, rev):
675 def revision(self, wire, rev):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 ctx = self._get_ctx(repo, rev)
677 ctx = self._get_ctx(repo, rev)
678 return ctx.rev()
678 return ctx.rev()
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def rev_range(self, wire, commit_filter):
681 def rev_range(self, wire, commit_filter):
682 cache_on, context_uid, repo_id = self._cache_on(wire)
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683
683
684 @self.region.conditional_cache_on_arguments(condition=cache_on)
684 @self.region.conditional_cache_on_arguments(condition=cache_on)
685 def _rev_range(_context_uid, _repo_id, _filter):
685 def _rev_range(_context_uid, _repo_id, _filter):
686 repo = self._factory.repo(wire)
686 repo = self._factory.repo(wire)
687 revisions = [rev for rev in revrange(repo, commit_filter)]
687 revisions = [rev for rev in revrange(repo, commit_filter)]
688 return revisions
688 return revisions
689
689
690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
691
691
692 @reraise_safe_exceptions
692 @reraise_safe_exceptions
693 def rev_range_hash(self, wire, node):
693 def rev_range_hash(self, wire, node):
694 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
695
695
696 def get_revs(repo, rev_opt):
696 def get_revs(repo, rev_opt):
697 if rev_opt:
697 if rev_opt:
698 revs = revrange(repo, rev_opt)
698 revs = revrange(repo, rev_opt)
699 if len(revs) == 0:
699 if len(revs) == 0:
700 return (nullrev, nullrev)
700 return (nullrev, nullrev)
701 return max(revs), min(revs)
701 return max(revs), min(revs)
702 else:
702 else:
703 return len(repo) - 1, 0
703 return len(repo) - 1, 0
704
704
705 stop, start = get_revs(repo, [node + ':'])
705 stop, start = get_revs(repo, [node + ':'])
706 revs = [hex(repo[r].node()) for r in range(start, stop + 1)]
706 revs = [hex(repo[r].node()) for r in range(start, stop + 1)]
707 return revs
707 return revs
708
708
709 @reraise_safe_exceptions
709 @reraise_safe_exceptions
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
711 other_path = kwargs.pop('other_path', None)
711 other_path = kwargs.pop('other_path', None)
712
712
713 # case when we want to compare two independent repositories
713 # case when we want to compare two independent repositories
714 if other_path and other_path != wire["path"]:
714 if other_path and other_path != wire["path"]:
715 baseui = self._factory._create_config(wire["config"])
715 baseui = self._factory._create_config(wire["config"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
717 else:
717 else:
718 repo = self._factory.repo(wire)
718 repo = self._factory.repo(wire)
719 return list(repo.revs(rev_spec, *args))
719 return list(repo.revs(rev_spec, *args))
720
720
721 @reraise_safe_exceptions
721 @reraise_safe_exceptions
722 def verify(self, wire,):
722 def verify(self, wire,):
723 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
724 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
725
725
726 baseui, output = patch_ui_message_output(baseui)
726 baseui, output = patch_ui_message_output(baseui)
727
727
728 repo.ui = baseui
728 repo.ui = baseui
729 verify.verify(repo)
729 verify.verify(repo)
730 return output.getvalue()
730 return output.getvalue()
731
731
732 @reraise_safe_exceptions
732 @reraise_safe_exceptions
733 def hg_update_cache(self, wire,):
733 def hg_update_cache(self, wire,):
734 repo = self._factory.repo(wire)
734 repo = self._factory.repo(wire)
735 baseui = self._factory._create_config(wire['config'])
735 baseui = self._factory._create_config(wire['config'])
736 baseui, output = patch_ui_message_output(baseui)
736 baseui, output = patch_ui_message_output(baseui)
737
737
738 repo.ui = baseui
738 repo.ui = baseui
739 with repo.wlock(), repo.lock():
739 with repo.wlock(), repo.lock():
740 repo.updatecaches(full=True)
740 repo.updatecaches(full=True)
741
741
742 return output.getvalue()
742 return output.getvalue()
743
743
744 @reraise_safe_exceptions
744 @reraise_safe_exceptions
745 def hg_rebuild_fn_cache(self, wire,):
745 def hg_rebuild_fn_cache(self, wire,):
746 repo = self._factory.repo(wire)
746 repo = self._factory.repo(wire)
747 baseui = self._factory._create_config(wire['config'])
747 baseui = self._factory._create_config(wire['config'])
748 baseui, output = patch_ui_message_output(baseui)
748 baseui, output = patch_ui_message_output(baseui)
749
749
750 repo.ui = baseui
750 repo.ui = baseui
751
751
752 repair.rebuildfncache(baseui, repo)
752 repair.rebuildfncache(baseui, repo)
753
753
754 return output.getvalue()
754 return output.getvalue()
755
755
756 @reraise_safe_exceptions
756 @reraise_safe_exceptions
757 def tags(self, wire):
757 def tags(self, wire):
758 cache_on, context_uid, repo_id = self._cache_on(wire)
758 cache_on, context_uid, repo_id = self._cache_on(wire)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
760 def _tags(_context_uid, _repo_id):
760 def _tags(_context_uid, _repo_id):
761 repo = self._factory.repo(wire)
761 repo = self._factory.repo(wire)
762 return repo.tags()
762 return repo.tags()
763
763
764 return _tags(context_uid, repo_id)
764 return _tags(context_uid, repo_id)
765
765
766 @reraise_safe_exceptions
766 @reraise_safe_exceptions
767 def update(self, wire, node=None, clean=False):
767 def update(self, wire, node=None, clean=False):
768 repo = self._factory.repo(wire)
768 repo = self._factory.repo(wire)
769 baseui = self._factory._create_config(wire['config'])
769 baseui = self._factory._create_config(wire['config'])
770 commands.update(baseui, repo, node=node, clean=clean)
770 commands.update(baseui, repo, node=node, clean=clean)
771
771
772 @reraise_safe_exceptions
772 @reraise_safe_exceptions
773 def identify(self, wire):
773 def identify(self, wire):
774 repo = self._factory.repo(wire)
774 repo = self._factory.repo(wire)
775 baseui = self._factory._create_config(wire['config'])
775 baseui = self._factory._create_config(wire['config'])
776 output = io.BytesIO()
776 output = io.BytesIO()
777 baseui.write = output.write
777 baseui.write = output.write
778 # This is required to get a full node id
778 # This is required to get a full node id
779 baseui.debugflag = True
779 baseui.debugflag = True
780 commands.identify(baseui, repo, id=True)
780 commands.identify(baseui, repo, id=True)
781
781
782 return output.getvalue()
782 return output.getvalue()
783
783
784 @reraise_safe_exceptions
784 @reraise_safe_exceptions
785 def heads(self, wire, branch=None):
785 def heads(self, wire, branch=None):
786 repo = self._factory.repo(wire)
786 repo = self._factory.repo(wire)
787 baseui = self._factory._create_config(wire['config'])
787 baseui = self._factory._create_config(wire['config'])
788 output = io.BytesIO()
788 output = io.BytesIO()
789
789
790 def write(data, **unused_kwargs):
790 def write(data, **unused_kwargs):
791 output.write(data)
791 output.write(data)
792
792
793 baseui.write = write
793 baseui.write = write
794 if branch:
794 if branch:
795 args = [branch]
795 args = [branch]
796 else:
796 else:
797 args = []
797 args = []
798 commands.heads(baseui, repo, template='{node} ', *args)
798 commands.heads(baseui, repo, template='{node} ', *args)
799
799
800 return output.getvalue()
800 return output.getvalue()
801
801
802 @reraise_safe_exceptions
802 @reraise_safe_exceptions
803 def ancestor(self, wire, revision1, revision2):
803 def ancestor(self, wire, revision1, revision2):
804 repo = self._factory.repo(wire)
804 repo = self._factory.repo(wire)
805 changelog = repo.changelog
805 changelog = repo.changelog
806 lookup = repo.lookup
806 lookup = repo.lookup
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
808 return hex(a)
808 return hex(a)
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
814
814
815 @reraise_safe_exceptions
815 @reraise_safe_exceptions
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
817
817
818 repo = self._factory.repo(wire)
818 repo = self._factory.repo(wire)
819 baseui = self._factory._create_config(wire['config'])
819 baseui = self._factory._create_config(wire['config'])
820 publishing = baseui.configbool('phases', 'publish')
820 publishing = baseui.configbool('phases', 'publish')
821 if publishing:
821 if publishing:
822 new_commit = 'public'
822 new_commit = 'public'
823 else:
823 else:
824 new_commit = 'draft'
824 new_commit = 'draft'
825
825
826 def _filectxfn(_repo, ctx, path):
826 def _filectxfn(_repo, ctx, path):
827 """
827 """
828 Marks given path as added/changed/removed in a given _repo. This is
828 Marks given path as added/changed/removed in a given _repo. This is
829 for internal mercurial commit function.
829 for internal mercurial commit function.
830 """
830 """
831
831
832 # check if this path is removed
832 # check if this path is removed
833 if path in removed:
833 if path in removed:
834 # returning None is a way to mark node for removal
834 # returning None is a way to mark node for removal
835 return None
835 return None
836
836
837 # check if this path is added
837 # check if this path is added
838 for node in updated:
838 for node in updated:
839 if node['path'] == path:
839 if node['path'] == path:
840 return memfilectx(
840 return memfilectx(
841 _repo,
841 _repo,
842 changectx=ctx,
842 changectx=ctx,
843 path=node['path'],
843 path=node['path'],
844 data=node['content'],
844 data=node['content'],
845 islink=False,
845 islink=False,
846 isexec=bool(node['mode'] & stat.S_IXUSR),
846 isexec=bool(node['mode'] & stat.S_IXUSR),
847 copysource=False)
847 copysource=False)
848
848
849 raise exceptions.AbortException()(
849 raise exceptions.AbortException()(
850 "Given path haven't been marked as added, "
850 "Given path haven't been marked as added, "
851 "changed or removed (%s)" % path)
851 "changed or removed (%s)" % path)
852
852
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
854
854
855 commit_ctx = memctx(
855 commit_ctx = memctx(
856 repo=repo,
856 repo=repo,
857 parents=parents,
857 parents=parents,
858 text=message,
858 text=message,
859 files=files,
859 files=files,
860 filectxfn=_filectxfn,
860 filectxfn=_filectxfn,
861 user=user,
861 user=user,
862 date=(commit_time, commit_timezone),
862 date=(commit_time, commit_timezone),
863 extra=extra)
863 extra=extra)
864
864
865 n = repo.commitctx(commit_ctx)
865 n = repo.commitctx(commit_ctx)
866 new_id = hex(n)
866 new_id = hex(n)
867
867
868 return new_id
868 return new_id
869
869
870 @reraise_safe_exceptions
870 @reraise_safe_exceptions
871 def pull(self, wire, url, commit_ids=None):
871 def pull(self, wire, url, commit_ids=None):
872 repo = self._factory.repo(wire)
872 repo = self._factory.repo(wire)
873 # Disable any prompts for this repo
873 # Disable any prompts for this repo
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
875
875
876 remote = peer(repo, {}, url)
876 remote = peer(repo, {}, url)
877 # Disable any prompts for this remote
877 # Disable any prompts for this remote
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
879
879
880 if commit_ids:
880 if commit_ids:
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
882
882
883 return exchange.pull(
883 return exchange.pull(
884 repo, remote, heads=commit_ids, force=None).cgresult
884 repo, remote, heads=commit_ids, force=None).cgresult
885
885
886 @reraise_safe_exceptions
886 @reraise_safe_exceptions
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
888 repo = self._factory.repo(wire)
888 repo = self._factory.repo(wire)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
890
890
891 # Mercurial internally has a lot of logic that checks ONLY if
891 # Mercurial internally has a lot of logic that checks ONLY if
892 # option is defined, we just pass those if they are defined then
892 # option is defined, we just pass those if they are defined then
893 opts = {}
893 opts = {}
894 if bookmark:
894 if bookmark:
895 opts['bookmark'] = bookmark
895 opts['bookmark'] = bookmark
896 if branch:
896 if branch:
897 opts['branch'] = branch
897 opts['branch'] = branch
898 if revision:
898 if revision:
899 opts['rev'] = revision
899 opts['rev'] = revision
900
900
901 commands.pull(baseui, repo, source, **opts)
901 commands.pull(baseui, repo, source, **opts)
902
902
903 @reraise_safe_exceptions
903 @reraise_safe_exceptions
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
905 repo = self._factory.repo(wire)
905 repo = self._factory.repo(wire)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
908 new_branch=push_branches)
908 new_branch=push_branches)
909
909
910 @reraise_safe_exceptions
910 @reraise_safe_exceptions
911 def strip(self, wire, revision, update, backup):
911 def strip(self, wire, revision, update, backup):
912 repo = self._factory.repo(wire)
912 repo = self._factory.repo(wire)
913 ctx = self._get_ctx(repo, revision)
913 ctx = self._get_ctx(repo, revision)
914 hgext_strip(
914 hgext_strip(
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
916
916
917 @reraise_safe_exceptions
917 @reraise_safe_exceptions
918 def get_unresolved_files(self, wire):
918 def get_unresolved_files(self, wire):
919 repo = self._factory.repo(wire)
919 repo = self._factory.repo(wire)
920
920
921 log.debug('Calculating unresolved files for repo: %s', repo)
921 log.debug('Calculating unresolved files for repo: %s', repo)
922 output = io.BytesIO()
922 output = io.BytesIO()
923
923
924 def write(data, **unused_kwargs):
924 def write(data, **unused_kwargs):
925 output.write(data)
925 output.write(data)
926
926
927 baseui = self._factory._create_config(wire['config'])
927 baseui = self._factory._create_config(wire['config'])
928 baseui.write = write
928 baseui.write = write
929
929
930 commands.resolve(baseui, repo, list=True)
930 commands.resolve(baseui, repo, list=True)
931 unresolved = output.getvalue().splitlines(0)
931 unresolved = output.getvalue().splitlines(0)
932 return unresolved
932 return unresolved
933
933
934 @reraise_safe_exceptions
934 @reraise_safe_exceptions
935 def merge(self, wire, revision):
935 def merge(self, wire, revision):
936 repo = self._factory.repo(wire)
936 repo = self._factory.repo(wire)
937 baseui = self._factory._create_config(wire['config'])
937 baseui = self._factory._create_config(wire['config'])
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939
939
940 # In case of sub repositories are used mercurial prompts the user in
940 # In case of sub repositories are used mercurial prompts the user in
941 # case of merge conflicts or different sub repository sources. By
941 # case of merge conflicts or different sub repository sources. By
942 # setting the interactive flag to `False` mercurial doesn't prompt the
942 # setting the interactive flag to `False` mercurial doesn't prompt the
943 # used but instead uses a default value.
943 # used but instead uses a default value.
944 repo.ui.setconfig('ui', 'interactive', False)
944 repo.ui.setconfig('ui', 'interactive', False)
945 commands.merge(baseui, repo, rev=revision)
945 commands.merge(baseui, repo, rev=revision)
946
946
947 @reraise_safe_exceptions
947 @reraise_safe_exceptions
948 def merge_state(self, wire):
948 def merge_state(self, wire):
949 repo = self._factory.repo(wire)
949 repo = self._factory.repo(wire)
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
951
951
952 # In case of sub repositories are used mercurial prompts the user in
952 # In case of sub repositories are used mercurial prompts the user in
953 # case of merge conflicts or different sub repository sources. By
953 # case of merge conflicts or different sub repository sources. By
954 # setting the interactive flag to `False` mercurial doesn't prompt the
954 # setting the interactive flag to `False` mercurial doesn't prompt the
955 # used but instead uses a default value.
955 # used but instead uses a default value.
956 repo.ui.setconfig('ui', 'interactive', False)
956 repo.ui.setconfig('ui', 'interactive', False)
957 ms = hg_merge.mergestate(repo)
957 ms = hg_merge.mergestate(repo)
958 return [x for x in ms.unresolved()]
958 return [x for x in ms.unresolved()]
959
959
960 @reraise_safe_exceptions
960 @reraise_safe_exceptions
961 def commit(self, wire, message, username, close_branch=False):
961 def commit(self, wire, message, username, close_branch=False):
962 repo = self._factory.repo(wire)
962 repo = self._factory.repo(wire)
963 baseui = self._factory._create_config(wire['config'])
963 baseui = self._factory._create_config(wire['config'])
964 repo.ui.setconfig('ui', 'username', username)
964 repo.ui.setconfig('ui', 'username', username)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
966
966
967 @reraise_safe_exceptions
967 @reraise_safe_exceptions
968 def rebase(self, wire, source=None, dest=None, abort=False):
968 def rebase(self, wire, source=None, dest=None, abort=False):
969 repo = self._factory.repo(wire)
969 repo = self._factory.repo(wire)
970 baseui = self._factory._create_config(wire['config'])
970 baseui = self._factory._create_config(wire['config'])
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
972 # In case of sub repositories are used mercurial prompts the user in
972 # In case of sub repositories are used mercurial prompts the user in
973 # case of merge conflicts or different sub repository sources. By
973 # case of merge conflicts or different sub repository sources. By
974 # setting the interactive flag to `False` mercurial doesn't prompt the
974 # setting the interactive flag to `False` mercurial doesn't prompt the
975 # used but instead uses a default value.
975 # used but instead uses a default value.
976 repo.ui.setconfig('ui', 'interactive', False)
976 repo.ui.setconfig('ui', 'interactive', False)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
978
978
979 @reraise_safe_exceptions
979 @reraise_safe_exceptions
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
981 repo = self._factory.repo(wire)
981 repo = self._factory.repo(wire)
982 ctx = self._get_ctx(repo, revision)
982 ctx = self._get_ctx(repo, revision)
983 node = ctx.node()
983 node = ctx.node()
984
984
985 date = (tag_time, tag_timezone)
985 date = (tag_time, tag_timezone)
986 try:
986 try:
987 hg_tag.tag(repo, name, node, message, local, user, date)
987 hg_tag.tag(repo, name, node, message, local, user, date)
988 except Abort as e:
988 except Abort as e:
989 log.exception("Tag operation aborted")
989 log.exception("Tag operation aborted")
990 # Exception can contain unicode which we convert
990 # Exception can contain unicode which we convert
991 raise exceptions.AbortException(e)(repr(e))
991 raise exceptions.AbortException(e)(repr(e))
992
992
993 @reraise_safe_exceptions
993 @reraise_safe_exceptions
994 def bookmark(self, wire, bookmark, revision=None):
994 def bookmark(self, wire, bookmark, revision=None):
995 repo = self._factory.repo(wire)
995 repo = self._factory.repo(wire)
996 baseui = self._factory._create_config(wire['config'])
996 baseui = self._factory._create_config(wire['config'])
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
998
998
999 @reraise_safe_exceptions
999 @reraise_safe_exceptions
1000 def install_hooks(self, wire, force=False):
1000 def install_hooks(self, wire, force=False):
1001 # we don't need any special hooks for Mercurial
1001 # we don't need any special hooks for Mercurial
1002 pass
1002 pass
1003
1003
1004 @reraise_safe_exceptions
1004 @reraise_safe_exceptions
1005 def get_hooks_info(self, wire):
1005 def get_hooks_info(self, wire):
1006 return {
1006 return {
1007 'pre_version': vcsserver.__version__,
1007 'pre_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
1009 }
1009 }
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto._capabilities = wrapper
39 lfproto._capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto._capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(orig, repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 return calc_capabilities(orig, repo, proto)
56 else:
56 else:
57 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
58 return orig(repo, proto)
58 return orig(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo, subrepoutil
65 from .hgcompat import subrepo, subrepoutil
66 from vcsserver.exceptions import SubrepoMergeException
66 from vcsserver.exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring this subrepository in the
73 ``ctx`` is the context referring this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False, missing=False):
90 def dirty(self, ignoreupdate=False, missing=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepoutil.state(self._ctx, self.ui)
100 substate = subrepoutil.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()()
119 raise SubrepoMergeException()()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
@@ -1,729 +1,729 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from http.client import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55
55
56 response_data = response.read()
56 response_data = response.read()
57
57
58 try:
58 try:
59 return json.loads(response_data)
59 return json.loads(response_data)
60 except Exception:
60 except Exception:
61 log.exception('Failed to decode hook response json data. '
61 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
62 'response_code:%s, raw_data:%s',
63 response.status, response_data)
63 response.status, response_data)
64 raise
64 raise
65
65
66 def _serialize(self, hook_name, extras):
66 def _serialize(self, hook_name, extras):
67 data = {
67 data = {
68 'method': hook_name,
68 'method': hook_name,
69 'extras': extras
69 'extras': extras
70 }
70 }
71 return json.dumps(data)
71 return json.dumps(data)
72
72
73
73
74 class HooksDummyClient(object):
74 class HooksDummyClient(object):
75 def __init__(self, hooks_module):
75 def __init__(self, hooks_module):
76 self._hooks_module = importlib.import_module(hooks_module)
76 self._hooks_module = importlib.import_module(hooks_module)
77
77
78 def __call__(self, hook_name, extras):
78 def __call__(self, hook_name, extras):
79 with self._hooks_module.Hooks() as hooks:
79 with self._hooks_module.Hooks() as hooks:
80 return getattr(hooks, hook_name)(extras)
80 return getattr(hooks, hook_name)(extras)
81
81
82
82
83 class HooksShadowRepoClient(object):
83 class HooksShadowRepoClient(object):
84
84
85 def __call__(self, hook_name, extras):
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
86 return {'output': '', 'status': 0}
87
87
88
88
89 class RemoteMessageWriter(object):
89 class RemoteMessageWriter(object):
90 """Writer base class."""
90 """Writer base class."""
91 def write(self, message):
91 def write(self, message):
92 raise NotImplementedError()
92 raise NotImplementedError()
93
93
94
94
95 class HgMessageWriter(RemoteMessageWriter):
95 class HgMessageWriter(RemoteMessageWriter):
96 """Writer that knows how to send messages to mercurial clients."""
96 """Writer that knows how to send messages to mercurial clients."""
97
97
98 def __init__(self, ui):
98 def __init__(self, ui):
99 self.ui = ui
99 self.ui = ui
100
100
101 def write(self, message):
101 def write(self, message):
102 # TODO: Check why the quiet flag is set by default.
102 # TODO: Check why the quiet flag is set by default.
103 old = self.ui.quiet
103 old = self.ui.quiet
104 self.ui.quiet = False
104 self.ui.quiet = False
105 self.ui.status(message.encode('utf-8'))
105 self.ui.status(message.encode('utf-8'))
106 self.ui.quiet = old
106 self.ui.quiet = old
107
107
108
108
109 class GitMessageWriter(RemoteMessageWriter):
109 class GitMessageWriter(RemoteMessageWriter):
110 """Writer that knows how to send messages to git clients."""
110 """Writer that knows how to send messages to git clients."""
111
111
112 def __init__(self, stdout=None):
112 def __init__(self, stdout=None):
113 self.stdout = stdout or sys.stdout
113 self.stdout = stdout or sys.stdout
114
114
115 def write(self, message):
115 def write(self, message):
116 self.stdout.write(message.encode('utf-8'))
116 self.stdout.write(message.encode('utf-8'))
117
117
118
118
119 class SvnMessageWriter(RemoteMessageWriter):
119 class SvnMessageWriter(RemoteMessageWriter):
120 """Writer that knows how to send messages to svn clients."""
120 """Writer that knows how to send messages to svn clients."""
121
121
122 def __init__(self, stderr=None):
122 def __init__(self, stderr=None):
123 # SVN needs data sent to stderr for back-to-client messaging
123 # SVN needs data sent to stderr for back-to-client messaging
124 self.stderr = stderr or sys.stderr
124 self.stderr = stderr or sys.stderr
125
125
126 def write(self, message):
126 def write(self, message):
127 self.stderr.write(message.encode('utf-8'))
127 self.stderr.write(message.encode('utf-8'))
128
128
129
129
130 def _handle_exception(result):
130 def _handle_exception(result):
131 exception_class = result.get('exception')
131 exception_class = result.get('exception')
132 exception_traceback = result.get('exception_traceback')
132 exception_traceback = result.get('exception_traceback')
133
133
134 if exception_traceback:
134 if exception_traceback:
135 log.error('Got traceback from remote call:%s', exception_traceback)
135 log.error('Got traceback from remote call:%s', exception_traceback)
136
136
137 if exception_class == 'HTTPLockedRC':
137 if exception_class == 'HTTPLockedRC':
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 elif exception_class == 'HTTPBranchProtected':
139 elif exception_class == 'HTTPBranchProtected':
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 elif exception_class == 'RepositoryError':
141 elif exception_class == 'RepositoryError':
142 raise exceptions.VcsException()(*result['exception_args'])
142 raise exceptions.VcsException()(*result['exception_args'])
143 elif exception_class:
143 elif exception_class:
144 raise Exception('Got remote exception "%s" with args "%s"' %
144 raise Exception('Got remote exception "%s" with args "%s"' %
145 (exception_class, result['exception_args']))
145 (exception_class, result['exception_args']))
146
146
147
147
148 def _get_hooks_client(extras):
148 def _get_hooks_client(extras):
149 hooks_uri = extras.get('hooks_uri')
149 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
151 if hooks_uri:
152 return HooksHttpClient(extras['hooks_uri'])
152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
154 return HooksShadowRepoClient()
155 else:
155 else:
156 return HooksDummyClient(extras['hooks_module'])
156 return HooksDummyClient(extras['hooks_module'])
157
157
158
158
159 def _call_hook(hook_name, extras, writer):
159 def _call_hook(hook_name, extras, writer):
160 hooks_client = _get_hooks_client(extras)
160 hooks_client = _get_hooks_client(extras)
161 log.debug('Hooks, using client:%s', hooks_client)
161 log.debug('Hooks, using client:%s', hooks_client)
162 result = hooks_client(hook_name, extras)
162 result = hooks_client(hook_name, extras)
163 log.debug('Hooks got result: %s', result)
163 log.debug('Hooks got result: %s', result)
164
164
165 _handle_exception(result)
165 _handle_exception(result)
166 writer.write(result['output'])
166 writer.write(result['output'])
167
167
168 return result['status']
168 return result['status']
169
169
170
170
171 def _extras_from_ui(ui):
171 def _extras_from_ui(ui):
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 if not hook_data:
173 if not hook_data:
174 # maybe it's inside environ ?
174 # maybe it's inside environ ?
175 env_hook_data = os.environ.get('RC_SCM_DATA')
175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 if env_hook_data:
176 if env_hook_data:
177 hook_data = env_hook_data
177 hook_data = env_hook_data
178
178
179 extras = {}
179 extras = {}
180 if hook_data:
180 if hook_data:
181 extras = json.loads(hook_data)
181 extras = json.loads(hook_data)
182 return extras
182 return extras
183
183
184
184
185 def _rev_range_hash(repo, node, check_heads=False):
185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
186 from vcsserver.hgcompat import get_ctx
187
187
188 commits = []
188 commits = []
189 revs = []
189 revs = []
190 start = get_ctx(repo, node).rev()
190 start = get_ctx(repo, node).rev()
191 end = len(repo)
191 end = len(repo)
192 for rev in range(start, end):
192 for rev in range(start, end):
193 revs.append(rev)
193 revs.append(rev)
194 ctx = get_ctx(repo, rev)
194 ctx = get_ctx(repo, rev)
195 commit_id = mercurial.node.hex(ctx.node())
195 commit_id = mercurial.node.hex(ctx.node())
196 branch = ctx.branch()
196 branch = ctx.branch()
197 commits.append((commit_id, branch))
197 commits.append((commit_id, branch))
198
198
199 parent_heads = []
199 parent_heads = []
200 if check_heads:
200 if check_heads:
201 parent_heads = _check_heads(repo, start, end, revs)
201 parent_heads = _check_heads(repo, start, end, revs)
202 return commits, parent_heads
202 return commits, parent_heads
203
203
204
204
205 def _check_heads(repo, start, end, commits):
205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
206 from vcsserver.hgcompat import get_ctx
207 changelog = repo.changelog
207 changelog = repo.changelog
208 parents = set()
208 parents = set()
209
209
210 for new_rev in commits:
210 for new_rev in commits:
211 for p in changelog.parentrevs(new_rev):
211 for p in changelog.parentrevs(new_rev):
212 if p == mercurial.node.nullrev:
212 if p == mercurial.node.nullrev:
213 continue
213 continue
214 if p < start:
214 if p < start:
215 parents.add(p)
215 parents.add(p)
216
216
217 for p in parents:
217 for p in parents:
218 branch = get_ctx(repo, p).branch()
218 branch = get_ctx(repo, p).branch()
219 # The heads descending from that parent, on the same branch
219 # The heads descending from that parent, on the same branch
220 parent_heads = set([p])
220 parent_heads = set([p])
221 reachable = set([p])
221 reachable = set([p])
222 for x in range(p + 1, end):
222 for x in range(p + 1, end):
223 if get_ctx(repo, x).branch() != branch:
223 if get_ctx(repo, x).branch() != branch:
224 continue
224 continue
225 for pp in changelog.parentrevs(x):
225 for pp in changelog.parentrevs(x):
226 if pp in reachable:
226 if pp in reachable:
227 reachable.add(x)
227 reachable.add(x)
228 parent_heads.discard(pp)
228 parent_heads.discard(pp)
229 parent_heads.add(x)
229 parent_heads.add(x)
230 # More than one head? Suggest merging
230 # More than one head? Suggest merging
231 if len(parent_heads) > 1:
231 if len(parent_heads) > 1:
232 return list(parent_heads)
232 return list(parent_heads)
233
233
234 return []
234 return []
235
235
236
236
237 def _get_git_env():
237 def _get_git_env():
238 env = {}
238 env = {}
239 for k, v in os.environ.items():
239 for k, v in os.environ.items():
240 if k.startswith('GIT'):
240 if k.startswith('GIT'):
241 env[k] = v
241 env[k] = v
242
242
243 # serialized version
243 # serialized version
244 return [(k, v) for k, v in env.items()]
244 return [(k, v) for k, v in env.items()]
245
245
246
246
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 env = {}
248 env = {}
249 for k, v in os.environ.items():
249 for k, v in os.environ.items():
250 if k.startswith('HG'):
250 if k.startswith('HG'):
251 env[k] = v
251 env[k] = v
252
252
253 env['HG_NODE'] = old_rev
253 env['HG_NODE'] = old_rev
254 env['HG_NODE_LAST'] = new_rev
254 env['HG_NODE_LAST'] = new_rev
255 env['HG_TXNID'] = txnid
255 env['HG_TXNID'] = txnid
256 env['HG_PENDING'] = repo_path
256 env['HG_PENDING'] = repo_path
257
257
258 return [(k, v) for k, v in env.items()]
258 return [(k, v) for k, v in env.items()]
259
259
260
260
261 def repo_size(ui, repo, **kwargs):
261 def repo_size(ui, repo, **kwargs):
262 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264
264
265
265
266 def pre_pull(ui, repo, **kwargs):
266 def pre_pull(ui, repo, **kwargs):
267 extras = _extras_from_ui(ui)
267 extras = _extras_from_ui(ui)
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269
269
270
270
271 def pre_pull_ssh(ui, repo, **kwargs):
271 def pre_pull_ssh(ui, repo, **kwargs):
272 extras = _extras_from_ui(ui)
272 extras = _extras_from_ui(ui)
273 if extras and extras.get('SSH'):
273 if extras and extras.get('SSH'):
274 return pre_pull(ui, repo, **kwargs)
274 return pre_pull(ui, repo, **kwargs)
275 return 0
275 return 0
276
276
277
277
278 def post_pull(ui, repo, **kwargs):
278 def post_pull(ui, repo, **kwargs):
279 extras = _extras_from_ui(ui)
279 extras = _extras_from_ui(ui)
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281
281
282
282
283 def post_pull_ssh(ui, repo, **kwargs):
283 def post_pull_ssh(ui, repo, **kwargs):
284 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
285 if extras and extras.get('SSH'):
285 if extras and extras.get('SSH'):
286 return post_pull(ui, repo, **kwargs)
286 return post_pull(ui, repo, **kwargs)
287 return 0
287 return 0
288
288
289
289
290 def pre_push(ui, repo, node=None, **kwargs):
290 def pre_push(ui, repo, node=None, **kwargs):
291 """
291 """
292 Mercurial pre_push hook
292 Mercurial pre_push hook
293 """
293 """
294 extras = _extras_from_ui(ui)
294 extras = _extras_from_ui(ui)
295 detect_force_push = extras.get('detect_force_push')
295 detect_force_push = extras.get('detect_force_push')
296
296
297 rev_data = []
297 rev_data = []
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 branches = collections.defaultdict(list)
299 branches = collections.defaultdict(list)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 for commit_id, branch in commits:
301 for commit_id, branch in commits:
302 branches[branch].append(commit_id)
302 branches[branch].append(commit_id)
303
303
304 for branch, commits in branches.items():
304 for branch, commits in branches.items():
305 old_rev = kwargs.get('node_last') or commits[0]
305 old_rev = kwargs.get('node_last') or commits[0]
306 rev_data.append({
306 rev_data.append({
307 'total_commits': len(commits),
307 'total_commits': len(commits),
308 'old_rev': old_rev,
308 'old_rev': old_rev,
309 'new_rev': commits[-1],
309 'new_rev': commits[-1],
310 'ref': '',
310 'ref': '',
311 'type': 'branch',
311 'type': 'branch',
312 'name': branch,
312 'name': branch,
313 })
313 })
314
314
315 for push_ref in rev_data:
315 for push_ref in rev_data:
316 push_ref['multiple_heads'] = _heads
316 push_ref['multiple_heads'] = _heads
317
317
318 repo_path = os.path.join(
318 repo_path = os.path.join(
319 extras.get('repo_store', ''), extras.get('repository', ''))
319 extras.get('repo_store', ''), extras.get('repository', ''))
320 push_ref['hg_env'] = _get_hg_env(
320 push_ref['hg_env'] = _get_hg_env(
321 old_rev=push_ref['old_rev'],
321 old_rev=push_ref['old_rev'],
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 repo_path=repo_path)
323 repo_path=repo_path)
324
324
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 extras['commit_ids'] = rev_data
326 extras['commit_ids'] = rev_data
327
327
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329
329
330
330
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 extras = _extras_from_ui(ui)
332 extras = _extras_from_ui(ui)
333 if extras.get('SSH'):
333 if extras.get('SSH'):
334 return pre_push(ui, repo, node, **kwargs)
334 return pre_push(ui, repo, node, **kwargs)
335
335
336 return 0
336 return 0
337
337
338
338
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH: permission check only.

    Allows the push (0) unless the connection is marked as SSH and the
    stored permission is neither ``repository.write`` nor
    ``repository.admin``; then 1 (deny) is returned.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code blocks the push
    return 1
354
354
355
355
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook.

    Collects the pushed commit ids plus the touched branches, bookmarks
    and tags, and forwards them to the ``post_push`` RhodeCode hook.
    """
    extras = _extras_from_ui(ui)

    commits, _heads = _rev_range_hash(repo, node)
    commit_ids = [commit_id for commit_id, _branch in commits]
    # de-duplicate branch names while preserving first-seen order
    branches = list(dict.fromkeys(branch for _commit_id, branch in commits))
    # bookmarks are propagated from the pushkey hook via the ui object
    bookmarks = getattr(ui, '_rc_pushkey_branches', [])
    tags = []

    extras['hook_type'] = kwargs.get('hooktype', 'post_push')
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
385
385
386
386
def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH.

    Runs the regular ``post_push`` hook only for SSH-flagged
    connections; all other transports are a no-op returning 0.
    """
    if not _extras_from_ui(ui).get('SSH'):
        return 0
    return post_push(ui, repo, node, **kwargs)
394
394
395
395
def key_push(ui, repo, **kwargs):
    """
    Mercurial pushkey hook.

    When a bookmark is created or moved (``new`` != '0' in the
    ``bookmarks`` namespace), the bookmark list of the target changeset
    is stashed on the ui object so the later post_push hook can report it.
    """
    from vcsserver.hgcompat import get_ctx

    is_bookmark_update = (
        kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks')
    if is_bookmark_update:
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
    return
402
402
403
403
404 # backward compat
404 # backward compat
405 log_pull_action = post_pull
405 log_pull_action = post_pull
406
406
407 # backward compat
407 # backward compat
408 log_push_action = post_push
408 log_push_action = post_push
409
409
410
410
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: kept only for backward compatibility.

    Required while installed git hooks have not been upgraded; the
    function intentionally does nothing.
    """
    return None
418
418
419
419
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: kept only for backward compatibility.

    Required while installed git hooks have not been upgraded; the
    function intentionally does nothing.
    """
    return None
427
427
428
428
# Lightweight (status, output) container returned by the git pull hooks.
HookResponse = collections.namedtuple('HookResponse', 'status output')
430
430
431
431
def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    # NOTE(py3): the error message written below is str, so the buffer
    # must be text-mode; io.BytesIO would raise TypeError on write.
    stdout = io.StringIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        status = 128
        stdout.write('ERROR: %s\n' % str(error))

    return HookResponse(status, stdout.getvalue())
453
453
454
454
def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    # NOTE(py3): the error message written below is str, so the buffer
    # must be text-mode; io.BytesIO would raise TypeError on write.
    stdout = io.StringIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        status = 128
        stdout.write('ERROR: %s\n' % error)

    return HookResponse(status, stdout.getvalue())
476
476
477
477
478 def _parse_git_ref_lines(revision_lines):
478 def _parse_git_ref_lines(revision_lines):
479 rev_data = []
479 rev_data = []
480 for revision_line in revision_lines or []:
480 for revision_line in revision_lines or []:
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 ref_data = ref.split('/', 2)
482 ref_data = ref.split('/', 2)
483 if ref_data[1] in ('tags', 'heads'):
483 if ref_data[1] in ('tags', 'heads'):
484 rev_data.append({
484 rev_data.append({
485 # NOTE(marcink):
485 # NOTE(marcink):
486 # we're unable to tell total_commits for git at this point
486 # we're unable to tell total_commits for git at this point
487 # but we set the variable for consistency with GIT
487 # but we set the variable for consistency with GIT
488 'total_commits': -1,
488 'total_commits': -1,
489 'old_rev': old_rev,
489 'old_rev': old_rev,
490 'new_rev': new_rev,
490 'new_rev': new_rev,
491 'ref': ref,
491 'ref': ref,
492 'type': ref_data[1],
492 'type': ref_data[1],
493 'name': ref_data[2],
493 'name': ref_data[2],
494 })
494 })
495 return rev_data
495 return rev_data
496
496
497
497
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    :param revision_lines: ``old new ref`` lines from git pre-receive stdin
    :param env: process environment holding serialized RC_SCM_DATA extras

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0

    empty_commit_id = '0' * 40
    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        is_head = push_ref['type'] == 'heads'
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if not is_head or new_branch or delete_branch:
            continue

        # commits reachable from the old tip but not from the new one;
        # any output means history was rewritten, i.e. a forced push
        cmd = [settings.GIT_EXECUTABLE, 'rev-list',
               push_ref['old_rev'], '^{}'.format(push_ref['new_rev'])]
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        if stdout:
            push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())
540
540
541
541
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    :param revision_lines: ``old new ref`` lines from git post-receive stdin
    :param env: process environment holding serialized RC_SCM_DATA extras

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # Fix up head revision if needed: a bare repo freshly
                # created may have HEAD pointing at a missing ref
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
                           'refs/heads/%s' % push_ref['name']]
                    print("Setting default branch to %s" % push_ref['name'])
                    subprocessio.run_command(cmd, env=os.environ.copy())

                # list all other heads so we can compute only the commits
                # that are new to this branch
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                # regular update of an existing branch
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter())
        except Exception:
            # repo_size reporting is best-effort; never fail the push for
            # it. NOTE(fix): was a bare ``except:`` which would also
            # swallow SystemExit/KeyboardInterrupt.
            pass

    return _call_hook('post_push', extras, GitMessageWriter())
632
632
633
633
def _get_extras_from_txn_id(path, txn_id):
    """
    Recover the ``rc-scm-extras`` dict stored as a revprop on an open
    svn transaction; returns {} when svnlook fails or the payload
    cannot be decoded.
    """
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-t', txn_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, _stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        return json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')
        return {}
647
647
648
648
def _get_extras_from_commit_id(commit_id, path):
    """
    Recover the ``rc-scm-extras`` dict stored as a revprop on a
    committed revision; returns {} when svnlook fails or the payload
    cannot be decoded.
    """
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-r', commit_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, _stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        return json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')
        return {}
662
662
663
663
def svn_pre_commit(repo_path, commit_data, env):
    """
    Subversion pre-commit hook dispatching to the RhodeCode pre_push hook.

    ``commit_data`` is a (path, txn_id) pair; extras come from the
    RC_SCM_DATA env variable or, as a fallback, from the transaction's
    stored revprops.
    """
    path, txn_id = commit_data

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
        if not extras:
            return 0

    extras.update({
        'hook_type': 'pre_commit',
        'commit_ids': [txn_id],
        'txn_id': txn_id,
        'new_refs': {
            'total_commits': 1,
            'branches': [],
            'bookmarks': [],
            'tags': [],
        },
    })

    return _call_hook('pre_push', extras, SvnMessageWriter())
688
688
689
689
def svn_post_commit(repo_path, commit_data, env):
    """
    Subversion post-commit hook dispatching to the RhodeCode post_push hook.

    :param commit_data: (path, rev, txn_id) triple; older hook scripts
        pass only (path, rev), in which case txn_id is unavailable.
    :raises ValueError: when commit_data has an unexpected shape.
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        # NOTE(fix): previously this fell through with path/commit_id
        # unbound, surfacing as a confusing NameError further down
        raise ValueError(
            'Expected commit_data of length 2 or 3, got %r' % (commit_data,))

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
        if not extras:
            return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            # repo_size reporting is best-effort only
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,791 +1,791 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib.error import URLError
23 import urlparse
23 import urllib.parse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import io
27 import urllib
27 import urllib.request, urllib.parse, urllib.error
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39 from vcsserver.vcs_base import RemoteBase
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 svn_compatible_versions_map = {
44 svn_compatible_versions_map = {
45 'pre-1.4-compatible': '1.3',
45 'pre-1.4-compatible': '1.3',
46 'pre-1.5-compatible': '1.4',
46 'pre-1.5-compatible': '1.4',
47 'pre-1.6-compatible': '1.5',
47 'pre-1.6-compatible': '1.5',
48 'pre-1.8-compatible': '1.7',
48 'pre-1.8-compatible': '1.7',
49 'pre-1.9-compatible': '1.8',
49 'pre-1.9-compatible': '1.8',
50 }
50 }
51
51
52 current_compatible_version = '1.12'
52 current_compatible_version = '1.12'
53
53
54
54
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Exceptions already tagged with ``_vcs_kind`` are re-raised as-is;
    anything else is logged and converted to
    ``exceptions.UnhandledException``.
    """
    # local import keeps this block self-contained; stdlib, negligible cost
    import functools

    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped callable
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e))
            raise
    return wrapper
66
66
67
67
class SubversionFactory(RepoFactory):
    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        # canonical form of the on-disk path, as libsvn expects it
        path = svn.core.svn_path_canonicalize(wire['path'])
        if not create:
            repo = svn.repos.open(path)
        else:
            fs_config = {'compatible-version': current_compatible_version}
            if compatible_version:
                # accept either a symbolic name from the map or an
                # explicit version string
                fs_config['compatible-version'] = (
                    svn_compatible_versions_map.get(compatible_version)
                    or compatible_version)

            log.debug('Create SVN repo with config "%s"', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)

        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
95
95
96
96
97 NODE_TYPE_MAPPING = {
97 NODE_TYPE_MAPPING = {
98 svn.core.svn_node_file: 'file',
98 svn.core.svn_node_file: 'file',
99 svn.core.svn_node_dir: 'dir',
99 svn.core.svn_node_dir: 'dir',
100 }
100 }
101
101
102
102
103 class SvnRemote(RemoteBase):
103 class SvnRemote(RemoteBase):
104
104
105 def __init__(self, factory, hg_factory=None):
105 def __init__(self, factory, hg_factory=None):
106 self._factory = factory
106 self._factory = factory
107 # TODO: Remove once we do not use internal Mercurial objects anymore
107 # TODO: Remove once we do not use internal Mercurial objects anymore
108 # for subversion
108 # for subversion
109 self._hg_factory = hg_factory
109 self._hg_factory = hg_factory
110
110
111 @reraise_safe_exceptions
111 @reraise_safe_exceptions
112 def discover_svn_version(self):
112 def discover_svn_version(self):
113 try:
113 try:
114 import svn.core
114 import svn.core
115 svn_ver = svn.core.SVN_VERSION
115 svn_ver = svn.core.SVN_VERSION
116 except ImportError:
116 except ImportError:
117 svn_ver = None
117 svn_ver = None
118 return svn_ver
118 return svn_ver
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def is_empty(self, wire):
121 def is_empty(self, wire):
122
122
123 try:
123 try:
124 return self.lookup(wire, -1) == 0
124 return self.lookup(wire, -1) == 0
125 except Exception:
125 except Exception:
126 log.exception("failed to read object_store")
126 log.exception("failed to read object_store")
127 return False
127 return False
128
128
129 def check_url(self, url, config_items):
129 def check_url(self, url, config_items):
130 # this can throw exception if not installed, but we detect this
130 # this can throw exception if not installed, but we detect this
131 from hgsubversion import svnrepo
131 from hgsubversion import svnrepo
132
132
133 baseui = self._hg_factory._create_config(config_items)
133 baseui = self._hg_factory._create_config(config_items)
134 # uuid function get's only valid UUID from proper repo, else
134 # uuid function get's only valid UUID from proper repo, else
135 # throws exception
135 # throws exception
136 try:
136 try:
137 svnrepo.svnremoterepo(baseui, url).svn.uuid
137 svnrepo.svnremoterepo(baseui, url).svn.uuid
138 except Exception:
138 except Exception:
139 tb = traceback.format_exc()
139 tb = traceback.format_exc()
140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
141 raise URLError(
141 raise URLError(
142 '"%s" is not a valid Subversion source url.' % (url, ))
142 '"%s" is not a valid Subversion source url.' % (url, ))
143 return True
143 return True
144
144
145 def is_path_valid_repository(self, wire, path):
145 def is_path_valid_repository(self, wire, path):
146
146
147 # NOTE(marcink): short circuit the check for SVN repo
147 # NOTE(marcink): short circuit the check for SVN repo
148 # the repos.open might be expensive to check, but we have one cheap
148 # the repos.open might be expensive to check, but we have one cheap
149 # pre condition that we can use, to check for 'format' file
149 # pre condition that we can use, to check for 'format' file
150
150
151 if not os.path.isfile(os.path.join(path, 'format')):
151 if not os.path.isfile(os.path.join(path, 'format')):
152 return False
152 return False
153
153
154 try:
154 try:
155 svn.repos.open(path)
155 svn.repos.open(path)
156 except svn.core.SubversionException:
156 except svn.core.SubversionException:
157 tb = traceback.format_exc()
157 tb = traceback.format_exc()
158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
159 return False
159 return False
160 return True
160 return True
161
161
162 @reraise_safe_exceptions
162 @reraise_safe_exceptions
163 def verify(self, wire,):
163 def verify(self, wire,):
164 repo_path = wire['path']
164 repo_path = wire['path']
165 if not self.is_path_valid_repository(wire, repo_path):
165 if not self.is_path_valid_repository(wire, repo_path):
166 raise Exception(
166 raise Exception(
167 "Path %s is not a valid Subversion repository." % repo_path)
167 "Path %s is not a valid Subversion repository." % repo_path)
168
168
169 cmd = ['svnadmin', 'info', repo_path]
169 cmd = ['svnadmin', 'info', repo_path]
170 stdout, stderr = subprocessio.run_command(cmd)
170 stdout, stderr = subprocessio.run_command(cmd)
171 return stdout
171 return stdout
172
172
173 def lookup(self, wire, revision):
173 def lookup(self, wire, revision):
174 if revision not in [-1, None, 'HEAD']:
174 if revision not in [-1, None, 'HEAD']:
175 raise NotImplementedError
175 raise NotImplementedError
176 repo = self._factory.repo(wire)
176 repo = self._factory.repo(wire)
177 fs_ptr = svn.repos.fs(repo)
177 fs_ptr = svn.repos.fs(repo)
178 head = svn.fs.youngest_rev(fs_ptr)
178 head = svn.fs.youngest_rev(fs_ptr)
179 return head
179 return head
180
180
181 def lookup_interval(self, wire, start_ts, end_ts):
181 def lookup_interval(self, wire, start_ts, end_ts):
182 repo = self._factory.repo(wire)
182 repo = self._factory.repo(wire)
183 fsobj = svn.repos.fs(repo)
183 fsobj = svn.repos.fs(repo)
184 start_rev = None
184 start_rev = None
185 end_rev = None
185 end_rev = None
186 if start_ts:
186 if start_ts:
187 start_ts_svn = apr_time_t(start_ts)
187 start_ts_svn = apr_time_t(start_ts)
188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
189 else:
189 else:
190 start_rev = 1
190 start_rev = 1
191 if end_ts:
191 if end_ts:
192 end_ts_svn = apr_time_t(end_ts)
192 end_ts_svn = apr_time_t(end_ts)
193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
194 else:
194 else:
195 end_rev = svn.fs.youngest_rev(fsobj)
195 end_rev = svn.fs.youngest_rev(fsobj)
196 return start_rev, end_rev
196 return start_rev, end_rev
197
197
198 def revision_properties(self, wire, revision):
198 def revision_properties(self, wire, revision):
199
199
200 cache_on, context_uid, repo_id = self._cache_on(wire)
200 cache_on, context_uid, repo_id = self._cache_on(wire)
201 @self.region.conditional_cache_on_arguments(condition=cache_on)
201 @self.region.conditional_cache_on_arguments(condition=cache_on)
202 def _revision_properties(_repo_id, _revision):
202 def _revision_properties(_repo_id, _revision):
203 repo = self._factory.repo(wire)
203 repo = self._factory.repo(wire)
204 fs_ptr = svn.repos.fs(repo)
204 fs_ptr = svn.repos.fs(repo)
205 return svn.fs.revision_proplist(fs_ptr, revision)
205 return svn.fs.revision_proplist(fs_ptr, revision)
206 return _revision_properties(repo_id, revision)
206 return _revision_properties(repo_id, revision)
207
207
208 def revision_changes(self, wire, revision):
208 def revision_changes(self, wire, revision):
209
209
210 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
211 fsobj = svn.repos.fs(repo)
211 fsobj = svn.repos.fs(repo)
212 rev_root = svn.fs.revision_root(fsobj, revision)
212 rev_root = svn.fs.revision_root(fsobj, revision)
213
213
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 base_dir = ""
216 base_dir = ""
217 send_deltas = False
217 send_deltas = False
218 svn.repos.replay2(
218 svn.repos.replay2(
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 editor_ptr, editor_baton, None)
220 editor_ptr, editor_baton, None)
221
221
222 added = []
222 added = []
223 changed = []
223 changed = []
224 removed = []
224 removed = []
225
225
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 for path, change in editor.changes.iteritems():
227 for path, change in editor.changes.items():
228 # TODO: Decide what to do with directory nodes. Subversion can add
228 # TODO: Decide what to do with directory nodes. Subversion can add
229 # empty directories.
229 # empty directories.
230
230
231 if change.item_kind == svn.core.svn_node_dir:
231 if change.item_kind == svn.core.svn_node_dir:
232 continue
232 continue
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 added.append(path)
234 added.append(path)
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 svn.repos.CHANGE_ACTION_REPLACE]:
236 svn.repos.CHANGE_ACTION_REPLACE]:
237 changed.append(path)
237 changed.append(path)
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 removed.append(path)
239 removed.append(path)
240 else:
240 else:
241 raise NotImplementedError(
241 raise NotImplementedError(
242 "Action %s not supported on path %s" % (
242 "Action %s not supported on path %s" % (
243 change.action, path))
243 change.action, path))
244
244
245 changes = {
245 changes = {
246 'added': added,
246 'added': added,
247 'changed': changed,
247 'changed': changed,
248 'removed': removed,
248 'removed': removed,
249 }
249 }
250 return changes
250 return changes
251
251
252 @reraise_safe_exceptions
252 @reraise_safe_exceptions
253 def node_history(self, wire, path, revision, limit):
253 def node_history(self, wire, path, revision, limit):
254 cache_on, context_uid, repo_id = self._cache_on(wire)
254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 cross_copies = False
257 cross_copies = False
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 fsobj = svn.repos.fs(repo)
259 fsobj = svn.repos.fs(repo)
260 rev_root = svn.fs.revision_root(fsobj, revision)
260 rev_root = svn.fs.revision_root(fsobj, revision)
261
261
262 history_revisions = []
262 history_revisions = []
263 history = svn.fs.node_history(rev_root, path)
263 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.history_prev(history, cross_copies)
264 history = svn.fs.history_prev(history, cross_copies)
265 while history:
265 while history:
266 __, node_revision = svn.fs.history_location(history)
266 __, node_revision = svn.fs.history_location(history)
267 history_revisions.append(node_revision)
267 history_revisions.append(node_revision)
268 if limit and len(history_revisions) >= limit:
268 if limit and len(history_revisions) >= limit:
269 break
269 break
270 history = svn.fs.history_prev(history, cross_copies)
270 history = svn.fs.history_prev(history, cross_copies)
271 return history_revisions
271 return history_revisions
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273
273
274 def node_properties(self, wire, path, revision):
274 def node_properties(self, wire, path, revision):
275 cache_on, context_uid, repo_id = self._cache_on(wire)
275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 @self.region.conditional_cache_on_arguments(condition=cache_on)
276 @self.region.conditional_cache_on_arguments(condition=cache_on)
277 def _node_properties(_repo_id, _path, _revision):
277 def _node_properties(_repo_id, _path, _revision):
278 repo = self._factory.repo(wire)
278 repo = self._factory.repo(wire)
279 fsobj = svn.repos.fs(repo)
279 fsobj = svn.repos.fs(repo)
280 rev_root = svn.fs.revision_root(fsobj, revision)
280 rev_root = svn.fs.revision_root(fsobj, revision)
281 return svn.fs.node_proplist(rev_root, path)
281 return svn.fs.node_proplist(rev_root, path)
282 return _node_properties(repo_id, path, revision)
282 return _node_properties(repo_id, path, revision)
283
283
284 def file_annotate(self, wire, path, revision):
284 def file_annotate(self, wire, path, revision):
285 abs_path = 'file://' + urllib.pathname2url(
285 abs_path = 'file://' + urllib.request.pathname2url(
286 vcspath.join(wire['path'], path))
286 vcspath.join(wire['path'], path))
287 file_uri = svn.core.svn_path_canonicalize(abs_path)
287 file_uri = svn.core.svn_path_canonicalize(abs_path)
288
288
289 start_rev = svn_opt_revision_value_t(0)
289 start_rev = svn_opt_revision_value_t(0)
290 peg_rev = svn_opt_revision_value_t(revision)
290 peg_rev = svn_opt_revision_value_t(revision)
291 end_rev = peg_rev
291 end_rev = peg_rev
292
292
293 annotations = []
293 annotations = []
294
294
295 def receiver(line_no, revision, author, date, line, pool):
295 def receiver(line_no, revision, author, date, line, pool):
296 annotations.append((line_no, revision, line))
296 annotations.append((line_no, revision, line))
297
297
298 # TODO: Cannot use blame5, missing typemap function in the swig code
298 # TODO: Cannot use blame5, missing typemap function in the swig code
299 try:
299 try:
300 svn.client.blame2(
300 svn.client.blame2(
301 file_uri, peg_rev, start_rev, end_rev,
301 file_uri, peg_rev, start_rev, end_rev,
302 receiver, svn.client.create_context())
302 receiver, svn.client.create_context())
303 except svn.core.SubversionException as exc:
303 except svn.core.SubversionException as exc:
304 log.exception("Error during blame operation.")
304 log.exception("Error during blame operation.")
305 raise Exception(
305 raise Exception(
306 "Blame not supported or file does not exist at path %s. "
306 "Blame not supported or file does not exist at path %s. "
307 "Error %s." % (path, exc))
307 "Error %s." % (path, exc))
308
308
309 return annotations
309 return annotations
310
310
311 def get_node_type(self, wire, path, revision=None):
311 def get_node_type(self, wire, path, revision=None):
312
312
313 cache_on, context_uid, repo_id = self._cache_on(wire)
313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 def _get_node_type(_repo_id, _path, _revision):
315 def _get_node_type(_repo_id, _path, _revision):
316 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
317 fs_ptr = svn.repos.fs(repo)
317 fs_ptr = svn.repos.fs(repo)
318 if _revision is None:
318 if _revision is None:
319 _revision = svn.fs.youngest_rev(fs_ptr)
319 _revision = svn.fs.youngest_rev(fs_ptr)
320 root = svn.fs.revision_root(fs_ptr, _revision)
320 root = svn.fs.revision_root(fs_ptr, _revision)
321 node = svn.fs.check_path(root, path)
321 node = svn.fs.check_path(root, path)
322 return NODE_TYPE_MAPPING.get(node, None)
322 return NODE_TYPE_MAPPING.get(node, None)
323 return _get_node_type(repo_id, path, revision)
323 return _get_node_type(repo_id, path, revision)
324
324
325 def get_nodes(self, wire, path, revision=None):
325 def get_nodes(self, wire, path, revision=None):
326
326
327 cache_on, context_uid, repo_id = self._cache_on(wire)
327 cache_on, context_uid, repo_id = self._cache_on(wire)
328 @self.region.conditional_cache_on_arguments(condition=cache_on)
328 @self.region.conditional_cache_on_arguments(condition=cache_on)
329 def _get_nodes(_repo_id, _path, _revision):
329 def _get_nodes(_repo_id, _path, _revision):
330 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
331 fsobj = svn.repos.fs(repo)
331 fsobj = svn.repos.fs(repo)
332 if _revision is None:
332 if _revision is None:
333 _revision = svn.fs.youngest_rev(fsobj)
333 _revision = svn.fs.youngest_rev(fsobj)
334 root = svn.fs.revision_root(fsobj, _revision)
334 root = svn.fs.revision_root(fsobj, _revision)
335 entries = svn.fs.dir_entries(root, path)
335 entries = svn.fs.dir_entries(root, path)
336 result = []
336 result = []
337 for entry_path, entry_info in entries.iteritems():
337 for entry_path, entry_info in entries.items():
338 result.append(
338 result.append(
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 return result
340 return result
341 return _get_nodes(repo_id, path, revision)
341 return _get_nodes(repo_id, path, revision)
342
342
343 def get_file_content(self, wire, path, rev=None):
343 def get_file_content(self, wire, path, rev=None):
344 repo = self._factory.repo(wire)
344 repo = self._factory.repo(wire)
345 fsobj = svn.repos.fs(repo)
345 fsobj = svn.repos.fs(repo)
346 if rev is None:
346 if rev is None:
347 rev = svn.fs.youngest_revision(fsobj)
347 rev = svn.fs.youngest_revision(fsobj)
348 root = svn.fs.revision_root(fsobj, rev)
348 root = svn.fs.revision_root(fsobj, rev)
349 content = svn.core.Stream(svn.fs.file_contents(root, path))
349 content = svn.core.Stream(svn.fs.file_contents(root, path))
350 return content.read()
350 return content.read()
351
351
352 def get_file_size(self, wire, path, revision=None):
352 def get_file_size(self, wire, path, revision=None):
353
353
354 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 @self.region.conditional_cache_on_arguments(condition=cache_on)
355 @self.region.conditional_cache_on_arguments(condition=cache_on)
356 def _get_file_size(_repo_id, _path, _revision):
356 def _get_file_size(_repo_id, _path, _revision):
357 repo = self._factory.repo(wire)
357 repo = self._factory.repo(wire)
358 fsobj = svn.repos.fs(repo)
358 fsobj = svn.repos.fs(repo)
359 if _revision is None:
359 if _revision is None:
360 _revision = svn.fs.youngest_revision(fsobj)
360 _revision = svn.fs.youngest_revision(fsobj)
361 root = svn.fs.revision_root(fsobj, _revision)
361 root = svn.fs.revision_root(fsobj, _revision)
362 size = svn.fs.file_length(root, path)
362 size = svn.fs.file_length(root, path)
363 return size
363 return size
364 return _get_file_size(repo_id, path, revision)
364 return _get_file_size(repo_id, path, revision)
365
365
366 def create_repository(self, wire, compatible_version=None):
366 def create_repository(self, wire, compatible_version=None):
367 log.info('Creating Subversion repository in path "%s"', wire['path'])
367 log.info('Creating Subversion repository in path "%s"', wire['path'])
368 self._factory.repo(wire, create=True,
368 self._factory.repo(wire, create=True,
369 compatible_version=compatible_version)
369 compatible_version=compatible_version)
370
370
371 def get_url_and_credentials(self, src_url):
371 def get_url_and_credentials(self, src_url):
372 obj = urlparse.urlparse(src_url)
372 obj = urllib.parse.urlparse(src_url)
373 username = obj.username or None
373 username = obj.username or None
374 password = obj.password or None
374 password = obj.password or None
375 return username, password, src_url
375 return username, password, src_url
376
376
377 def import_remote_repository(self, wire, src_url):
377 def import_remote_repository(self, wire, src_url):
378 repo_path = wire['path']
378 repo_path = wire['path']
379 if not self.is_path_valid_repository(wire, repo_path):
379 if not self.is_path_valid_repository(wire, repo_path):
380 raise Exception(
380 raise Exception(
381 "Path %s is not a valid Subversion repository." % repo_path)
381 "Path %s is not a valid Subversion repository." % repo_path)
382
382
383 username, password, src_url = self.get_url_and_credentials(src_url)
383 username, password, src_url = self.get_url_and_credentials(src_url)
384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
385 '--trust-server-cert-failures=unknown-ca']
385 '--trust-server-cert-failures=unknown-ca']
386 if username and password:
386 if username and password:
387 rdump_cmd += ['--username', username, '--password', password]
387 rdump_cmd += ['--username', username, '--password', password]
388 rdump_cmd += [src_url]
388 rdump_cmd += [src_url]
389
389
390 rdump = subprocess.Popen(
390 rdump = subprocess.Popen(
391 rdump_cmd,
391 rdump_cmd,
392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
393 load = subprocess.Popen(
393 load = subprocess.Popen(
394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
395
395
396 # TODO: johbo: This can be a very long operation, might be better
396 # TODO: johbo: This can be a very long operation, might be better
397 # to track some kind of status and provide an api to check if the
397 # to track some kind of status and provide an api to check if the
398 # import is done.
398 # import is done.
399 rdump.wait()
399 rdump.wait()
400 load.wait()
400 load.wait()
401
401
402 log.debug('Return process ended with code: %s', rdump.returncode)
402 log.debug('Return process ended with code: %s', rdump.returncode)
403 if rdump.returncode != 0:
403 if rdump.returncode != 0:
404 errors = rdump.stderr.read()
404 errors = rdump.stderr.read()
405 log.error('svnrdump dump failed: statuscode %s: message: %s',
405 log.error('svnrdump dump failed: statuscode %s: message: %s',
406 rdump.returncode, errors)
406 rdump.returncode, errors)
407 reason = 'UNKNOWN'
407 reason = 'UNKNOWN'
408 if 'svnrdump: E230001:' in errors:
408 if 'svnrdump: E230001:' in errors:
409 reason = 'INVALID_CERTIFICATE'
409 reason = 'INVALID_CERTIFICATE'
410
410
411 if reason == 'UNKNOWN':
411 if reason == 'UNKNOWN':
412 reason = 'UNKNOWN:{}'.format(errors)
412 reason = 'UNKNOWN:{}'.format(errors)
413 raise Exception(
413 raise Exception(
414 'Failed to dump the remote repository from %s. Reason:%s' % (
414 'Failed to dump the remote repository from %s. Reason:%s' % (
415 src_url, reason))
415 src_url, reason))
416 if load.returncode != 0:
416 if load.returncode != 0:
417 raise Exception(
417 raise Exception(
418 'Failed to load the dump of remote repository from %s.' %
418 'Failed to load the dump of remote repository from %s.' %
419 (src_url, ))
419 (src_url, ))
420
420
421 def commit(self, wire, message, author, timestamp, updated, removed):
421 def commit(self, wire, message, author, timestamp, updated, removed):
422 assert isinstance(message, str)
422 assert isinstance(message, str)
423 assert isinstance(author, str)
423 assert isinstance(author, str)
424
424
425 repo = self._factory.repo(wire)
425 repo = self._factory.repo(wire)
426 fsobj = svn.repos.fs(repo)
426 fsobj = svn.repos.fs(repo)
427
427
428 rev = svn.fs.youngest_rev(fsobj)
428 rev = svn.fs.youngest_rev(fsobj)
429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
430 txn_root = svn.fs.txn_root(txn)
430 txn_root = svn.fs.txn_root(txn)
431
431
432 for node in updated:
432 for node in updated:
433 TxnNodeProcessor(node, txn_root).update()
433 TxnNodeProcessor(node, txn_root).update()
434 for node in removed:
434 for node in removed:
435 TxnNodeProcessor(node, txn_root).remove()
435 TxnNodeProcessor(node, txn_root).remove()
436
436
437 commit_id = svn.repos.fs_commit_txn(repo, txn)
437 commit_id = svn.repos.fs_commit_txn(repo, txn)
438
438
439 if timestamp:
439 if timestamp:
440 apr_time = apr_time_t(timestamp)
440 apr_time = apr_time_t(timestamp)
441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
443
443
444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
445 return commit_id
445 return commit_id
446
446
447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
448 ignore_whitespace=False, context=3):
448 ignore_whitespace=False, context=3):
449
449
450 wire.update(cache=False)
450 wire.update(cache=False)
451 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
452 diff_creator = SvnDiffer(
452 diff_creator = SvnDiffer(
453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
454 try:
454 try:
455 return diff_creator.generate_diff()
455 return diff_creator.generate_diff()
456 except svn.core.SubversionException as e:
456 except svn.core.SubversionException as e:
457 log.exception(
457 log.exception(
458 "Error during diff operation operation. "
458 "Error during diff operation operation. "
459 "Path might not exist %s, %s" % (path1, path2))
459 "Path might not exist %s, %s" % (path1, path2))
460 return ""
460 return ""
461
461
462 @reraise_safe_exceptions
462 @reraise_safe_exceptions
463 def is_large_file(self, wire, path):
463 def is_large_file(self, wire, path):
464 return False
464 return False
465
465
466 @reraise_safe_exceptions
466 @reraise_safe_exceptions
467 def is_binary(self, wire, rev, path):
467 def is_binary(self, wire, rev, path):
468 cache_on, context_uid, repo_id = self._cache_on(wire)
468 cache_on, context_uid, repo_id = self._cache_on(wire)
469
469
470 @self.region.conditional_cache_on_arguments(condition=cache_on)
470 @self.region.conditional_cache_on_arguments(condition=cache_on)
471 def _is_binary(_repo_id, _rev, _path):
471 def _is_binary(_repo_id, _rev, _path):
472 raw_bytes = self.get_file_content(wire, path, rev)
472 raw_bytes = self.get_file_content(wire, path, rev)
473 return raw_bytes and '\0' in raw_bytes
473 return raw_bytes and '\0' in raw_bytes
474
474
475 return _is_binary(repo_id, rev, path)
475 return _is_binary(repo_id, rev, path)
476
476
477 @reraise_safe_exceptions
477 @reraise_safe_exceptions
478 def run_svn_command(self, wire, cmd, **opts):
478 def run_svn_command(self, wire, cmd, **opts):
479 path = wire.get('path', None)
479 path = wire.get('path', None)
480
480
481 if path and os.path.isdir(path):
481 if path and os.path.isdir(path):
482 opts['cwd'] = path
482 opts['cwd'] = path
483
483
484 safe_call = False
484 safe_call = False
485 if '_safe' in opts:
485 if '_safe' in opts:
486 safe_call = True
486 safe_call = True
487
487
488 svnenv = os.environ.copy()
488 svnenv = os.environ.copy()
489 svnenv.update(opts.pop('extra_env', {}))
489 svnenv.update(opts.pop('extra_env', {}))
490
490
491 _opts = {'env': svnenv, 'shell': False}
491 _opts = {'env': svnenv, 'shell': False}
492
492
493 try:
493 try:
494 _opts.update(opts)
494 _opts.update(opts)
495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
496
496
497 return ''.join(p), ''.join(p.error)
497 return ''.join(p), ''.join(p.error)
498 except (EnvironmentError, OSError) as err:
498 except (EnvironmentError, OSError) as err:
499 cmd = ' '.join(cmd) # human friendly CMD
499 cmd = ' '.join(cmd) # human friendly CMD
500 tb_err = ("Couldn't run svn command (%s).\n"
500 tb_err = ("Couldn't run svn command (%s).\n"
501 "Original error was:%s\n"
501 "Original error was:%s\n"
502 "Call options:%s\n"
502 "Call options:%s\n"
503 % (cmd, err, _opts))
503 % (cmd, err, _opts))
504 log.exception(tb_err)
504 log.exception(tb_err)
505 if safe_call:
505 if safe_call:
506 return '', err
506 return '', err
507 else:
507 else:
508 raise exceptions.VcsException()(tb_err)
508 raise exceptions.VcsException()(tb_err)
509
509
510 @reraise_safe_exceptions
510 @reraise_safe_exceptions
511 def install_hooks(self, wire, force=False):
511 def install_hooks(self, wire, force=False):
512 from vcsserver.hook_utils import install_svn_hooks
512 from vcsserver.hook_utils import install_svn_hooks
513 repo_path = wire['path']
513 repo_path = wire['path']
514 binary_dir = settings.BINARY_DIR
514 binary_dir = settings.BINARY_DIR
515 executable = None
515 executable = None
516 if binary_dir:
516 if binary_dir:
517 executable = os.path.join(binary_dir, 'python')
517 executable = os.path.join(binary_dir, 'python')
518 return install_svn_hooks(
518 return install_svn_hooks(
519 repo_path, executable=executable, force_create=force)
519 repo_path, executable=executable, force_create=force)
520
520
521 @reraise_safe_exceptions
521 @reraise_safe_exceptions
522 def get_hooks_info(self, wire):
522 def get_hooks_info(self, wire):
523 from vcsserver.hook_utils import (
523 from vcsserver.hook_utils import (
524 get_svn_pre_hook_version, get_svn_post_hook_version)
524 get_svn_pre_hook_version, get_svn_post_hook_version)
525 repo_path = wire['path']
525 repo_path = wire['path']
526 return {
526 return {
527 'pre_version': get_svn_pre_hook_version(repo_path),
527 'pre_version': get_svn_pre_hook_version(repo_path),
528 'post_version': get_svn_post_hook_version(repo_path),
528 'post_version': get_svn_post_hook_version(repo_path),
529 }
529 }
530
530
531
531
532 class SvnDiffer(object):
532 class SvnDiffer(object):
533 """
533 """
534 Utility to create diffs based on difflib and the Subversion api
534 Utility to create diffs based on difflib and the Subversion api
535 """
535 """
536
536
537 binary_content = False
537 binary_content = False
538
538
539 def __init__(
539 def __init__(
540 self, repo, src_rev, src_path, tgt_rev, tgt_path,
540 self, repo, src_rev, src_path, tgt_rev, tgt_path,
541 ignore_whitespace, context):
541 ignore_whitespace, context):
542 self.repo = repo
542 self.repo = repo
543 self.ignore_whitespace = ignore_whitespace
543 self.ignore_whitespace = ignore_whitespace
544 self.context = context
544 self.context = context
545
545
546 fsobj = svn.repos.fs(repo)
546 fsobj = svn.repos.fs(repo)
547
547
548 self.tgt_rev = tgt_rev
548 self.tgt_rev = tgt_rev
549 self.tgt_path = tgt_path or ''
549 self.tgt_path = tgt_path or ''
550 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
550 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
551 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
551 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
552
552
553 self.src_rev = src_rev
553 self.src_rev = src_rev
554 self.src_path = src_path or self.tgt_path
554 self.src_path = src_path or self.tgt_path
555 self.src_root = svn.fs.revision_root(fsobj, src_rev)
555 self.src_root = svn.fs.revision_root(fsobj, src_rev)
556 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
556 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
557
557
558 self._validate()
558 self._validate()
559
559
560 def _validate(self):
560 def _validate(self):
561 if (self.tgt_kind != svn.core.svn_node_none and
561 if (self.tgt_kind != svn.core.svn_node_none and
562 self.src_kind != svn.core.svn_node_none and
562 self.src_kind != svn.core.svn_node_none and
563 self.src_kind != self.tgt_kind):
563 self.src_kind != self.tgt_kind):
564 # TODO: johbo: proper error handling
564 # TODO: johbo: proper error handling
565 raise Exception(
565 raise Exception(
566 "Source and target are not compatible for diff generation. "
566 "Source and target are not compatible for diff generation. "
567 "Source type: %s, target type: %s" %
567 "Source type: %s, target type: %s" %
568 (self.src_kind, self.tgt_kind))
568 (self.src_kind, self.tgt_kind))
569
569
570 def generate_diff(self):
570 def generate_diff(self):
571 buf = StringIO.StringIO()
571 buf = io.StringIO()
572 if self.tgt_kind == svn.core.svn_node_dir:
572 if self.tgt_kind == svn.core.svn_node_dir:
573 self._generate_dir_diff(buf)
573 self._generate_dir_diff(buf)
574 else:
574 else:
575 self._generate_file_diff(buf)
575 self._generate_file_diff(buf)
576 return buf.getvalue()
576 return buf.getvalue()
577
577
578 def _generate_dir_diff(self, buf):
578 def _generate_dir_diff(self, buf):
579 editor = DiffChangeEditor()
579 editor = DiffChangeEditor()
580 editor_ptr, editor_baton = svn.delta.make_editor(editor)
580 editor_ptr, editor_baton = svn.delta.make_editor(editor)
581 svn.repos.dir_delta2(
581 svn.repos.dir_delta2(
582 self.src_root,
582 self.src_root,
583 self.src_path,
583 self.src_path,
584 '', # src_entry
584 '', # src_entry
585 self.tgt_root,
585 self.tgt_root,
586 self.tgt_path,
586 self.tgt_path,
587 editor_ptr, editor_baton,
587 editor_ptr, editor_baton,
588 authorization_callback_allow_all,
588 authorization_callback_allow_all,
589 False, # text_deltas
589 False, # text_deltas
590 svn.core.svn_depth_infinity, # depth
590 svn.core.svn_depth_infinity, # depth
591 False, # entry_props
591 False, # entry_props
592 False, # ignore_ancestry
592 False, # ignore_ancestry
593 )
593 )
594
594
595 for path, __, change in sorted(editor.changes):
595 for path, __, change in sorted(editor.changes):
596 self._generate_node_diff(
596 self._generate_node_diff(
597 buf, change, path, self.tgt_path, path, self.src_path)
597 buf, change, path, self.tgt_path, path, self.src_path)
598
598
599 def _generate_file_diff(self, buf):
599 def _generate_file_diff(self, buf):
600 change = None
600 change = None
601 if self.src_kind == svn.core.svn_node_none:
601 if self.src_kind == svn.core.svn_node_none:
602 change = "add"
602 change = "add"
603 elif self.tgt_kind == svn.core.svn_node_none:
603 elif self.tgt_kind == svn.core.svn_node_none:
604 change = "delete"
604 change = "delete"
605 tgt_base, tgt_path = vcspath.split(self.tgt_path)
605 tgt_base, tgt_path = vcspath.split(self.tgt_path)
606 src_base, src_path = vcspath.split(self.src_path)
606 src_base, src_path = vcspath.split(self.src_path)
607 self._generate_node_diff(
607 self._generate_node_diff(
608 buf, change, tgt_path, tgt_base, src_path, src_base)
608 buf, change, tgt_path, tgt_base, src_path, src_base)
609
609
610 def _generate_node_diff(
610 def _generate_node_diff(
611 self, buf, change, tgt_path, tgt_base, src_path, src_base):
611 self, buf, change, tgt_path, tgt_base, src_path, src_base):
612
612
613 if self.src_rev == self.tgt_rev and tgt_base == src_base:
613 if self.src_rev == self.tgt_rev and tgt_base == src_base:
614 # makes consistent behaviour with git/hg to return empty diff if
614 # makes consistent behaviour with git/hg to return empty diff if
615 # we compare same revisions
615 # we compare same revisions
616 return
616 return
617
617
618 tgt_full_path = vcspath.join(tgt_base, tgt_path)
618 tgt_full_path = vcspath.join(tgt_base, tgt_path)
619 src_full_path = vcspath.join(src_base, src_path)
619 src_full_path = vcspath.join(src_base, src_path)
620
620
621 self.binary_content = False
621 self.binary_content = False
622 mime_type = self._get_mime_type(tgt_full_path)
622 mime_type = self._get_mime_type(tgt_full_path)
623
623
624 if mime_type and not mime_type.startswith('text'):
624 if mime_type and not mime_type.startswith('text'):
625 self.binary_content = True
625 self.binary_content = True
626 buf.write("=" * 67 + '\n')
626 buf.write("=" * 67 + '\n')
627 buf.write("Cannot display: file marked as a binary type.\n")
627 buf.write("Cannot display: file marked as a binary type.\n")
628 buf.write("svn:mime-type = %s\n" % mime_type)
628 buf.write("svn:mime-type = %s\n" % mime_type)
629 buf.write("Index: %s\n" % (tgt_path, ))
629 buf.write("Index: %s\n" % (tgt_path, ))
630 buf.write("=" * 67 + '\n')
630 buf.write("=" * 67 + '\n')
631 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
631 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
632 'tgt_path': tgt_path})
632 'tgt_path': tgt_path})
633
633
634 if change == 'add':
634 if change == 'add':
635 # TODO: johbo: SVN is missing a zero here compared to git
635 # TODO: johbo: SVN is missing a zero here compared to git
636 buf.write("new file mode 10644\n")
636 buf.write("new file mode 10644\n")
637
637
638 #TODO(marcink): intro to binary detection of svn patches
638 #TODO(marcink): intro to binary detection of svn patches
639 # if self.binary_content:
639 # if self.binary_content:
640 # buf.write('GIT binary patch\n')
640 # buf.write('GIT binary patch\n')
641
641
642 buf.write("--- /dev/null\t(revision 0)\n")
642 buf.write("--- /dev/null\t(revision 0)\n")
643 src_lines = []
643 src_lines = []
644 else:
644 else:
645 if change == 'delete':
645 if change == 'delete':
646 buf.write("deleted file mode 10644\n")
646 buf.write("deleted file mode 10644\n")
647
647
648 #TODO(marcink): intro to binary detection of svn patches
648 #TODO(marcink): intro to binary detection of svn patches
649 # if self.binary_content:
649 # if self.binary_content:
650 # buf.write('GIT binary patch\n')
650 # buf.write('GIT binary patch\n')
651
651
652 buf.write("--- a/%s\t(revision %s)\n" % (
652 buf.write("--- a/%s\t(revision %s)\n" % (
653 src_path, self.src_rev))
653 src_path, self.src_rev))
654 src_lines = self._svn_readlines(self.src_root, src_full_path)
654 src_lines = self._svn_readlines(self.src_root, src_full_path)
655
655
656 if change == 'delete':
656 if change == 'delete':
657 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
657 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
658 tgt_lines = []
658 tgt_lines = []
659 else:
659 else:
660 buf.write("+++ b/%s\t(revision %s)\n" % (
660 buf.write("+++ b/%s\t(revision %s)\n" % (
661 tgt_path, self.tgt_rev))
661 tgt_path, self.tgt_rev))
662 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
662 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
663
663
664 if not self.binary_content:
664 if not self.binary_content:
665 udiff = svn_diff.unified_diff(
665 udiff = svn_diff.unified_diff(
666 src_lines, tgt_lines, context=self.context,
666 src_lines, tgt_lines, context=self.context,
667 ignore_blank_lines=self.ignore_whitespace,
667 ignore_blank_lines=self.ignore_whitespace,
668 ignore_case=False,
668 ignore_case=False,
669 ignore_space_changes=self.ignore_whitespace)
669 ignore_space_changes=self.ignore_whitespace)
670 buf.writelines(udiff)
670 buf.writelines(udiff)
671
671
672 def _get_mime_type(self, path):
672 def _get_mime_type(self, path):
673 try:
673 try:
674 mime_type = svn.fs.node_prop(
674 mime_type = svn.fs.node_prop(
675 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
675 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
676 except svn.core.SubversionException:
676 except svn.core.SubversionException:
677 mime_type = svn.fs.node_prop(
677 mime_type = svn.fs.node_prop(
678 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
678 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
679 return mime_type
679 return mime_type
680
680
681 def _svn_readlines(self, fs_root, node_path):
681 def _svn_readlines(self, fs_root, node_path):
682 if self.binary_content:
682 if self.binary_content:
683 return []
683 return []
684 node_kind = svn.fs.check_path(fs_root, node_path)
684 node_kind = svn.fs.check_path(fs_root, node_path)
685 if node_kind not in (
685 if node_kind not in (
686 svn.core.svn_node_file, svn.core.svn_node_symlink):
686 svn.core.svn_node_file, svn.core.svn_node_symlink):
687 return []
687 return []
688 content = svn.core.Stream(
688 content = svn.core.Stream(
689 svn.fs.file_contents(fs_root, node_path)).read()
689 svn.fs.file_contents(fs_root, node_path)).read()
690 return content.splitlines(True)
690 return content.splitlines(True)
691
691
692
692
class DiffChangeEditor(svn.delta.Editor):
    """
    Svn delta editor that records changes between two given revisions.

    Every change is collected into ``self.changes`` as a
    ``(path, kind, action)`` tuple where ``kind`` is ``'file'`` or
    ``None`` and ``action`` is ``'add'``, ``'change'`` or ``'delete'``.
    """

    def __init__(self):
        self.changes = []

    def _record(self, path, kind, action):
        # Single collection point shared by all editor callbacks.
        self.changes.append((path, kind, action))

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self._record(path, None, 'delete')

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self._record(path, 'file', 'add')

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self._record(path, 'file', 'change')
711
711
712
712
def authorization_callback_allow_all(root, path, pool):
    """Svn authz callback that unconditionally grants access.

    All three parameters are dictated by the svn callback signature and
    are intentionally ignored.
    """
    return True
715
715
716
716
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # `node` is a dict with at least a 'path' key; 'content' and
        # 'properties' are read lazily by update().
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create or update the node inside the transaction root."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up from the node's parent collecting missing ancestors,
        # then create them top-down so each make_dir has a valid parent.
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        # py3 fix: dict.iteritems() no longer exists; items() works on
        # both interpreters.
        for key, value in properties.items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
773
773
774
774
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds, possibly fractional) into the
    APR timestamp type ``apr_time_t`` (microseconds since the epoch).

    ``apr_time_t`` is an integer type; truncate instead of returning a
    float so the value can be handed to the svn bindings unchanged.
    """
    return int(timestamp * 1E6)
780
780
781
781
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    Returns a ``svn_opt_revision_t`` whose kind is fixed to
    ``svn_opt_revision_number`` and whose value wraps ``num``.
    """
    wrapped_number = svn.core.svn_opt_revision_value_t()
    wrapped_number.number = num

    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = wrapped_number
    return rev
@@ -1,241 +1,241 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import threading
20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
21 from http.server import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
22 from socketserver import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
27 import simplejson as json
28
28
29 from vcsserver import hooks
29 from vcsserver import hooks
30
30
31
31
def get_hg_ui(extras=None):
    """Create a Config object with a valid RC_SCM_DATA entry."""
    # Start from a complete set of defaults and let the caller override
    # or extend any of them.
    scm_data = {
        'username': '',
        'repository': '',
        'locked_by': '',
        'scm': '',
        'make_lock': '',
        'action': '',
        'ip': '',
        'hooks_uri': 'fake_hooks_uri',
    }
    scm_data.update(extras or {})

    ui = mercurial.ui.ui()
    ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(scm_data))
    return ui
50
50
51
51
def test_git_pre_receive_is_disabled():
    # Only the 'pull' hook is enabled, so pre-receive must be a no-op.
    extras = {'hooks': ['pull']}
    status = hooks.git_pre_receive(
        None, None, {'RC_SCM_DATA': json.dumps(extras)})

    assert status == 0
58
58
59
59
def test_git_post_receive_is_disabled():
    # Only the 'pull' hook is enabled, so post-receive must be a no-op.
    extras = {'hooks': ['pull']}
    status = hooks.git_post_receive(
        None, '', {'RC_SCM_DATA': json.dumps(extras)})

    assert status == 0
66
66
67
67
def test_git_post_receive_calls_repo_size():
    # With 'repo_size' enabled, post-receive fires it before 'post_push'.
    extras = {'hooks': ['push', 'repo_size']}
    with mock.patch.object(hooks, '_call_hook') as hook_spy:
        hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    extras.update({'commit_ids': [], 'hook_type': 'post_receive',
                   'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})

    assert hook_spy.call_args_list == [
        mock.call('repo_size', extras, mock.ANY),
        mock.call('post_push', extras, mock.ANY),
    ]
80
80
81
81
def test_git_post_receive_does_not_call_disabled_repo_size():
    # 'repo_size' is not in the enabled hooks, so only 'post_push' runs.
    extras = {'hooks': ['push']}
    with mock.patch.object(hooks, '_call_hook') as hook_spy:
        hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    extras.update({'commit_ids': [], 'hook_type': 'post_receive',
                   'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})

    assert hook_spy.call_args_list == [
        mock.call('post_push', extras, mock.ANY),
    ]
93
93
94
94
def test_repo_size_exception_does_not_affect_git_post_receive():
    # A crash inside the 'repo_size' hook must not break the push flow.
    extras = {'hooks': ['push', 'repo_size']}
    status = 0

    def fake_call_hook(name, *args, **kwargs):
        if name == 'repo_size':
            raise Exception('Fake exception')
        return status

    with mock.patch.object(hooks, '_call_hook') as hook_spy:
        hook_spy.side_effect = fake_call_hook
        result = hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
        assert result == status
110
110
111
111
def test_git_pre_pull_is_disabled():
    # With only 'push' enabled, pre-pull returns a neutral response.
    response = hooks.git_pre_pull({'hooks': ['push']})
    assert response == hooks.HookResponse(0, '')
114
114
115
115
def test_git_post_pull_is_disabled():
    # With only 'push' enabled, post-pull returns a neutral response.
    response = hooks.git_post_pull({'hooks': ['push']})
    assert response == hooks.HookResponse(0, '')
119
119
120
120
class TestGetHooksClient(object):
    """Selection logic of hooks._get_hooks_client."""

    def test_returns_http_client_when_protocol_matches(self):
        uri = 'localhost:8000'
        client = hooks._get_hooks_client({
            'hooks_uri': uri,
            'hooks_protocol': 'http'
        })
        assert isinstance(client, hooks.HooksHttpClient)
        assert client.hooks_uri == uri

    def test_returns_dummy_client_when_hooks_uri_not_specified(self):
        module_stub = mock.Mock()
        module_name = 'fake.module'
        patcher = mock.patch.object(
            hooks.importlib, 'import_module', return_value=module_stub)
        with patcher as import_mock:
            client = hooks._get_hooks_client({'hooks_module': module_name})

        import_mock.assert_called_once_with(module_name)
        assert isinstance(client, hooks.HooksDummyClient)
        assert client._hooks_module == module_stub
144
144
145
145
class TestHooksHttpClient(object):
    """Behaviour of the HTTP based hooks client."""

    def test_init_sets_hooks_uri(self):
        address = 'localhost:3000'
        assert hooks.HooksHttpClient(address).hooks_uri == address

    def test_serialize_returns_json_string(self):
        client = hooks.HooksHttpClient('localhost:3000')
        name = 'test'
        extras = {
            'first': 1,
            'second': 'two'
        }
        serialized = client._serialize(name, extras)
        assert serialized == json.dumps({
            'method': name,
            'extras': extras
        })

    def test_call_queries_http_server(self, http_mirror):
        # The mirror server echoes the serialized call back, so the
        # client's response equals its own request payload.
        client = hooks.HooksHttpClient(http_mirror.uri)
        name = 'test'
        extras = {
            'first': 1,
            'second': 'two'
        }
        assert client(name, extras) == {
            'method': name,
            'extras': extras
        }
179
179
180
180
class TestHooksDummyClient(object):
    """Behaviour of the in-process (dummy) hooks client."""

    def test_init_imports_hooks_module(self):
        module_name = 'rhodecode.fake.module'
        module_stub = mock.MagicMock()

        patcher = mock.patch.object(
            hooks.importlib, 'import_module', return_value=module_stub)
        with patcher as import_mock:
            client = hooks.HooksDummyClient(module_name)
        import_mock.assert_called_once_with(module_name)
        assert client._hooks_module == module_stub

    def test_call_returns_hook_result(self):
        module_name = 'rhodecode.fake.module'
        module_stub = mock.MagicMock()
        patcher = mock.patch.object(
            hooks.importlib, 'import_module', return_value=module_stub)
        with patcher:
            client = hooks.HooksDummyClient(module_name)

        result = client('post_push', {})
        module_stub.Hooks.assert_called_once_with()
        assert result == module_stub.Hooks().__enter__().post_push()
204
204
205
205
@pytest.fixture
def http_mirror(request):
    # Spin up an HTTP echo server for the duration of one test; the
    # finalizer shuts the server thread down once the test finished.
    server = MirrorHttpServer()
    request.addfinalizer(server.stop)
    return server
211
211
212
212
class MirrorHttpHandler(BaseHTTPRequestHandler):
    """Request handler that echoes the POST body back to the client."""

    def do_POST(self):
        length = int(self.headers['Content-Length'])
        # Keep the payload as bytes: on Python 3 `self.wfile` is a
        # binary stream, and writing a decoded `str` to it raises
        # TypeError. Echoing the raw bytes round-trips any encoding.
        body = self.rfile.read(length)
        self.send_response(200)
        self.end_headers()
        self.wfile.write(body)
220
220
221
221
class MirrorHttpServer(object):
    """Background HTTP server that echoes request bodies (test helper)."""

    ip_address = '127.0.0.1'
    port = 0

    def __init__(self):
        # Bind to port 0 so the OS picks a free port, then remember the
        # actual port for building the client URI.
        self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
        self.port = self._daemon.server_address[1]
        self._thread = threading.Thread(target=self._daemon.serve_forever)
        self._thread.daemon = True
        self._thread.start()

    def stop(self):
        """Shut the server down and drop the thread/server references."""
        self._daemon.shutdown()
        self._thread.join()
        self._daemon = None
        self._thread = None

    @property
    def uri(self):
        """``host:port`` address string of the running server."""
        return '{}:{}'.format(self.ip_address, self.port)
General Comments 0
You need to be logged in to leave comments. Login now