svn: added support for hooks management of git and subversion....
marcink -
r407:34976bc5 default
@@ -0,0 +1,154 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20 import re
21 import os
22 import sys
23 import datetime
24 import logging
25 import pkg_resources
26
27 import vcsserver
28
29 log = logging.getLogger(__name__)
30
31
32 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
33 """
34 Creates RhodeCode pre-receive and post-receive hooks inside a git repository (bare or non-bare)
35
36 :param repo_path: path to repository
37 :param executable: binary executable to put in the hooks
38 :param force_create: Create even if same name hook exists
39 """
40 executable = executable or sys.executable
41 hooks_path = os.path.join(repo_path, 'hooks')
42 if not bare:
43 hooks_path = os.path.join(repo_path, '.git', 'hooks')
44 if not os.path.isdir(hooks_path):
45 os.makedirs(hooks_path, mode=0777)
46
47 tmpl_post = pkg_resources.resource_string(
48 'vcsserver', '/'.join(
49 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
50 tmpl_pre = pkg_resources.resource_string(
51 'vcsserver', '/'.join(
52 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
53
54 path = '' # not used for now
55 timestamp = datetime.datetime.utcnow().isoformat()
56
57 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
58 log.debug('Installing git hook in repo %s', repo_path)
59 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
60 _rhodecode_hook = check_rhodecode_hook(_hook_file)
61
62 if _rhodecode_hook or force_create:
63 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
64 try:
65 with open(_hook_file, 'wb') as f:
66 template = template.replace(
67 '_TMPL_', vcsserver.__version__)
68 template = template.replace('_DATE_', timestamp)
69 template = template.replace('_ENV_', executable)
70 template = template.replace('_PATH_', path)
71 f.write(template)
72 os.chmod(_hook_file, 0755)
73 except IOError:
74 log.exception('error writing hook file %s', _hook_file)
75 else:
76 log.debug('skipping writing hook file')
77
78 return True
79
80
81 def install_svn_hooks(repo_path, executable=None, force_create=False):
82 """
83 Creates RhodeCode pre-commit and post-commit hooks inside an svn repository
84
85 :param repo_path: path to repository
86 :param executable: binary executable to put in the hooks
87 :param force_create: Create even if same name hook exists
88 """
89 executable = executable or sys.executable
90 hooks_path = os.path.join(repo_path, 'hooks')
91 if not os.path.isdir(hooks_path):
92 os.makedirs(hooks_path, mode=0777)
93
94 tmpl_post = pkg_resources.resource_string(
95 'vcsserver', '/'.join(
96 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
97 tmpl_pre = pkg_resources.resource_string(
98 'vcsserver', '/'.join(
99 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
100
101 path = '' # not used for now
102 timestamp = datetime.datetime.utcnow().isoformat()
103
104 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
105 log.debug('Installing svn hook in repo %s', repo_path)
106 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
107 _rhodecode_hook = check_rhodecode_hook(_hook_file)
108
109 if _rhodecode_hook or force_create:
110 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
111
112 try:
113 with open(_hook_file, 'wb') as f:
114 template = template.replace(
115 '_TMPL_', vcsserver.__version__)
116 template = template.replace('_DATE_', timestamp)
117 template = template.replace('_ENV_', executable)
118 template = template.replace('_PATH_', path)
119
120 f.write(template)
121 os.chmod(_hook_file, 0755)
122 except IOError:
123 log.exception('error writing hook file %s', _hook_file)
124 else:
125 log.debug('skipping writing hook file')
126
127 return True
128
129
130 def check_rhodecode_hook(hook_path):
131 """
132 Check if the hook was created by RhodeCode
133 """
134 if not os.path.exists(hook_path):
135 return True
136
137 log.debug('hook exists, checking if it is from rhodecode')
138 hook_content = read_hook_content(hook_path)
139 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
140 if matches:
141 try:
142 version = matches.groups()[0]
143 log.debug('got version %s from hooks.', version)
144 return True
145 except Exception:
146 log.exception("Exception while reading the hook version.")
147
148 return False
149
150
151 def read_hook_content(hook_path):
152 with open(hook_path, 'rb') as f:
153 content = f.read()
154 return content
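For orientation, a minimal usage sketch of the helpers above, as they might be called from a management script; the repository paths are hypothetical and `executable` defaults to `sys.executable` exactly as in the code:

from vcsserver import hook_utils

# bare git repository: pre-receive/post-receive are written into <repo>/hooks
hook_utils.install_git_hooks('/srv/repos/project.git', bare=True)

# non-bare checkout: hooks go into <repo>/.git/hooks; force_create also
# overwrites hooks that were not generated by RhodeCode
hook_utils.install_git_hooks('/srv/repos/project', bare=False, force_create=True)

# subversion repository: pre-commit/post-commit are written into <repo>/hooks
hook_utils.install_svn_hooks('/srv/repos/svn-project')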
@@ -0,0 +1,51 b''
1 #!_ENV_
2 import os
3 import sys
4 path_adjust = [_PATH_]
5
6 if path_adjust:
7 sys.path = path_adjust
8
9 try:
10 from vcsserver import hooks
11 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
14 print traceback.format_exc()
15 hooks = None
16
17
18 # TIMESTAMP: _DATE_
19 RC_HOOK_VER = '_TMPL_'
20
21
22 def main():
23 if hooks is None:
24 # exit with success if we cannot import vcsserver.hooks !!
25 # this allows pushing to this repo even when rhodecode is not available
26 sys.exit(0)
27
28 if os.environ.get('RC_SKIP_HOOKS'):
29 sys.exit(0)
30
31 repo_path = os.getcwd()
32 push_data = sys.stdin.readlines()
33 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
34 # os.environ is set up by the subprocess call that runs git,
35 # and git then executes this hook with that environment.
36 # The environment carries extra info from the rhodecode system,
37 # such as the client IP or the username from basic-auth.
38 try:
39 result = hooks.git_post_receive(repo_path, push_data, os.environ)
40 sys.exit(result)
41 except Exception as error:
42 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
45 print 'ERROR:', error
46 sys.exit(1)
47 sys.exit(0)
48
49
50 if __name__ == '__main__':
51 main()
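As background for the `push_data = sys.stdin.readlines()` call in the template above: git feeds a receive hook one line per updated ref on standard input, each line of the form `<old-sha> <new-sha> <ref-name>`. A hypothetical example of what `push_data` could contain for a single-branch push (SHA values made up for illustration):

push_data = [
    '0000000000000000000000000000000000000000 '
    '9fceb02c3bd2f5d63f0d9a4ab1e3f0e1c9b0a123 refs/heads/master\n',
]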
@@ -0,0 +1,51 b''
1 #!_ENV_
2 import os
3 import sys
4 path_adjust = [_PATH_]
5
6 if path_adjust:
7 sys.path = path_adjust
8
9 try:
10 from vcsserver import hooks
11 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
14 print traceback.format_exc()
15 hooks = None
16
17
18 # TIMESTAMP: _DATE_
19 RC_HOOK_VER = '_TMPL_'
20
21
22 def main():
23 if hooks is None:
24 # exit with success if we cannot import vcsserver.hooks !!
25 # this allows pushing to this repo even when rhodecode is not available
26 sys.exit(0)
27
28 if os.environ.get('RC_SKIP_HOOKS'):
29 sys.exit(0)
30
31 repo_path = os.getcwd()
32 push_data = sys.stdin.readlines()
33 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
34 # os.environ is set up by the subprocess call that runs git,
35 # and git then executes this hook with that environment.
36 # The environment carries extra info from the rhodecode system,
37 # such as the client IP or the username from basic-auth.
38 try:
39 result = hooks.git_pre_receive(repo_path, push_data, os.environ)
40 sys.exit(result)
41 except Exception as error:
42 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
45 print 'ERROR:', error
46 sys.exit(1)
47 sys.exit(0)
48
49
50 if __name__ == '__main__':
51 main()
@@ -0,0 +1,50 b''
1 #!_ENV_
2
3 import os
4 import sys
5 path_adjust = [_PATH_]
6
7 if path_adjust:
8 sys.path = path_adjust
9
10 try:
11 from vcsserver import hooks
12 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
15 print traceback.format_exc()
16 hooks = None
17
18
19 # TIMESTAMP: _DATE_
20 RC_HOOK_VER = '_TMPL_'
21
22
23 def main():
24 if hooks is None:
25 # exit with success if we cannot import vcsserver.hooks !!
26 # this allows pushing to this repo even when rhodecode is not available
27 sys.exit(0)
28
29 if os.environ.get('RC_SKIP_HOOKS'):
30 sys.exit(0)
31 repo_path = os.getcwd()
32 push_data = sys.argv[1:]
33
34 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
35
36 try:
37 result = hooks.svn_post_commit(repo_path, push_data, os.environ)
38 sys.exit(result)
39 except Exception as error:
40 # TODO: johbo: Improve handling of this special case
41 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
42 raise
43 print 'ERROR:', error
44 sys.exit(1)
45 sys.exit(0)
46
47
48
49 if __name__ == '__main__':
50 main()
@@ -0,0 +1,52 b''
1 #!_ENV_
2
3 import os
4 import sys
5 path_adjust = [_PATH_]
6
7 if path_adjust:
8 sys.path = path_adjust
9
10 try:
11 from vcsserver import hooks
12 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
15 print traceback.format_exc()
16 hooks = None
17
18
19 # TIMESTAMP: _DATE_
20 RC_HOOK_VER = '_TMPL_'
21
22
23 def main():
24 if os.environ.get('SSH_READ_ONLY') == '1':
25 sys.stderr.write('Only read-only access is allowed')
26 sys.exit(1)
27
28 if hooks is None:
29 # exit with success if we cannot import vcsserver.hooks !!
30 # this allows pushing to this repo even when rhodecode is not available
31 sys.exit(0)
32 if os.environ.get('RC_SKIP_HOOKS'):
33 sys.exit(0)
34 repo_path = os.getcwd()
35 push_data = sys.argv[1:]
36
37 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
38
39 try:
40 result = hooks.svn_pre_commit(repo_path, push_data, os.environ)
41 sys.exit(result)
42 except Exception as error:
43 # TODO: johbo: Improve handling of this special case
44 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
45 raise
46 print 'ERROR:', error
47 sys.exit(1)
48 sys.exit(0)
49
50
51 if __name__ == '__main__':
52 main()
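For context on `push_data = sys.argv[1:]` in the two Subversion templates above: Subversion invokes a post-commit hook with (at least) the repository path and the new revision number, and a pre-commit hook with the repository path and the transaction name, so the arguments forwarded to `hooks.svn_post_commit` / `hooks.svn_pre_commit` look roughly like this (values hypothetical):

# post-commit:  sys.argv[1:] == ['/srv/svn/project', '142']
# pre-commit:   sys.argv[1:] == ['/srv/svn/project', '142-1a']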
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,206 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import sys
20 import stat
21 import pytest
22 import vcsserver
23 import tempfile
24 from vcsserver import hook_utils
25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.utils import AttributeDict
27
28
29 class TestCheckRhodecodeHook(object):
30
31 def test_returns_false_when_wrong_hook_file_found(self, tmpdir):
32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 with open(hook, 'wb') as f:
34 f.write('dummy test')
35 result = hook_utils.check_rhodecode_hook(hook)
36 assert result is False
37
38 def test_returns_true_when_no_hook_file_found(self, tmpdir):
39 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
40 result = hook_utils.check_rhodecode_hook(hook)
41 assert result
42
43 @pytest.mark.parametrize("file_content, expected_result", [
44 ("RC_HOOK_VER = '3.3.3'\n", True),
45 ("RC_HOOK = '3.3.3'\n", False),
46 ], ids=no_newline_id_generator)
47 def test_signatures(self, file_content, expected_result, tmpdir):
48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 with open(hook, 'wb') as f:
50 f.write(file_content)
51
52 result = hook_utils.check_rhodecode_hook(hook)
53
54 assert result is expected_result
55
56
57 class BaseInstallHooks(object):
58 HOOK_FILES = ()
59
60 def _check_hook_file_mode(self, file_path):
61 assert os.path.exists(file_path), 'path %s missing' % file_path
62 stat_info = os.stat(file_path)
63
64 file_mode = stat.S_IMODE(stat_info.st_mode)
65 expected_mode = int('755', 8)
66 assert expected_mode == file_mode
67
68 def _check_hook_file_content(self, file_path, executable):
69 executable = executable or sys.executable
70 with open(file_path, 'rt') as hook_file:
71 content = hook_file.read()
72
73 expected_env = '#!{}'.format(executable)
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 vcsserver.__version__)
76 assert content.strip().startswith(expected_env)
77 assert expected_rc_version in content
78
79 def _create_fake_hook(self, file_path, content):
80 with open(file_path, 'w') as hook_file:
81 hook_file.write(content)
82
83 def create_dummy_repo(self, repo_type):
84 tmpdir = tempfile.mkdtemp()
85 repo = AttributeDict()
86 if repo_type == 'git':
87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 os.makedirs(repo.path)
89 os.makedirs(os.path.join(repo.path, 'hooks'))
90 repo.bare = True
91
92 elif repo_type == 'svn':
93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 os.makedirs(repo.path)
95 os.makedirs(os.path.join(repo.path, 'hooks'))
96
97 return repo
98
99 def check_hooks(self, repo_path, repo_bare=True):
100 for file_name in self.HOOK_FILES:
101 if repo_bare:
102 file_path = os.path.join(repo_path, 'hooks', file_name)
103 else:
104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 self._check_hook_file_mode(file_path)
106 self._check_hook_file_content(file_path, sys.executable)
107
108
109 class TestInstallGitHooks(BaseInstallHooks):
110 HOOK_FILES = ('pre-receive', 'post-receive')
111
112 def test_hooks_are_installed(self):
113 repo = self.create_dummy_repo('git')
114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 assert result
116 self.check_hooks(repo.path, repo.bare)
117
118 def test_hooks_are_replaced(self):
119 repo = self.create_dummy_repo('git')
120 hooks_path = os.path.join(repo.path, 'hooks')
121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 self._create_fake_hook(
123 file_path, content="RC_HOOK_VER = 'abcde'\n")
124
125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 assert result
127 self.check_hooks(repo.path, repo.bare)
128
129 def test_non_rc_hooks_are_not_replaced(self):
130 repo = self.create_dummy_repo('git')
131 hooks_path = os.path.join(repo.path, 'hooks')
132 non_rc_content = 'echo "non rc hook"\n'
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 self._create_fake_hook(
135 file_path, content=non_rc_content)
136
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 assert result
139
140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 with open(file_path, 'rt') as hook_file:
142 content = hook_file.read()
143 assert content == non_rc_content
144
145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 repo = self.create_dummy_repo('git')
147 hooks_path = os.path.join(repo.path, 'hooks')
148 non_rc_content = 'echo "non rc hook"\n'
149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 self._create_fake_hook(
151 file_path, content=non_rc_content)
152
153 result = hook_utils.install_git_hooks(
154 repo.path, repo.bare, force_create=True)
155 assert result
156 self.check_hooks(repo.path, repo.bare)
157
158
159 class TestInstallSvnHooks(BaseInstallHooks):
160 HOOK_FILES = ('pre-commit', 'post-commit')
161
162 def test_hooks_are_installed(self):
163 repo = self.create_dummy_repo('svn')
164 result = hook_utils.install_svn_hooks(repo.path)
165 assert result
166 self.check_hooks(repo.path)
167
168 def test_hooks_are_replaced(self):
169 repo = self.create_dummy_repo('svn')
170 hooks_path = os.path.join(repo.path, 'hooks')
171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 self._create_fake_hook(
173 file_path, content="RC_HOOK_VER = 'abcde'\n")
174
175 result = hook_utils.install_svn_hooks(repo.path)
176 assert result
177 self.check_hooks(repo.path)
178
179 def test_non_rc_hooks_are_not_replaced(self):
180 repo = self.create_dummy_repo('svn')
181 hooks_path = os.path.join(repo.path, 'hooks')
182 non_rc_content = 'echo "non rc hook"\n'
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 self._create_fake_hook(
185 file_path, content=non_rc_content)
186
187 result = hook_utils.install_svn_hooks(repo.path)
188 assert result
189
190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 with open(file_path, 'rt') as hook_file:
192 content = hook_file.read()
193 assert content == non_rc_content
194
195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 repo = self.create_dummy_repo('svn')
197 hooks_path = os.path.join(repo.path, 'hooks')
198 non_rc_content = 'echo "non rc hook"\n'
199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 self._create_fake_hook(
201 file_path, content=non_rc_content)
202
203 result = hook_utils.install_svn_hooks(
204 repo.path, force_create=True)
205 assert result
206 self.check_hooks(repo.path)
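Assuming the test module above lives in the vcsserver test suite (its exact path is not shown in this diff), it can presumably be run on its own with pytest, e.g.:

py.test -v vcsserver/tests/test_hook_utils.py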
@@ -1,13 +1,16 b''
1 1 # top level files
2 2 include *.rst
3 3 include *.txt
4 4
5 5 # package extras
6 6 include vcsserver/VERSION
7 7
8 8 # all config files
9 9 recursive-include configs *
10 10
11 # hook templates
12 recursive-include vcsserver/hook_utils/hook_templates *
13
11 14 # skip any tests files
12 15 recursive-exclude vcsserver/tests *
13 16
@@ -1,79 +1,83 b''
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 3 # #
4 4 ################################################################################
5 5
6 6 [app:main]
7 7 use = egg:rhodecode-vcsserver
8 8
9 9 pyramid.default_locale_name = en
10 10 pyramid.includes =
11 11
12 12 # default locale used by VCS systems
13 13 locale = en_US.UTF-8
14 14
15 15 # cache regions, please don't change
16 16 beaker.cache.regions = repo_object
17 17 beaker.cache.repo_object.type = memorylru
18 18 beaker.cache.repo_object.max_items = 100
19 19 # cache auto-expires after N seconds
20 20 beaker.cache.repo_object.expire = 300
21 21 beaker.cache.repo_object.enabled = true
22 22
23 # path to binaries for vcsserver; it should be set by the installer
24 # at installation time, e.g. /home/user/vcsserver-1/profile/bin
25 core.binary_dir = ""
26
23 27 [server:main]
24 28 ## COMMON ##
25 29 host = 0.0.0.0
26 30 port = 9900
27 31
28 32 use = egg:waitress#main
29 33
30 34
31 35 ################################
32 36 ### LOGGING CONFIGURATION ####
33 37 ################################
34 38 [loggers]
35 39 keys = root, vcsserver, beaker
36 40
37 41 [handlers]
38 42 keys = console
39 43
40 44 [formatters]
41 45 keys = generic
42 46
43 47 #############
44 48 ## LOGGERS ##
45 49 #############
46 50 [logger_root]
47 51 level = NOTSET
48 52 handlers = console
49 53
50 54 [logger_vcsserver]
51 55 level = DEBUG
52 56 handlers =
53 57 qualname = vcsserver
54 58 propagate = 1
55 59
56 60 [logger_beaker]
57 61 level = DEBUG
58 62 handlers =
59 63 qualname = beaker
60 64 propagate = 1
61 65
62 66
63 67 ##############
64 68 ## HANDLERS ##
65 69 ##############
66 70
67 71 [handler_console]
68 72 class = StreamHandler
69 73 args = (sys.stderr,)
70 74 level = DEBUG
71 75 formatter = generic
72 76
73 77 ################
74 78 ## FORMATTERS ##
75 79 ################
76 80
77 81 [formatter_generic]
78 82 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
79 83 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,102 +1,106 b''
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 3 # #
4 4 ################################################################################
5 5
6 6
7 7 [server:main]
8 8 ## COMMON ##
9 9 host = 127.0.0.1
10 10 port = 9900
11 11
12 12
13 13 ##########################
14 14 ## GUNICORN WSGI SERVER ##
15 15 ##########################
16 16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 17 use = egg:gunicorn#main
18 18 ## Sets the number of process workers. Recommended
19 19 ## value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers
20 20 workers = 2
21 21 ## process name
22 22 proc_name = rhodecode_vcsserver
23 23 ## type of worker class, currently `sync` is the only option allowed.
24 24 worker_class = sync
25 25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 26 #worker_connections = 10
27 27 ## max number of requests that worker will handle before being gracefully
28 28 ## restarted, could prevent memory leaks
29 29 max_requests = 1000
30 30 max_requests_jitter = 30
31 31 ## amount of time a worker can spend with handling a request before it
32 32 ## gets killed and restarted. Set to 6hrs
33 33 timeout = 21600
34 34
35 35
36 36 [app:main]
37 37 use = egg:rhodecode-vcsserver
38 38
39 39 pyramid.default_locale_name = en
40 40 pyramid.includes =
41 41
42 ## default locale used by VCS systems
42 # default locale used by VCS systems
43 43 locale = en_US.UTF-8
44 44
45 45 # cache regions, please don't change
46 46 beaker.cache.regions = repo_object
47 47 beaker.cache.repo_object.type = memorylru
48 48 beaker.cache.repo_object.max_items = 100
49 49 # cache auto-expires after N seconds
50 50 beaker.cache.repo_object.expire = 300
51 51 beaker.cache.repo_object.enabled = true
52 52
53 # path to binaries for vcsserver; it should be set by the installer
54 # at installation time, e.g. /home/user/vcsserver-1/profile/bin
55 core.binary_dir = ""
56
53 57
54 58 ################################
55 59 ### LOGGING CONFIGURATION ####
56 60 ################################
57 61 [loggers]
58 62 keys = root, vcsserver, beaker
59 63
60 64 [handlers]
61 65 keys = console
62 66
63 67 [formatters]
64 68 keys = generic
65 69
66 70 #############
67 71 ## LOGGERS ##
68 72 #############
69 73 [logger_root]
70 74 level = NOTSET
71 75 handlers = console
72 76
73 77 [logger_vcsserver]
74 78 level = DEBUG
75 79 handlers =
76 80 qualname = vcsserver
77 81 propagate = 1
78 82
79 83 [logger_beaker]
80 84 level = DEBUG
81 85 handlers =
82 86 qualname = beaker
83 87 propagate = 1
84 88
85 89
86 90 ##############
87 91 ## HANDLERS ##
88 92 ##############
89 93
90 94 [handler_console]
91 95 class = StreamHandler
92 96 args = (sys.stderr,)
93 97 level = DEBUG
94 98 formatter = generic
95 99
96 100 ################
97 101 ## FORMATTERS ##
98 102 ################
99 103
100 104 [formatter_generic]
101 105 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
102 106 datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file
@@ -1,663 +1,669 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 59 raise exceptions.LookupException(e.message)
60 60 except (HangupException, UnexpectedCommandError) as e:
61 61 raise exceptions.VcsException(e.message)
62 62 except Exception as e:
63 63 # NOTE(marcink): because of how dulwich handles some exceptions
64 64 # (KeyError on empty repos), we cannot track this and catch all
65 65 # exceptions here; these may be exceptions from other handlers
66 66 #if not hasattr(e, '_vcs_kind'):
67 67 #log.exception("Unhandled exception in git remote call")
68 68 #raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class Repo(DulwichRepo):
74 74 """
75 75 A wrapper for dulwich Repo class.
76 76
77 77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 78 "Too many open files" error. We need to close all opened file descriptors
79 79 once the repo object is destroyed.
80 80
81 81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 82 to 0.12.0 +
83 83 """
84 84 def __del__(self):
85 85 if hasattr(self, 'object_store'):
86 86 self.close()
87 87
88 88
89 89 class GitFactory(RepoFactory):
90 90
91 91 def _create_repo(self, wire, create):
92 92 repo_path = str_to_dulwich(wire['path'])
93 93 return Repo(repo_path)
94 94
95 95
96 96 class GitRemote(object):
97 97
98 98 def __init__(self, factory):
99 99 self._factory = factory
100 100
101 101 self._bulk_methods = {
102 102 "author": self.commit_attribute,
103 103 "date": self.get_object_attrs,
104 104 "message": self.commit_attribute,
105 105 "parents": self.commit_attribute,
106 106 "_commit": self.revision,
107 107 }
108 108
109 109 def _wire_to_config(self, wire):
110 110 if 'config' in wire:
111 111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 112 return {}
113 113
114 114 def _assign_ref(self, wire, ref, commit_id):
115 115 repo = self._factory.repo(wire)
116 116 repo[ref] = commit_id
117 117
118 118 @reraise_safe_exceptions
119 119 def add_object(self, wire, content):
120 120 repo = self._factory.repo(wire)
121 121 blob = objects.Blob()
122 122 blob.set_raw_string(content)
123 123 repo.object_store.add_object(blob)
124 124 return blob.id
125 125
126 126 @reraise_safe_exceptions
127 127 def assert_correct_path(self, wire):
128 128 path = wire.get('path')
129 129 try:
130 130 self._factory.repo(wire)
131 131 except NotGitRepository as e:
132 132 tb = traceback.format_exc()
133 133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 134 return False
135 135
136 136 return True
137 137
138 138 @reraise_safe_exceptions
139 139 def bare(self, wire):
140 140 repo = self._factory.repo(wire)
141 141 return repo.bare
142 142
143 143 @reraise_safe_exceptions
144 144 def blob_as_pretty_string(self, wire, sha):
145 145 repo = self._factory.repo(wire)
146 146 return repo[sha].as_pretty_string()
147 147
148 148 @reraise_safe_exceptions
149 149 def blob_raw_length(self, wire, sha):
150 150 repo = self._factory.repo(wire)
151 151 blob = repo[sha]
152 152 return blob.raw_length()
153 153
154 154 def _parse_lfs_pointer(self, raw_content):
155 155
156 156 spec_string = 'version https://git-lfs.github.com/spec'
157 157 if raw_content and raw_content.startswith(spec_string):
158 158 pattern = re.compile(r"""
159 159 (?:\n)?
160 160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 162 ^size[ ](?P<oid_size>[0-9]+)\n
163 163 (?:\n)?
164 164 """, re.VERBOSE | re.MULTILINE)
165 165 match = pattern.match(raw_content)
166 166 if match:
167 167 return match.groupdict()
168 168
169 169 return {}
170 170
171 171 @reraise_safe_exceptions
172 172 def is_large_file(self, wire, sha):
173 173 repo = self._factory.repo(wire)
174 174 blob = repo[sha]
175 175 return self._parse_lfs_pointer(blob.as_raw_string())
176 176
177 177 @reraise_safe_exceptions
178 178 def in_largefiles_store(self, wire, oid):
179 179 repo = self._factory.repo(wire)
180 180 conf = self._wire_to_config(wire)
181 181
182 182 store_location = conf.get('vcs_git_lfs_store_location')
183 183 if store_location:
184 184 repo_name = repo.path
185 185 store = LFSOidStore(
186 186 oid=oid, repo=repo_name, store_location=store_location)
187 187 return store.has_oid()
188 188
189 189 return False
190 190
191 191 @reraise_safe_exceptions
192 192 def store_path(self, wire, oid):
193 193 repo = self._factory.repo(wire)
194 194 conf = self._wire_to_config(wire)
195 195
196 196 store_location = conf.get('vcs_git_lfs_store_location')
197 197 if store_location:
198 198 repo_name = repo.path
199 199 store = LFSOidStore(
200 200 oid=oid, repo=repo_name, store_location=store_location)
201 201 return store.oid_path
202 202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 203
204 204 @reraise_safe_exceptions
205 205 def bulk_request(self, wire, rev, pre_load):
206 206 result = {}
207 207 for attr in pre_load:
208 208 try:
209 209 method = self._bulk_methods[attr]
210 210 args = [wire, rev]
211 211 if attr == "date":
212 212 args.extend(["commit_time", "commit_timezone"])
213 213 elif attr in ["author", "message", "parents"]:
214 214 args.append(attr)
215 215 result[attr] = method(*args)
216 216 except KeyError:
217 217 raise exceptions.VcsException(
218 218 "Unknown bulk attribute: %s" % attr)
219 219 return result
220 220
221 221 def _build_opener(self, url):
222 222 handlers = []
223 223 url_obj = url_parser(url)
224 224 _, authinfo = url_obj.authinfo()
225 225
226 226 if authinfo:
227 227 # create a password manager
228 228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 229 passmgr.add_password(*authinfo)
230 230
231 231 handlers.extend((httpbasicauthhandler(passmgr),
232 232 httpdigestauthhandler(passmgr)))
233 233
234 234 return urllib2.build_opener(*handlers)
235 235
236 236 @reraise_safe_exceptions
237 237 def check_url(self, url, config):
238 238 url_obj = url_parser(url)
239 239 test_uri, _ = url_obj.authinfo()
240 240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 241 url_obj.query = obfuscate_qs(url_obj.query)
242 242 cleaned_uri = str(url_obj)
243 243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244 244
245 245 if not test_uri.endswith('info/refs'):
246 246 test_uri = test_uri.rstrip('/') + '/info/refs'
247 247
248 248 o = self._build_opener(url)
249 249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250 250
251 251 q = {"service": 'git-upload-pack'}
252 252 qs = '?%s' % urllib.urlencode(q)
253 253 cu = "%s%s" % (test_uri, qs)
254 254 req = urllib2.Request(cu, None, {})
255 255
256 256 try:
257 257 log.debug("Trying to open URL %s", cleaned_uri)
258 258 resp = o.open(req)
259 259 if resp.code != 200:
260 260 raise exceptions.URLError('Return Code is not 200')
261 261 except Exception as e:
262 262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 263 # means it cannot be cloned
264 264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 265
266 266 # now detect if it's proper git repo
267 267 gitdata = resp.read()
268 268 if 'service=git-upload-pack' in gitdata:
269 269 pass
270 270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 271 # old style git can return some other format !
272 272 pass
273 273 else:
274 274 raise exceptions.URLError(
275 275 "url [%s] does not look like an git" % (cleaned_uri,))
276 276
277 277 return True
278 278
279 279 @reraise_safe_exceptions
280 280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 281 remote_refs = self.fetch(wire, url, apply_refs=False)
282 282 repo = self._factory.repo(wire)
283 283 if isinstance(valid_refs, list):
284 284 valid_refs = tuple(valid_refs)
285 285
286 286 for k in remote_refs:
287 287 # only parse heads/tags and skip so called deferred tags
288 288 if k.startswith(valid_refs) and not k.endswith(deferred):
289 289 repo[k] = remote_refs[k]
290 290
291 291 if update_after_clone:
292 292 # we want to checkout HEAD
293 293 repo["HEAD"] = remote_refs["HEAD"]
294 294 index.build_index_from_tree(repo.path, repo.index_path(),
295 295 repo.object_store, repo["HEAD"].tree)
296 296
297 297 # TODO: this is quite complex, check if that can be simplified
298 298 @reraise_safe_exceptions
299 299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 300 repo = self._factory.repo(wire)
301 301 object_store = repo.object_store
302 302
303 303 # Create tree and populates it with blobs
304 304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 305
306 306 for node in updated:
307 307 # Compute subdirs if needed
308 308 dirpath, nodename = vcspath.split(node['path'])
309 309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 310 parent = commit_tree
311 311 ancestors = [('', parent)]
312 312
313 313 # Tries to dig for the deepest existing tree
314 314 while dirnames:
315 315 curdir = dirnames.pop(0)
316 316 try:
317 317 dir_id = parent[curdir][1]
318 318 except KeyError:
319 319 # puts curdir back into dirnames and stops
320 320 dirnames.insert(0, curdir)
321 321 break
322 322 else:
323 323 # If found, updates parent
324 324 parent = repo[dir_id]
325 325 ancestors.append((curdir, parent))
326 326 # Now parent is deepest existing tree and we need to create
327 327 # subtrees for dirnames (in reverse order)
328 328 # [this only applies for nodes from added]
329 329 new_trees = []
330 330
331 331 blob = objects.Blob.from_string(node['content'])
332 332
333 333 if dirnames:
334 334 # If there are trees which should be created we need to build
335 335 # them now (in reverse order)
336 336 reversed_dirnames = list(reversed(dirnames))
337 337 curtree = objects.Tree()
338 338 curtree[node['node_path']] = node['mode'], blob.id
339 339 new_trees.append(curtree)
340 340 for dirname in reversed_dirnames[:-1]:
341 341 newtree = objects.Tree()
342 342 newtree[dirname] = (DIR_STAT, curtree.id)
343 343 new_trees.append(newtree)
344 344 curtree = newtree
345 345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 346 else:
347 347 parent.add(
348 348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 349
350 350 new_trees.append(parent)
351 351 # Update ancestors
352 352 reversed_ancestors = reversed(
353 353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 354 for parent, tree, path in reversed_ancestors:
355 355 parent[path] = (DIR_STAT, tree.id)
356 356 object_store.add_object(tree)
357 357
358 358 object_store.add_object(blob)
359 359 for tree in new_trees:
360 360 object_store.add_object(tree)
361 361
362 362 for node_path in removed:
363 363 paths = node_path.split('/')
364 364 tree = commit_tree
365 365 trees = [tree]
366 366 # Traverse deep into the forest...
367 367 for path in paths:
368 368 try:
369 369 obj = repo[tree[path][1]]
370 370 if isinstance(obj, objects.Tree):
371 371 trees.append(obj)
372 372 tree = obj
373 373 except KeyError:
374 374 break
375 375 # Cut down the blob and all rotten trees on the way back...
376 376 for path, tree in reversed(zip(paths, trees)):
377 377 del tree[path]
378 378 if tree:
379 379 # This tree still has elements - don't remove it or any
380 380 # of its parents
381 381 break
382 382
383 383 object_store.add_object(commit_tree)
384 384
385 385 # Create commit
386 386 commit = objects.Commit()
387 387 commit.tree = commit_tree.id
388 388 for k, v in commit_data.iteritems():
389 389 setattr(commit, k, v)
390 390 object_store.add_object(commit)
391 391
392 392 ref = 'refs/heads/%s' % branch
393 393 repo.refs[ref] = commit.id
394 394
395 395 return commit.id
396 396
397 397 @reraise_safe_exceptions
398 398 def fetch(self, wire, url, apply_refs=True, refs=None):
399 399 if url != 'default' and '://' not in url:
400 400 client = LocalGitClient(url)
401 401 else:
402 402 url_obj = url_parser(url)
403 403 o = self._build_opener(url)
404 404 url, _ = url_obj.authinfo()
405 405 client = HttpGitClient(base_url=url, opener=o)
406 406 repo = self._factory.repo(wire)
407 407
408 408 determine_wants = repo.object_store.determine_wants_all
409 409 if refs:
410 410 def determine_wants_requested(references):
411 411 return [references[r] for r in references if r in refs]
412 412 determine_wants = determine_wants_requested
413 413
414 414 try:
415 415 remote_refs = client.fetch(
416 416 path=url, target=repo, determine_wants=determine_wants)
417 417 except NotGitRepository as e:
418 418 log.warning(
419 419 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 420 # Exception can contain unicode which we convert
421 421 raise exceptions.AbortException(repr(e))
422 422
423 423 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 424 # refs filtered by `determine_wants` function. We need to filter result
425 425 # as well
426 426 if refs:
427 427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 428
429 429 if apply_refs:
430 430 # TODO: johbo: Needs proper test coverage with a git repository
431 431 # that contains a tag object, so that we would end up with
432 432 # a peeled ref at this point.
433 433 PEELED_REF_MARKER = '^{}'
434 434 for k in remote_refs:
435 435 if k.endswith(PEELED_REF_MARKER):
436 436 log.info("Skipping peeled reference %s", k)
437 437 continue
438 438 repo[k] = remote_refs[k]
439 439
440 440 if refs:
441 441 # mikhail: explicitly set the head to the last ref.
442 442 repo['HEAD'] = remote_refs[refs[-1]]
443 443
444 444 # TODO: mikhail: should we return remote_refs here to be
445 445 # consistent?
446 446 else:
447 447 return remote_refs
448 448
449 449 @reraise_safe_exceptions
450 450 def sync_push(self, wire, url, refs=None):
451 451 if self.check_url(url, wire):
452 452 repo = self._factory.repo(wire)
453 453 self.run_git_command(
454 454 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 455 _copts=['-c', 'core.askpass=""'],
456 456 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457 457
458 458 @reraise_safe_exceptions
459 459 def get_remote_refs(self, wire, url):
460 460 repo = Repo(url)
461 461 return repo.get_refs()
462 462
463 463 @reraise_safe_exceptions
464 464 def get_description(self, wire):
465 465 repo = self._factory.repo(wire)
466 466 return repo.get_description()
467 467
468 468 @reraise_safe_exceptions
469 469 def get_file_history(self, wire, file_path, commit_id, limit):
470 470 repo = self._factory.repo(wire)
471 471 include = [commit_id]
472 472 paths = [file_path]
473 473
474 474 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 475 return [x.commit.id for x in walker]
476 476
477 477 @reraise_safe_exceptions
478 478 def get_missing_revs(self, wire, rev1, rev2, path2):
479 479 repo = self._factory.repo(wire)
480 480 LocalGitClient(thin_packs=False).fetch(path2, repo)
481 481
482 482 wire_remote = wire.copy()
483 483 wire_remote['path'] = path2
484 484 repo_remote = self._factory.repo(wire_remote)
485 485 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486 486
487 487 revs = [
488 488 x.commit.id
489 489 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 490 return revs
491 491
492 492 @reraise_safe_exceptions
493 493 def get_object(self, wire, sha):
494 494 repo = self._factory.repo(wire)
495 495 obj = repo.get_object(sha)
496 496 commit_id = obj.id
497 497
498 498 if isinstance(obj, Tag):
499 499 commit_id = obj.object[1]
500 500
501 501 return {
502 502 'id': obj.id,
503 503 'type': obj.type_name,
504 504 'commit_id': commit_id
505 505 }
506 506
507 507 @reraise_safe_exceptions
508 508 def get_object_attrs(self, wire, sha, *attrs):
509 509 repo = self._factory.repo(wire)
510 510 obj = repo.get_object(sha)
511 511 return list(getattr(obj, a) for a in attrs)
512 512
513 513 @reraise_safe_exceptions
514 514 def get_refs(self, wire):
515 515 repo = self._factory.repo(wire)
516 516 result = {}
517 517 for ref, sha in repo.refs.as_dict().items():
518 518 peeled_sha = repo.get_peeled(ref)
519 519 result[ref] = peeled_sha
520 520 return result
521 521
522 522 @reraise_safe_exceptions
523 523 def get_refs_path(self, wire):
524 524 repo = self._factory.repo(wire)
525 525 return repo.refs.path
526 526
527 527 @reraise_safe_exceptions
528 528 def head(self, wire):
529 529 repo = self._factory.repo(wire)
530 530 return repo.head()
531 531
532 532 @reraise_safe_exceptions
533 533 def init(self, wire):
534 534 repo_path = str_to_dulwich(wire['path'])
535 535 self.repo = Repo.init(repo_path)
536 536
537 537 @reraise_safe_exceptions
538 538 def init_bare(self, wire):
539 539 repo_path = str_to_dulwich(wire['path'])
540 540 self.repo = Repo.init_bare(repo_path)
541 541
542 542 @reraise_safe_exceptions
543 543 def revision(self, wire, rev):
544 544 repo = self._factory.repo(wire)
545 545 obj = repo[rev]
546 546 obj_data = {
547 547 'id': obj.id,
548 548 }
549 549 try:
550 550 obj_data['tree'] = obj.tree
551 551 except AttributeError:
552 552 pass
553 553 return obj_data
554 554
555 555 @reraise_safe_exceptions
556 556 def commit_attribute(self, wire, rev, attr):
557 557 repo = self._factory.repo(wire)
558 558 obj = repo[rev]
559 559 return getattr(obj, attr)
560 560
561 561 @reraise_safe_exceptions
562 562 def set_refs(self, wire, key, value):
563 563 repo = self._factory.repo(wire)
564 564 repo.refs[key] = value
565 565
566 566 @reraise_safe_exceptions
567 567 def remove_ref(self, wire, key):
568 568 repo = self._factory.repo(wire)
569 569 del repo.refs[key]
570 570
571 571 @reraise_safe_exceptions
572 572 def tree_changes(self, wire, source_id, target_id):
573 573 repo = self._factory.repo(wire)
574 574 source = repo[source_id].tree if source_id else None
575 575 target = repo[target_id].tree
576 576 result = repo.object_store.tree_changes(source, target)
577 577 return list(result)
578 578
579 579 @reraise_safe_exceptions
580 580 def tree_items(self, wire, tree_id):
581 581 repo = self._factory.repo(wire)
582 582 tree = repo[tree_id]
583 583
584 584 result = []
585 585 for item in tree.iteritems():
586 586 item_sha = item.sha
587 587 item_mode = item.mode
588 588
589 589 if FILE_MODE(item_mode) == GIT_LINK:
590 590 item_type = "link"
591 591 else:
592 592 item_type = repo[item_sha].type_name
593 593
594 594 result.append((item.path, item_mode, item_sha, item_type))
595 595 return result
596 596
597 597 @reraise_safe_exceptions
598 598 def update_server_info(self, wire):
599 599 repo = self._factory.repo(wire)
600 600 update_server_info(repo)
601 601
602 602 @reraise_safe_exceptions
603 603 def discover_git_version(self):
604 604 stdout, _ = self.run_git_command(
605 605 {}, ['--version'], _bare=True, _safe=True)
606 606 prefix = 'git version'
607 607 if stdout.startswith(prefix):
608 608 stdout = stdout[len(prefix):]
609 609 return stdout.strip()
610 610
611 611 @reraise_safe_exceptions
612 612 def run_git_command(self, wire, cmd, **opts):
613 613 path = wire.get('path', None)
614 614
615 615 if path and os.path.isdir(path):
616 616 opts['cwd'] = path
617 617
618 618 if '_bare' in opts:
619 619 _copts = []
620 620 del opts['_bare']
621 621 else:
622 622 _copts = ['-c', 'core.quotepath=false', ]
623 623 safe_call = False
624 624 if '_safe' in opts:
625 625 # no exc on failure
626 626 del opts['_safe']
627 627 safe_call = True
628 628
629 629 if '_copts' in opts:
630 630 _copts.extend(opts['_copts'] or [])
631 631 del opts['_copts']
632 632
633 633 gitenv = os.environ.copy()
634 634 gitenv.update(opts.pop('extra_env', {}))
635 635 # we need to remove GIT_DIR from the environment
636 636 if 'GIT_DIR' in gitenv:
637 637 del gitenv['GIT_DIR']
638 638 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
639 639
640 640 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
641 641
642 642 try:
643 643 _opts = {'env': gitenv, 'shell': False}
644 644 _opts.update(opts)
645 645 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
646 646
647 647 return ''.join(p), ''.join(p.error)
648 648 except (EnvironmentError, OSError) as err:
649 649 cmd = ' '.join(cmd) # human friendly CMD
650 650 tb_err = ("Couldn't run git command (%s).\n"
651 651 "Original error was:%s\n" % (cmd, err))
652 652 log.exception(tb_err)
653 653 if safe_call:
654 654 return '', err
655 655 else:
656 656 raise exceptions.VcsException(tb_err)
657 657
658 @reraise_safe_exceptions
659 def install_hooks(self, wire, force=False):
660 from vcsserver.hook_utils import install_git_hooks
661 repo = self._factory.repo(wire)
662 return install_git_hooks(repo.path, repo.bare, force_create=force)
663
658 664
659 665 def str_to_dulwich(value):
660 666 """
661 667 Dulwich 0.10.1a requires `unicode` objects to be passed in.
662 668 """
663 669 return value.decode(settings.WIRE_ENCODING)
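A minimal sketch of how the `install_hooks` remote method added in this hunk might be exercised, assuming a `GitRemote` instance named `remote` is already constructed the way the rest of vcsserver wires it up; the wire payload below is hypothetical:

wire = {'path': '/srv/repos/project.git', 'config': []}
# delegates to vcsserver.hook_utils.install_git_hooks(repo.path, repo.bare, ...)
remote.install_hooks(wire, force=True)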
@@ -1,771 +1,776 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from the repository's hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 77 except (Abort, InterventionRequired):
78 78 raise_from_original(exceptions.AbortException)
79 79 except RepoLookupError:
80 80 raise_from_original(exceptions.LookupException)
81 81 except RequirementError:
82 82 raise_from_original(exceptions.RequirementException)
83 83 except RepoError:
84 84 raise_from_original(exceptions.VcsException)
85 85 except LookupError:
86 86 raise_from_original(exceptions.LookupException)
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 90 raise_from_original(exceptions.UnhandledException)
91 91 raise
92 92 return wrapper
93 93
94 94
95 95 class MercurialFactory(RepoFactory):
96 96
97 97 def _create_config(self, config, hooks=True):
98 98 if not hooks:
99 99 hooks_to_clean = frozenset((
100 100 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 102 new_config = []
103 103 for section, option, value in config:
104 104 if section == 'hooks' and option in hooks_to_clean:
105 105 continue
106 106 new_config.append((section, option, value))
107 107 config = new_config
108 108
109 109 baseui = make_ui_from_config(config)
110 110 return baseui
111 111
112 112 def _create_repo(self, wire, create):
113 113 baseui = self._create_config(wire["config"])
114 114 return localrepository(baseui, wire["path"], create)
115 115
116 116
117 117 class HgRemote(object):
118 118
119 119 def __init__(self, factory):
120 120 self._factory = factory
121 121
122 122 self._bulk_methods = {
123 123 "affected_files": self.ctx_files,
124 124 "author": self.ctx_user,
125 125 "branch": self.ctx_branch,
126 126 "children": self.ctx_children,
127 127 "date": self.ctx_date,
128 128 "message": self.ctx_description,
129 129 "parents": self.ctx_parents,
130 130 "status": self.ctx_status,
131 131 "obsolete": self.ctx_obsolete,
132 132 "phase": self.ctx_phase,
133 133 "hidden": self.ctx_hidden,
134 134 "_file_paths": self.ctx_list,
135 135 }
136 136
137 137 @reraise_safe_exceptions
138 138 def discover_hg_version(self):
139 139 from mercurial import util
140 140 return util.version()
141 141
142 142 @reraise_safe_exceptions
143 143 def archive_repo(self, archive_path, mtime, file_info, kind):
144 144 if kind == "tgz":
145 145 archiver = archival.tarit(archive_path, mtime, "gz")
146 146 elif kind == "tbz2":
147 147 archiver = archival.tarit(archive_path, mtime, "bz2")
148 148 elif kind == 'zip':
149 149 archiver = archival.zipit(archive_path, mtime)
150 150 else:
151 151 raise exceptions.ArchiveException(
152 152 'Remote does not support: "%s".' % kind)
153 153
154 154 for f_path, f_mode, f_is_link, f_content in file_info:
155 155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 156 archiver.done()
157 157
158 158 @reraise_safe_exceptions
159 159 def bookmarks(self, wire):
160 160 repo = self._factory.repo(wire)
161 161 return dict(repo._bookmarks)
162 162
163 163 @reraise_safe_exceptions
164 164 def branches(self, wire, normal, closed):
165 165 repo = self._factory.repo(wire)
166 166 iter_branches = repo.branchmap().iterbranches()
167 167 bt = {}
168 168 for branch_name, _heads, tip, is_closed in iter_branches:
169 169 if normal and not is_closed:
170 170 bt[branch_name] = tip
171 171 if closed and is_closed:
172 172 bt[branch_name] = tip
173 173
174 174 return bt
175 175
176 176 @reraise_safe_exceptions
177 177 def bulk_request(self, wire, rev, pre_load):
178 178 result = {}
179 179 for attr in pre_load:
180 180 try:
181 181 method = self._bulk_methods[attr]
182 182 result[attr] = method(wire, rev)
183 183 except KeyError:
184 184 raise exceptions.VcsException(
185 185 'Unknown bulk attribute: "%s"' % attr)
186 186 return result
187 187
188 188 @reraise_safe_exceptions
189 189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 191 clone(baseui, source, dest, noupdate=not update_after_clone)
192 192
193 193 @reraise_safe_exceptions
194 194 def commitctx(
195 195 self, wire, message, parents, commit_time, commit_timezone,
196 196 user, files, extra, removed, updated):
197 197
198 198 def _filectxfn(_repo, memctx, path):
199 199 """
200 200 Marks given path as added/changed/removed in a given _repo. This is
201 201 for internal mercurial commit function.
202 202 """
203 203
204 204 # check if this path is removed
205 205 if path in removed:
206 206 # returning None is a way to mark node for removal
207 207 return None
208 208
209 209 # check if this path is added
210 210 for node in updated:
211 211 if node['path'] == path:
212 212 return memfilectx(
213 213 _repo,
214 214 path=node['path'],
215 215 data=node['content'],
216 216 islink=False,
217 217 isexec=bool(node['mode'] & stat.S_IXUSR),
218 218 copied=False,
219 219 memctx=memctx)
220 220
221 221 raise exceptions.AbortException(
222 222 "Given path hasn't been marked as added, "
223 223 "changed or removed (%s)" % path)
224 224
225 225 repo = self._factory.repo(wire)
226 226
227 227 commit_ctx = memctx(
228 228 repo=repo,
229 229 parents=parents,
230 230 text=message,
231 231 files=files,
232 232 filectxfn=_filectxfn,
233 233 user=user,
234 234 date=(commit_time, commit_timezone),
235 235 extra=extra)
236 236
237 237 n = repo.commitctx(commit_ctx)
238 238 new_id = hex(n)
239 239
240 240 return new_id
241 241
242 242 @reraise_safe_exceptions
243 243 def ctx_branch(self, wire, revision):
244 244 repo = self._factory.repo(wire)
245 245 ctx = repo[revision]
246 246 return ctx.branch()
247 247
248 248 @reraise_safe_exceptions
249 249 def ctx_children(self, wire, revision):
250 250 repo = self._factory.repo(wire)
251 251 ctx = repo[revision]
252 252 return [child.rev() for child in ctx.children()]
253 253
254 254 @reraise_safe_exceptions
255 255 def ctx_date(self, wire, revision):
256 256 repo = self._factory.repo(wire)
257 257 ctx = repo[revision]
258 258 return ctx.date()
259 259
260 260 @reraise_safe_exceptions
261 261 def ctx_description(self, wire, revision):
262 262 repo = self._factory.repo(wire)
263 263 ctx = repo[revision]
264 264 return ctx.description()
265 265
266 266 @reraise_safe_exceptions
267 267 def ctx_diff(
268 268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 269 repo = self._factory.repo(wire)
270 270 ctx = repo[revision]
271 271 result = ctx.diff(
272 272 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 273 return list(result)
274 274
275 275 @reraise_safe_exceptions
276 276 def ctx_files(self, wire, revision):
277 277 repo = self._factory.repo(wire)
278 278 ctx = repo[revision]
279 279 return ctx.files()
280 280
281 281 @reraise_safe_exceptions
282 282 def ctx_list(self, path, revision):
283 283 repo = self._factory.repo(path)
284 284 ctx = repo[revision]
285 285 return list(ctx)
286 286
287 287 @reraise_safe_exceptions
288 288 def ctx_parents(self, wire, revision):
289 289 repo = self._factory.repo(wire)
290 290 ctx = repo[revision]
291 291 return [parent.rev() for parent in ctx.parents()]
292 292
293 293 @reraise_safe_exceptions
294 294 def ctx_phase(self, wire, revision):
295 295 repo = self._factory.repo(wire)
296 296 ctx = repo[revision]
297 297         # public=0, draft=1, secret=2
298 298 return ctx.phase()
299 299
300 300 @reraise_safe_exceptions
301 301 def ctx_obsolete(self, wire, revision):
302 302 repo = self._factory.repo(wire)
303 303 ctx = repo[revision]
304 304 return ctx.obsolete()
305 305
306 306 @reraise_safe_exceptions
307 307 def ctx_hidden(self, wire, revision):
308 308 repo = self._factory.repo(wire)
309 309 ctx = repo[revision]
310 310 return ctx.hidden()
311 311
312 312 @reraise_safe_exceptions
313 313 def ctx_substate(self, wire, revision):
314 314 repo = self._factory.repo(wire)
315 315 ctx = repo[revision]
316 316 return ctx.substate
317 317
318 318 @reraise_safe_exceptions
319 319 def ctx_status(self, wire, revision):
320 320 repo = self._factory.repo(wire)
321 321 ctx = repo[revision]
322 322 status = repo[ctx.p1().node()].status(other=ctx.node())
323 323 # object of status (odd, custom named tuple in mercurial) is not
324 324         # correctly serializable; we make it a list, as the underlying
325 325 # API expects this to be a list
326 326 return list(status)
327 327
328 328 @reraise_safe_exceptions
329 329 def ctx_user(self, wire, revision):
330 330 repo = self._factory.repo(wire)
331 331 ctx = repo[revision]
332 332 return ctx.user()
333 333
334 334 @reraise_safe_exceptions
335 335 def check_url(self, url, config):
336 336 _proto = None
337 337 if '+' in url[:url.find('://')]:
338 338 _proto = url[0:url.find('+')]
339 339 url = url[url.find('+') + 1:]
340 340 handlers = []
341 341 url_obj = url_parser(url)
342 342 test_uri, authinfo = url_obj.authinfo()
343 343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 344 url_obj.query = obfuscate_qs(url_obj.query)
345 345
346 346 cleaned_uri = str(url_obj)
347 347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348 348
349 349 if authinfo:
350 350 # create a password manager
351 351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 352 passmgr.add_password(*authinfo)
353 353
354 354 handlers.extend((httpbasicauthhandler(passmgr),
355 355 httpdigestauthhandler(passmgr)))
356 356
357 357 o = urllib2.build_opener(*handlers)
358 358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 359 ('Accept', 'application/mercurial-0.1')]
360 360
361 361 q = {"cmd": 'between'}
362 362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 363 qs = '?%s' % urllib.urlencode(q)
364 364 cu = "%s%s" % (test_uri, qs)
365 365 req = urllib2.Request(cu, None, {})
366 366
367 367 try:
368 368 log.debug("Trying to open URL %s", cleaned_uri)
369 369 resp = o.open(req)
370 370 if resp.code != 200:
371 371 raise exceptions.URLError('Return Code is not 200')
372 372 except Exception as e:
373 373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 374 # means it cannot be cloned
375 375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376 376
377 377 # now check if it's a proper hg repo, but don't do it for svn
378 378 try:
379 379 if _proto == 'svn':
380 380 pass
381 381 else:
382 382 # check for pure hg repos
383 383 log.debug(
384 384 "Verifying if URL is a Mercurial repository: %s",
385 385 cleaned_uri)
386 386 httppeer(make_ui_from_config(config), url).lookup('tip')
387 387 except Exception as e:
388 388 log.warning("URL is not a valid Mercurial repository: %s",
389 389 cleaned_uri)
390 390 raise exceptions.URLError(
391 391 "url [%s] does not look like an hg repo org_exc: %s"
392 392 % (cleaned_uri, e))
393 393
394 394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
395 395 return True
396 396
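For reference, the probe above issues Mercurial's `between` wire command against two null revisions before attempting a `lookup('tip')`. A sketch of the query string being built (the target URI is a placeholder):

import urllib

test_uri = 'https://example.com/some/repo'  # placeholder, normally from url_obj.authinfo()
q = {'cmd': 'between', 'pairs': '%s-%s' % ('0' * 40, '0' * 40)}
probe_url = '%s?%s' % (test_uri, urllib.urlencode(q))
# -> https://example.com/some/repo?cmd=between&pairs=00...00-00...00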
397 397 @reraise_safe_exceptions
398 398 def diff(
399 399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
400 400 context):
401 401 repo = self._factory.repo(wire)
402 402
403 403 if file_filter:
404 404 match_filter = match(file_filter[0], '', [file_filter[1]])
405 405 else:
406 406 match_filter = file_filter
407 407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
408 408
409 409 try:
410 410 return "".join(patch.diff(
411 411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 412 except RepoLookupError:
413 413 raise exceptions.LookupException()
414 414
415 415 @reraise_safe_exceptions
416 416 def file_history(self, wire, revision, path, limit):
417 417 repo = self._factory.repo(wire)
418 418
419 419 ctx = repo[revision]
420 420 fctx = ctx.filectx(path)
421 421
422 422 def history_iter():
423 423 limit_rev = fctx.rev()
424 424 for obj in reversed(list(fctx.filelog())):
425 425 obj = fctx.filectx(obj)
426 426 if limit_rev >= obj.rev():
427 427 yield obj
428 428
429 429 history = []
430 430 for cnt, obj in enumerate(history_iter()):
431 431 if limit and cnt >= limit:
432 432 break
433 433 history.append(hex(obj.node()))
434 434
435 435 return [x for x in history]
436 436
437 437 @reraise_safe_exceptions
438 438 def file_history_untill(self, wire, revision, path, limit):
439 439 repo = self._factory.repo(wire)
440 440 ctx = repo[revision]
441 441 fctx = ctx.filectx(path)
442 442
443 443 file_log = list(fctx.filelog())
444 444 if limit:
445 445 # Limit to the last n items
446 446 file_log = file_log[-limit:]
447 447
448 448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
449 449
450 450 @reraise_safe_exceptions
451 451 def fctx_annotate(self, wire, revision, path):
452 452 repo = self._factory.repo(wire)
453 453 ctx = repo[revision]
454 454 fctx = ctx.filectx(path)
455 455
456 456 result = []
457 457 for i, (a_line, content) in enumerate(fctx.annotate()):
458 458 ln_no = i + 1
459 459 sha = hex(a_line.fctx.node())
460 460 result.append((ln_no, sha, content))
461 461 return result
462 462
463 463 @reraise_safe_exceptions
464 464 def fctx_data(self, wire, revision, path):
465 465 repo = self._factory.repo(wire)
466 466 ctx = repo[revision]
467 467 fctx = ctx.filectx(path)
468 468 return fctx.data()
469 469
470 470 @reraise_safe_exceptions
471 471 def fctx_flags(self, wire, revision, path):
472 472 repo = self._factory.repo(wire)
473 473 ctx = repo[revision]
474 474 fctx = ctx.filectx(path)
475 475 return fctx.flags()
476 476
477 477 @reraise_safe_exceptions
478 478 def fctx_size(self, wire, revision, path):
479 479 repo = self._factory.repo(wire)
480 480 ctx = repo[revision]
481 481 fctx = ctx.filectx(path)
482 482 return fctx.size()
483 483
484 484 @reraise_safe_exceptions
485 485 def get_all_commit_ids(self, wire, name):
486 486 repo = self._factory.repo(wire)
487 487 revs = repo.filtered(name).changelog.index
488 488 return map(lambda x: hex(x[7]), revs)[:-1]
489 489
490 490 @reraise_safe_exceptions
491 491 def get_config_value(self, wire, section, name, untrusted=False):
492 492 repo = self._factory.repo(wire)
493 493 return repo.ui.config(section, name, untrusted=untrusted)
494 494
495 495 @reraise_safe_exceptions
496 496 def get_config_bool(self, wire, section, name, untrusted=False):
497 497 repo = self._factory.repo(wire)
498 498 return repo.ui.configbool(section, name, untrusted=untrusted)
499 499
500 500 @reraise_safe_exceptions
501 501 def get_config_list(self, wire, section, name, untrusted=False):
502 502 repo = self._factory.repo(wire)
503 503 return repo.ui.configlist(section, name, untrusted=untrusted)
504 504
505 505 @reraise_safe_exceptions
506 506 def is_large_file(self, wire, path):
507 507 return largefiles.lfutil.isstandin(path)
508 508
509 509 @reraise_safe_exceptions
510 510 def in_largefiles_store(self, wire, sha):
511 511 repo = self._factory.repo(wire)
512 512 return largefiles.lfutil.instore(repo, sha)
513 513
514 514 @reraise_safe_exceptions
515 515 def in_user_cache(self, wire, sha):
516 516 repo = self._factory.repo(wire)
517 517 return largefiles.lfutil.inusercache(repo.ui, sha)
518 518
519 519 @reraise_safe_exceptions
520 520 def store_path(self, wire, sha):
521 521 repo = self._factory.repo(wire)
522 522 return largefiles.lfutil.storepath(repo, sha)
523 523
524 524 @reraise_safe_exceptions
525 525 def link(self, wire, sha, path):
526 526 repo = self._factory.repo(wire)
527 527 largefiles.lfutil.link(
528 528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529 529
530 530 @reraise_safe_exceptions
531 531 def localrepository(self, wire, create=False):
532 532 self._factory.repo(wire, create=create)
533 533
534 534 @reraise_safe_exceptions
535 535 def lookup(self, wire, revision, both):
536 536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 537 if isinstance(revision, float):
538 538 revision = long(revision)
539 539 repo = self._factory.repo(wire)
540 540 try:
541 541 ctx = repo[revision]
542 542 except RepoLookupError:
543 543 raise exceptions.LookupException(revision)
544 544 except LookupError as e:
545 545 raise exceptions.LookupException(e.name)
546 546
547 547 if not both:
548 548 return ctx.hex()
549 549
550 550 ctx = repo[ctx.hex()]
551 551 return ctx.hex(), ctx.rev()
552 552
553 553 @reraise_safe_exceptions
554 554 def pull(self, wire, url, commit_ids=None):
555 555 repo = self._factory.repo(wire)
556 556 # Disable any prompts for this repo
557 557 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
558 558
559 559 remote = peer(repo, {}, url)
560 560 # Disable any prompts for this remote
561 561 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
562 562
563 563 if commit_ids:
564 564 commit_ids = [bin(commit_id) for commit_id in commit_ids]
565 565
566 566 return exchange.pull(
567 567 repo, remote, heads=commit_ids, force=None).cgresult
568 568
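Worth noting: `exchange.pull` expects binary node ids for `heads`, while the wire layer passes 40-character hex strings, hence the `bin()` conversion above. A tiny round-trip sketch (the hash is a placeholder):

from mercurial.node import bin, hex

commit_id = 'a' * 40          # placeholder 40-char hex commit id
node = bin(commit_id)         # 20-byte binary node, as exchange.pull expects
assert hex(node) == commit_id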
569 569 @reraise_safe_exceptions
570 570 def sync_push(self, wire, url):
571 571 if self.check_url(url, wire['config']):
572 572 repo = self._factory.repo(wire)
573 573
574 574 # Disable any prompts for this repo
575 575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576 576
577 577 bookmarks = dict(repo._bookmarks).keys()
578 578 remote = peer(repo, {}, url)
579 579 # Disable any prompts for this remote
580 580 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
581 581
582 582 return exchange.push(
583 583 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
584 584
585 585 @reraise_safe_exceptions
586 586 def revision(self, wire, rev):
587 587 repo = self._factory.repo(wire)
588 588 ctx = repo[rev]
589 589 return ctx.rev()
590 590
591 591 @reraise_safe_exceptions
592 592 def rev_range(self, wire, filter):
593 593 repo = self._factory.repo(wire)
594 594 revisions = [rev for rev in revrange(repo, filter)]
595 595 return revisions
596 596
597 597 @reraise_safe_exceptions
598 598 def rev_range_hash(self, wire, node):
599 599 repo = self._factory.repo(wire)
600 600
601 601 def get_revs(repo, rev_opt):
602 602 if rev_opt:
603 603 revs = revrange(repo, rev_opt)
604 604 if len(revs) == 0:
605 605 return (nullrev, nullrev)
606 606 return max(revs), min(revs)
607 607 else:
608 608 return len(repo) - 1, 0
609 609
610 610 stop, start = get_revs(repo, [node + ':'])
611 611 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
612 612 return revs
613 613
614 614 @reraise_safe_exceptions
615 615 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
616 616 other_path = kwargs.pop('other_path', None)
617 617
618 618 # case when we want to compare two independent repositories
619 619 if other_path and other_path != wire["path"]:
620 620 baseui = self._factory._create_config(wire["config"])
621 621 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
622 622 else:
623 623 repo = self._factory.repo(wire)
624 624 return list(repo.revs(rev_spec, *args))
625 625
626 626 @reraise_safe_exceptions
627 627 def strip(self, wire, revision, update, backup):
628 628 repo = self._factory.repo(wire)
629 629 ctx = repo[revision]
630 630 hgext_strip(
631 631 repo.baseui, repo, ctx.node(), update=update, backup=backup)
632 632
633 633 @reraise_safe_exceptions
634 634 def verify(self, wire,):
635 635 repo = self._factory.repo(wire)
636 636 baseui = self._factory._create_config(wire['config'])
637 637 baseui.setconfig('ui', 'quiet', 'false')
638 638 output = io.BytesIO()
639 639
640 640 def write(data, **unused_kwargs):
641 641 output.write(data)
642 642 baseui.write = write
643 643
644 644 repo.ui = baseui
645 645 verify.verify(repo)
646 646 return output.getvalue()
647 647
648 648 @reraise_safe_exceptions
649 649 def tag(self, wire, name, revision, message, local, user,
650 650 tag_time, tag_timezone):
651 651 repo = self._factory.repo(wire)
652 652 ctx = repo[revision]
653 653 node = ctx.node()
654 654
655 655 date = (tag_time, tag_timezone)
656 656 try:
657 657 hg_tag.tag(repo, name, node, message, local, user, date)
658 658 except Abort as e:
659 659 log.exception("Tag operation aborted")
660 660 # Exception can contain unicode which we convert
661 661 raise exceptions.AbortException(repr(e))
662 662
663 663 @reraise_safe_exceptions
664 664 def tags(self, wire):
665 665 repo = self._factory.repo(wire)
666 666 return repo.tags()
667 667
668 668 @reraise_safe_exceptions
669 669 def update(self, wire, node=None, clean=False):
670 670 repo = self._factory.repo(wire)
671 671 baseui = self._factory._create_config(wire['config'])
672 672 commands.update(baseui, repo, node=node, clean=clean)
673 673
674 674 @reraise_safe_exceptions
675 675 def identify(self, wire):
676 676 repo = self._factory.repo(wire)
677 677 baseui = self._factory._create_config(wire['config'])
678 678 output = io.BytesIO()
679 679 baseui.write = output.write
680 680 # This is required to get a full node id
681 681 baseui.debugflag = True
682 682 commands.identify(baseui, repo, id=True)
683 683
684 684 return output.getvalue()
685 685
686 686 @reraise_safe_exceptions
687 687 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
688 688 hooks=True):
689 689 repo = self._factory.repo(wire)
690 690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
691 691
692 692         # Mercurial internally has a lot of logic that checks ONLY whether
693 693         # an option is defined; we only pass the options that are actually set
694 694 opts = {}
695 695 if bookmark:
696 696 opts['bookmark'] = bookmark
697 697 if branch:
698 698 opts['branch'] = branch
699 699 if revision:
700 700 opts['rev'] = revision
701 701
702 702 commands.pull(baseui, repo, source, **opts)
703 703
704 704 @reraise_safe_exceptions
705 705 def heads(self, wire, branch=None):
706 706 repo = self._factory.repo(wire)
707 707 baseui = self._factory._create_config(wire['config'])
708 708 output = io.BytesIO()
709 709
710 710 def write(data, **unused_kwargs):
711 711 output.write(data)
712 712
713 713 baseui.write = write
714 714 if branch:
715 715 args = [branch]
716 716 else:
717 717 args = []
718 718 commands.heads(baseui, repo, template='{node} ', *args)
719 719
720 720 return output.getvalue()
721 721
722 722 @reraise_safe_exceptions
723 723 def ancestor(self, wire, revision1, revision2):
724 724 repo = self._factory.repo(wire)
725 725 changelog = repo.changelog
726 726 lookup = repo.lookup
727 727 a = changelog.ancestor(lookup(revision1), lookup(revision2))
728 728 return hex(a)
729 729
730 730 @reraise_safe_exceptions
731 731 def push(self, wire, revisions, dest_path, hooks=True,
732 732 push_branches=False):
733 733 repo = self._factory.repo(wire)
734 734 baseui = self._factory._create_config(wire['config'], hooks=hooks)
735 735 commands.push(baseui, repo, dest=dest_path, rev=revisions,
736 736 new_branch=push_branches)
737 737
738 738 @reraise_safe_exceptions
739 739 def merge(self, wire, revision):
740 740 repo = self._factory.repo(wire)
741 741 baseui = self._factory._create_config(wire['config'])
742 742 repo.ui.setconfig('ui', 'merge', 'internal:dump')
743 743
744 744         # If subrepositories are used, mercurial prompts the user in case of
745 745         # merge conflicts or different subrepository sources. By setting the
746 746         # interactive flag to `False` mercurial doesn't prompt the user but
747 747         # instead uses a default value.
748 748 repo.ui.setconfig('ui', 'interactive', False)
749 749
750 750 commands.merge(baseui, repo, rev=revision)
751 751
752 752 @reraise_safe_exceptions
753 753 def commit(self, wire, message, username, close_branch=False):
754 754 repo = self._factory.repo(wire)
755 755 baseui = self._factory._create_config(wire['config'])
756 756 repo.ui.setconfig('ui', 'username', username)
757 757 commands.commit(baseui, repo, message=message, close_branch=close_branch)
758 758
759 759 @reraise_safe_exceptions
760 760 def rebase(self, wire, source=None, dest=None, abort=False):
761 761 repo = self._factory.repo(wire)
762 762 baseui = self._factory._create_config(wire['config'])
763 763 repo.ui.setconfig('ui', 'merge', 'internal:dump')
764 764 rebase.rebase(
765 765 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
766 766
767 767 @reraise_safe_exceptions
768 768 def bookmark(self, wire, bookmark, revision=None):
769 769 repo = self._factory.repo(wire)
770 770 baseui = self._factory._create_config(wire['config'])
771 771 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
772
773 @reraise_safe_exceptions
774 def install_hooks(self, wire, force=False):
775 # we don't need any special hooks for Mercurial
776 pass
@@ -1,467 +1,541 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 import json
24 23 import logging
25 24 import collections
26 25 import importlib
26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 connection.request('POST', '/', body)
49 try:
50 connection.request('POST', '/', body)
51 except Exception:
52 log.error('Connection failed on %s', connection)
53 raise
50 54 response = connection.getresponse()
51 55 return json.loads(response.read())
52 56
53 57 def _serialize(self, hook_name, extras):
54 58 data = {
55 59 'method': hook_name,
56 60 'extras': extras
57 61 }
58 62 return json.dumps(data)
59 63
60 64
61 65 class HooksDummyClient(object):
62 66 def __init__(self, hooks_module):
63 67 self._hooks_module = importlib.import_module(hooks_module)
64 68
65 69 def __call__(self, hook_name, extras):
66 70 with self._hooks_module.Hooks() as hooks:
67 71 return getattr(hooks, hook_name)(extras)
68 72
69 73
70 74 class RemoteMessageWriter(object):
71 75 """Writer base class."""
72 76 def write(self, message):
73 77 raise NotImplementedError()
74 78
75 79
76 80 class HgMessageWriter(RemoteMessageWriter):
77 81 """Writer that knows how to send messages to mercurial clients."""
78 82
79 83 def __init__(self, ui):
80 84 self.ui = ui
81 85
82 86 def write(self, message):
83 87 # TODO: Check why the quiet flag is set by default.
84 88 old = self.ui.quiet
85 89 self.ui.quiet = False
86 90 self.ui.status(message.encode('utf-8'))
87 91 self.ui.quiet = old
88 92
89 93
90 94 class GitMessageWriter(RemoteMessageWriter):
91 95 """Writer that knows how to send messages to git clients."""
92 96
93 97 def __init__(self, stdout=None):
94 98 self.stdout = stdout or sys.stdout
95 99
96 100 def write(self, message):
97 101 self.stdout.write(message.encode('utf-8'))
98 102
99 103
104 class SvnMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to svn clients."""
106
107 def __init__(self, stderr=None):
108 # SVN needs data sent to stderr for back-to-client messaging
109 self.stderr = stderr or sys.stderr
110
111 def write(self, message):
112 self.stderr.write(message.encode('utf-8'))
113
114
100 115 def _handle_exception(result):
101 116 exception_class = result.get('exception')
102 117 exception_traceback = result.get('exception_traceback')
103 118
104 119 if exception_traceback:
105 120 log.error('Got traceback from remote call:%s', exception_traceback)
106 121
107 122 if exception_class == 'HTTPLockedRC':
108 123 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 124 elif exception_class == 'RepositoryError':
110 125 raise exceptions.VcsException(*result['exception_args'])
111 126 elif exception_class:
112 127 raise Exception('Got remote exception "%s" with args "%s"' %
113 128 (exception_class, result['exception_args']))
114 129
115 130
116 131 def _get_hooks_client(extras):
117 132 if 'hooks_uri' in extras:
118 133 protocol = extras.get('hooks_protocol')
119 134 return HooksHttpClient(extras['hooks_uri'])
120 135 else:
121 136 return HooksDummyClient(extras['hooks_module'])
122 137
123 138
124 139 def _call_hook(hook_name, extras, writer):
125 hooks = _get_hooks_client(extras)
126 result = hooks(hook_name, extras)
140 hooks_client = _get_hooks_client(extras)
141 log.debug('Hooks, using client:%s', hooks_client)
142 result = hooks_client(hook_name, extras)
127 143 log.debug('Hooks got result: %s', result)
128 144 writer.write(result['output'])
129 145 _handle_exception(result)
130 146
131 147 return result['status']
132 148
133 149
134 150 def _extras_from_ui(ui):
135 151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
136 152 if not hook_data:
137 153         # maybe it's inside environ?
138 154 env_hook_data = os.environ.get('RC_SCM_DATA')
139 155 if env_hook_data:
140 156 hook_data = env_hook_data
141 157
142 158 extras = {}
143 159 if hook_data:
144 160 extras = json.loads(hook_data)
145 161 return extras
146 162
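The same JSON blob can arrive either via the `[rhodecode] RC_SCM_DATA` ui setting or the process environment; a small sketch of the environment fallback (the keys shown are only the ones these hooks actually read, with illustrative values):

import os
import json

# illustrative content; 'hooks' and 'SSH' are the keys consulted by the
# hook functions in this module
os.environ['RC_SCM_DATA'] = json.dumps({'hooks': ['push', 'pull'], 'SSH': False})
# _extras_from_ui(ui) now falls back to this value when the ui setting is absent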
147 163
148 164 def _rev_range_hash(repo, node):
149 165
150 166 commits = []
151 167 for rev in xrange(repo[node], len(repo)):
152 168 ctx = repo[rev]
153 169 commit_id = mercurial.node.hex(ctx.node())
154 170 branch = ctx.branch()
155 171 commits.append((commit_id, branch))
156 172
157 173 return commits
158 174
159 175
160 176 def repo_size(ui, repo, **kwargs):
161 177 extras = _extras_from_ui(ui)
162 178 return _call_hook('repo_size', extras, HgMessageWriter(ui))
163 179
164 180
165 181 def pre_pull(ui, repo, **kwargs):
166 182 extras = _extras_from_ui(ui)
167 183 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
168 184
169 185
170 186 def pre_pull_ssh(ui, repo, **kwargs):
171 187 extras = _extras_from_ui(ui)
172 188 if extras and extras.get('SSH'):
173 189 return pre_pull(ui, repo, **kwargs)
174 190 return 0
175 191
176 192
177 193 def post_pull(ui, repo, **kwargs):
178 194 extras = _extras_from_ui(ui)
179 195 return _call_hook('post_pull', extras, HgMessageWriter(ui))
180 196
181 197
182 198 def post_pull_ssh(ui, repo, **kwargs):
183 199 extras = _extras_from_ui(ui)
184 200 if extras and extras.get('SSH'):
185 201 return post_pull(ui, repo, **kwargs)
186 202 return 0
187 203
188 204
189 205 def pre_push(ui, repo, node=None, **kwargs):
190 206 extras = _extras_from_ui(ui)
191 207
192 208 rev_data = []
193 209 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
194 210 branches = collections.defaultdict(list)
195 211 for commit_id, branch in _rev_range_hash(repo, node):
196 212 branches[branch].append(commit_id)
197 213
198 214 for branch, commits in branches.iteritems():
199 215 old_rev = kwargs.get('node_last') or commits[0]
200 216 rev_data.append({
201 217 'old_rev': old_rev,
202 218 'new_rev': commits[-1],
203 219 'ref': '',
204 220 'type': 'branch',
205 221 'name': branch,
206 222 })
207 223
208 224 extras['commit_ids'] = rev_data
209 225 return _call_hook('pre_push', extras, HgMessageWriter(ui))
210 226
211 227
212 228 def pre_push_ssh(ui, repo, node=None, **kwargs):
213 229 if _extras_from_ui(ui).get('SSH'):
214 230 return pre_push(ui, repo, node, **kwargs)
215 231
216 232 return 0
217 233
218 234
219 235 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
220 236 extras = _extras_from_ui(ui)
221 237 if extras.get('SSH'):
222 238 permission = extras['SSH_PERMISSIONS']
223 239
224 240 if 'repository.write' == permission or 'repository.admin' == permission:
225 241 return 0
226 242
227 243 # non-zero ret code
228 244 return 1
229 245
230 246 return 0
231 247
232 248
233 249 def post_push(ui, repo, node, **kwargs):
234 250 extras = _extras_from_ui(ui)
235 251
236 252 commit_ids = []
237 253 branches = []
238 254 bookmarks = []
239 255 tags = []
240 256
241 257 for commit_id, branch in _rev_range_hash(repo, node):
242 258 commit_ids.append(commit_id)
243 259 if branch not in branches:
244 260 branches.append(branch)
245 261
246 262 if hasattr(ui, '_rc_pushkey_branches'):
247 263 bookmarks = ui._rc_pushkey_branches
248 264
249 265 extras['commit_ids'] = commit_ids
250 266 extras['new_refs'] = {
251 267 'branches': branches,
252 268 'bookmarks': bookmarks,
253 269 'tags': tags
254 270 }
255 271
256 272 return _call_hook('post_push', extras, HgMessageWriter(ui))
257 273
258 274
259 275 def post_push_ssh(ui, repo, node, **kwargs):
260 276 if _extras_from_ui(ui).get('SSH'):
261 277 return post_push(ui, repo, node, **kwargs)
262 278 return 0
263 279
264 280
265 281 def key_push(ui, repo, **kwargs):
266 282 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
267 283 # store new bookmarks in our UI object propagated later to post_push
268 284 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
269 285 return
270 286
271 287
272 288 # backward compat
273 289 log_pull_action = post_pull
274 290
275 291 # backward compat
276 292 log_push_action = post_push
277 293
278 294
279 295 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
280 296 """
281 297 Old hook name: keep here for backward compatibility.
282 298
283 299 This is only required when the installed git hooks are not upgraded.
284 300 """
285 301 pass
286 302
287 303
288 304 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
289 305 """
290 306 Old hook name: keep here for backward compatibility.
291 307
292 308 This is only required when the installed git hooks are not upgraded.
293 309 """
294 310 pass
295 311
296 312
297 313 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
298 314
299 315
300 316 def git_pre_pull(extras):
301 317 """
302 318 Pre pull hook.
303 319
304 320 :param extras: dictionary containing the keys defined in simplevcs
305 321 :type extras: dict
306 322
307 323 :return: status code of the hook. 0 for success.
308 324 :rtype: int
309 325 """
310 326 if 'pull' not in extras['hooks']:
311 327 return HookResponse(0, '')
312 328
313 329 stdout = io.BytesIO()
314 330 try:
315 331 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
316 332 except Exception as error:
317 333 status = 128
318 334 stdout.write('ERROR: %s\n' % str(error))
319 335
320 336 return HookResponse(status, stdout.getvalue())
321 337
322 338
323 339 def git_post_pull(extras):
324 340 """
325 341 Post pull hook.
326 342
327 343 :param extras: dictionary containing the keys defined in simplevcs
328 344 :type extras: dict
329 345
330 346 :return: status code of the hook. 0 for success.
331 347 :rtype: int
332 348 """
333 349 if 'pull' not in extras['hooks']:
334 350 return HookResponse(0, '')
335 351
336 352 stdout = io.BytesIO()
337 353 try:
338 354 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
339 355 except Exception as error:
340 356 status = 128
341 357 stdout.write('ERROR: %s\n' % error)
342 358
343 359 return HookResponse(status, stdout.getvalue())
344 360
345 361
346 362 def _parse_git_ref_lines(revision_lines):
347 363 rev_data = []
348 364 for revision_line in revision_lines or []:
349 365 old_rev, new_rev, ref = revision_line.strip().split(' ')
350 366 ref_data = ref.split('/', 2)
351 367 if ref_data[1] in ('tags', 'heads'):
352 368 rev_data.append({
353 369 'old_rev': old_rev,
354 370 'new_rev': new_rev,
355 371 'ref': ref,
356 372 'type': ref_data[1],
357 373 'name': ref_data[2],
358 374 })
359 375 return rev_data
360 376
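Each revision line has the standard `<old-sha> <new-sha> <ref>` shape that git feeds to pre/post-receive hooks on stdin; only `refs/heads/*` and `refs/tags/*` survive the filter. A small usage sketch of the function defined above (SHAs are placeholders):

revision_lines = [
    '%s %s refs/heads/master' % ('0' * 40, 'f' * 40),  # branch created/updated
    '%s %s refs/tags/v1.0' % ('a' * 40, 'b' * 40),     # tag pushed
]
rev_data = _parse_git_ref_lines(revision_lines)
# -> [{'old_rev': '00..0', 'new_rev': 'ff..f', 'ref': 'refs/heads/master',
#      'type': 'heads', 'name': 'master'},
#     {'old_rev': 'aa..a', 'new_rev': 'bb..b', 'ref': 'refs/tags/v1.0',
#      'type': 'tags', 'name': 'v1.0'}]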
361 377
362 378 def git_pre_receive(unused_repo_path, revision_lines, env):
363 379 """
364 380 Pre push hook.
365 381
366 382 :param extras: dictionary containing the keys defined in simplevcs
367 383 :type extras: dict
368 384
369 385 :return: status code of the hook. 0 for success.
370 386 :rtype: int
371 387 """
372 388 extras = json.loads(env['RC_SCM_DATA'])
373 389 rev_data = _parse_git_ref_lines(revision_lines)
374 390 if 'push' not in extras['hooks']:
375 391 return 0
376 392 extras['commit_ids'] = rev_data
377 393 return _call_hook('pre_push', extras, GitMessageWriter())
378 394
379 395
380 396 def git_post_receive(unused_repo_path, revision_lines, env):
381 397 """
382 398 Post push hook.
383 399
384 400 :param extras: dictionary containing the keys defined in simplevcs
385 401 :type extras: dict
386 402
387 403 :return: status code of the hook. 0 for success.
388 404 :rtype: int
389 405 """
390 406 extras = json.loads(env['RC_SCM_DATA'])
391 407 if 'push' not in extras['hooks']:
392 408 return 0
393 409
394 410 rev_data = _parse_git_ref_lines(revision_lines)
395 411
396 412 git_revs = []
397 413
398 414 # N.B.(skreft): it is ok to just call git, as git before calling a
399 415     # subcommand sets the PATH environment variable so that it points to the
400 416 # correct version of the git executable.
401 417 empty_commit_id = '0' * 40
402 418 branches = []
403 419 tags = []
404 420 for push_ref in rev_data:
405 421 type_ = push_ref['type']
406 422
407 423 if type_ == 'heads':
408 424 if push_ref['old_rev'] == empty_commit_id:
409 425 # starting new branch case
410 426 if push_ref['name'] not in branches:
411 427 branches.append(push_ref['name'])
412 428
413 429 # Fix up head revision if needed
414 430 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
415 431 try:
416 432 subprocessio.run_command(cmd, env=os.environ.copy())
417 433 except Exception:
418 434 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
419 435 'refs/heads/%s' % push_ref['name']]
420 436 print("Setting default branch to %s" % push_ref['name'])
421 437 subprocessio.run_command(cmd, env=os.environ.copy())
422 438
423 439 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
424 440 '--format=%(refname)', 'refs/heads/*']
425 441 stdout, stderr = subprocessio.run_command(
426 442 cmd, env=os.environ.copy())
427 443 heads = stdout
428 444 heads = heads.replace(push_ref['ref'], '')
429 445 heads = ' '.join(head for head in heads.splitlines() if head)
430 446 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
431 447 '--pretty=format:%H', '--', push_ref['new_rev'],
432 448 '--not', heads]
433 449 stdout, stderr = subprocessio.run_command(
434 450 cmd, env=os.environ.copy())
435 451 git_revs.extend(stdout.splitlines())
436 452 elif push_ref['new_rev'] == empty_commit_id:
437 453 # delete branch case
438 454 git_revs.append('delete_branch=>%s' % push_ref['name'])
439 455 else:
440 456 if push_ref['name'] not in branches:
441 457 branches.append(push_ref['name'])
442 458
443 459 cmd = [settings.GIT_EXECUTABLE, 'log',
444 460 '{old_rev}..{new_rev}'.format(**push_ref),
445 461 '--reverse', '--pretty=format:%H']
446 462 stdout, stderr = subprocessio.run_command(
447 463 cmd, env=os.environ.copy())
448 464 git_revs.extend(stdout.splitlines())
449 465 elif type_ == 'tags':
450 466 if push_ref['name'] not in tags:
451 467 tags.append(push_ref['name'])
452 468 git_revs.append('tag=>%s' % push_ref['name'])
453 469
454 470 extras['commit_ids'] = git_revs
455 471 extras['new_refs'] = {
456 472 'branches': branches,
457 473 'bookmarks': [],
458 474 'tags': tags,
459 475 }
460 476
461 477 if 'repo_size' in extras['hooks']:
462 478 try:
463 479 _call_hook('repo_size', extras, GitMessageWriter())
464 480 except:
465 481 pass
466 482
467 483 return _call_hook('post_push', extras, GitMessageWriter())
484
485
486 def svn_pre_commit(repo_path, commit_data, env):
487 path, txn_id = commit_data
488 branches = []
489 tags = []
490
491 cmd = ['svnlook', 'pget',
492 '-t', txn_id,
493 '--revprop', path, 'rc-scm-extras']
494 stdout, stderr = subprocessio.run_command(
495 cmd, env=os.environ.copy())
496 extras = json.loads(base64.urlsafe_b64decode(stdout))
497
498 extras['commit_ids'] = []
499 extras['txn_id'] = txn_id
500 extras['new_refs'] = {
501 'branches': branches,
502 'bookmarks': [],
503 'tags': tags,
504 }
505 sys.stderr.write(str(extras))
506 return _call_hook('pre_push', extras, SvnMessageWriter())
507
508
509 def svn_post_commit(repo_path, commit_data, env):
510 """
511 commit_data is path, rev, txn_id
512 """
513 path, commit_id, txn_id = commit_data
514 branches = []
515 tags = []
516
517 cmd = ['svnlook', 'pget',
518 '-r', commit_id,
519 '--revprop', path, 'rc-scm-extras']
520 stdout, stderr = subprocessio.run_command(
521 cmd, env=os.environ.copy())
522
523 extras = json.loads(base64.urlsafe_b64decode(stdout))
524
525 extras['commit_ids'] = [commit_id]
526 extras['txn_id'] = txn_id
527 extras['new_refs'] = {
528 'branches': branches,
529 'bookmarks': [],
530 'tags': tags,
531 }
532
533 if 'repo_size' in extras['hooks']:
534 try:
535 _call_hook('repo_size', extras, SvnMessageWriter())
536 except:
537 pass
538
539 return _call_hook('post_push', extras, SvnMessageWriter())
540
541
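Both svn hooks above expect the `rc-scm-extras` revision property to contain URL-safe base64 of the JSON extras. A sketch of the matching encode step that a caller would perform before the commit (the extras content is illustrative):

import json
import base64

extras = {'hooks': ['push'], 'username': 'someuser'}  # illustrative
encoded = base64.urlsafe_b64encode(json.dumps(extras))
# the hooks reverse this with json.loads(base64.urlsafe_b64decode(...))
assert json.loads(base64.urlsafe_b64decode(encoded)) == extras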
@@ -1,480 +1,487 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import base64
20 20 import locale
21 21 import logging
22 22 import uuid
23 23 import wsgiref.util
24 24 import traceback
25 25 from itertools import chain
26 26
27 27 import simplejson as json
28 28 import msgpack
29 29 from beaker.cache import CacheManager
30 30 from beaker.util import parse_cache_config_options
31 31 from pyramid.config import Configurator
32 32 from pyramid.wsgi import wsgiapp
33 33 from pyramid.compat import configparser
34 34
35 35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 38 from vcsserver.echo_stub.echo_app import EchoApp
39 39 from vcsserver.exceptions import HTTPRepoLocked
40 40 from vcsserver.server import VcsServer
41 41
42 42 try:
43 43 from vcsserver.git import GitFactory, GitRemote
44 44 except ImportError:
45 45 GitFactory = None
46 46 GitRemote = None
47 47
48 48 try:
49 49 from vcsserver.hg import MercurialFactory, HgRemote
50 50 except ImportError:
51 51 MercurialFactory = None
52 52 HgRemote = None
53 53
54 54 try:
55 55 from vcsserver.svn import SubversionFactory, SvnRemote
56 56 except ImportError:
57 57 SubversionFactory = None
58 58 SvnRemote = None
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 def _is_request_chunked(environ):
64 64 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
65 65 return stream
66 66
67 67
68 68 class VCS(object):
69 69 def __init__(self, locale=None, cache_config=None):
70 70 self.locale = locale
71 71 self.cache_config = cache_config
72 72 self._configure_locale()
73 73 self._initialize_cache()
74 74
75 75 if GitFactory and GitRemote:
76 76 git_repo_cache = self.cache.get_cache_region(
77 77 'git', region='repo_object')
78 78 git_factory = GitFactory(git_repo_cache)
79 79 self._git_remote = GitRemote(git_factory)
80 80 else:
81 81 log.info("Git client import failed")
82 82
83 83 if MercurialFactory and HgRemote:
84 84 hg_repo_cache = self.cache.get_cache_region(
85 85 'hg', region='repo_object')
86 86 hg_factory = MercurialFactory(hg_repo_cache)
87 87 self._hg_remote = HgRemote(hg_factory)
88 88 else:
89 89 log.info("Mercurial client import failed")
90 90
91 91 if SubversionFactory and SvnRemote:
92 92 svn_repo_cache = self.cache.get_cache_region(
93 93 'svn', region='repo_object')
94 94 svn_factory = SubversionFactory(svn_repo_cache)
95 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
95 99 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
96 100 else:
97 101 log.info("Subversion client import failed")
98 102
99 103 self._vcsserver = VcsServer()
100 104
101 105 def _initialize_cache(self):
102 106 cache_config = parse_cache_config_options(self.cache_config)
103 107 log.info('Initializing beaker cache: %s' % cache_config)
104 108 self.cache = CacheManager(**cache_config)
105 109
106 110 def _configure_locale(self):
107 111 if self.locale:
108 112             log.info('Setting locale `LC_ALL` to %s' % self.locale)
109 113 else:
110 114 log.info(
111 115 'Configuring locale subsystem based on environment variables')
112 116 try:
113 117 # If self.locale is the empty string, then the locale
114 118 # module will use the environment variables. See the
115 119 # documentation of the package `locale`.
116 120 locale.setlocale(locale.LC_ALL, self.locale)
117 121
118 122 language_code, encoding = locale.getlocale()
119 123 log.info(
120 124 'Locale set to language code "%s" with encoding "%s".',
121 125 language_code, encoding)
122 126 except locale.Error:
123 127 log.exception(
124 128 'Cannot set locale, not configuring the locale system')
125 129
126 130
127 131 class WsgiProxy(object):
128 132 def __init__(self, wsgi):
129 133 self.wsgi = wsgi
130 134
131 135 def __call__(self, environ, start_response):
132 136 input_data = environ['wsgi.input'].read()
133 137 input_data = msgpack.unpackb(input_data)
134 138
135 139 error = None
136 140 try:
137 141 data, status, headers = self.wsgi.handle(
138 142 input_data['environment'], input_data['input_data'],
139 143 *input_data['args'], **input_data['kwargs'])
140 144 except Exception as e:
141 145 data, status, headers = [], None, None
142 146 error = {
143 147 'message': str(e),
144 148 '_vcs_kind': getattr(e, '_vcs_kind', None)
145 149 }
146 150
147 151 start_response(200, {})
148 152 return self._iterator(error, status, headers, data)
149 153
150 154 def _iterator(self, error, status, headers, data):
151 155 initial_data = [
152 156 error,
153 157 status,
154 158 headers,
155 159 ]
156 160
157 161 for d in chain(initial_data, data):
158 162 yield msgpack.packb(d)
159 163
160 164
161 165 class HTTPApplication(object):
162 166 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
163 167
164 168 remote_wsgi = remote_wsgi
165 169 _use_echo_app = False
166 170
167 171 def __init__(self, settings=None, global_config=None):
168 172 self.config = Configurator(settings=settings)
169 173 self.global_config = global_config
170 174
171 175 locale = settings.get('locale', '') or 'en_US.UTF-8'
172 176 vcs = VCS(locale=locale, cache_config=settings)
173 177 self._remotes = {
174 178 'hg': vcs._hg_remote,
175 179 'git': vcs._git_remote,
176 180 'svn': vcs._svn_remote,
177 181 'server': vcs._vcsserver,
178 182 }
179 183 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
180 184 self._use_echo_app = True
181 185 log.warning("Using EchoApp for VCS operations.")
182 186 self.remote_wsgi = remote_wsgi_stub
183 187 self._configure_settings(settings)
184 188 self._configure()
185 189
186 190 def _configure_settings(self, app_settings):
187 191 """
188 192 Configure the settings module.
189 193 """
190 194 git_path = app_settings.get('git_path', None)
191 195 if git_path:
192 196 settings.GIT_EXECUTABLE = git_path
197 binary_dir = app_settings.get('core.binary_dir', None)
198 if binary_dir:
199 settings.BINARY_DIR = binary_dir
193 200
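Both values come from the application settings the server is started with; a minimal sketch of the two optional keys involved (the paths are placeholders):

# hypothetical settings passed into HTTPApplication(settings=...)
app_settings = {
    'git_path': '/usr/bin/git',           # copied into settings.GIT_EXECUTABLE
    'core.binary_dir': '/usr/local/bin',  # copied into settings.BINARY_DIR
}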
194 201 def _configure(self):
195 202 self.config.add_renderer(
196 203 name='msgpack',
197 204 factory=self._msgpack_renderer_factory)
198 205
199 206 self.config.add_route('service', '/_service')
200 207 self.config.add_route('status', '/status')
201 208 self.config.add_route('hg_proxy', '/proxy/hg')
202 209 self.config.add_route('git_proxy', '/proxy/git')
203 210 self.config.add_route('vcs', '/{backend}')
204 211 self.config.add_route('stream_git', '/stream/git/*repo_name')
205 212 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
206 213
207 214 self.config.add_view(
208 215 self.status_view, route_name='status', renderer='json')
209 216 self.config.add_view(
210 217 self.service_view, route_name='service', renderer='msgpack')
211 218
212 219 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
213 220 self.config.add_view(self.git_proxy(), route_name='git_proxy')
214 221 self.config.add_view(
215 222 self.vcs_view, route_name='vcs', renderer='msgpack',
216 223 custom_predicates=[self.is_vcs_view])
217 224
218 225 self.config.add_view(self.hg_stream(), route_name='stream_hg')
219 226 self.config.add_view(self.git_stream(), route_name='stream_git')
220 227
221 228 def notfound(request):
222 229 return {'status': '404 NOT FOUND'}
223 230 self.config.add_notfound_view(notfound, renderer='json')
224 231
225 232 self.config.add_view(self.handle_vcs_exception, context=Exception)
226 233
227 234 self.config.add_tween(
228 235 'vcsserver.tweens.RequestWrapperTween',
229 236 )
230 237
231 238 def wsgi_app(self):
232 239 return self.config.make_wsgi_app()
233 240
234 241 def vcs_view(self, request):
235 242 remote = self._remotes[request.matchdict['backend']]
236 243 payload = msgpack.unpackb(request.body, use_list=True)
237 244 method = payload.get('method')
238 245 params = payload.get('params')
239 246 wire = params.get('wire')
240 247 args = params.get('args')
241 248 kwargs = params.get('kwargs')
242 249 if wire:
243 250 try:
244 251 wire['context'] = uuid.UUID(wire['context'])
245 252 except KeyError:
246 253 pass
247 254 args.insert(0, wire)
248 255
249 256 log.debug('method called:%s with kwargs:%s', method, kwargs)
250 257 try:
251 258 resp = getattr(remote, method)(*args, **kwargs)
252 259 except Exception as e:
253 260 tb_info = traceback.format_exc()
254 261
255 262 type_ = e.__class__.__name__
256 263 if type_ not in self.ALLOWED_EXCEPTIONS:
257 264 type_ = None
258 265
259 266 resp = {
260 267 'id': payload.get('id'),
261 268 'error': {
262 269 'message': e.message,
263 270 'traceback': tb_info,
264 271 'type': type_
265 272 }
266 273 }
267 274 try:
268 275 resp['error']['_vcs_kind'] = e._vcs_kind
269 276 except AttributeError:
270 277 pass
271 278 else:
272 279 resp = {
273 280 'id': payload.get('id'),
274 281 'result': resp
275 282 }
276 283
277 284 return resp
278 285
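For orientation, a request to the `/{backend}` route is a msgpack-packed dict with `id`, `method` and `params`, where `params` carries `wire`, `args` and `kwargs`. A rough client-side sketch, assuming a vcsserver listening on localhost:9900 (address and method choice are placeholders):

import msgpack
from httplib import HTTPConnection

payload = {
    'id': 'request-1',                    # echoed back in the response
    'method': 'discover_svn_version',     # a method that needs no wire/args
    'params': {'wire': None, 'args': [], 'kwargs': {}},
}
conn = HTTPConnection('localhost:9900')   # placeholder vcsserver address
conn.request('POST', '/svn', msgpack.packb(payload))
resp = msgpack.unpackb(conn.getresponse().read())
# resp contains either 'result' or 'error' alongside the original 'id'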
279 286 def status_view(self, request):
280 287 import vcsserver
281 288 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
282 289 'pid': os.getpid()}
283 290
284 291 def service_view(self, request):
285 292 import vcsserver
286 293
287 294 payload = msgpack.unpackb(request.body, use_list=True)
288 295
289 296 try:
290 297 path = self.global_config['__file__']
291 298 config = configparser.ConfigParser()
292 299 config.read(path)
293 300 parsed_ini = config
294 301 if parsed_ini.has_section('server:main'):
295 302 parsed_ini = dict(parsed_ini.items('server:main'))
296 303 except Exception:
297 304 log.exception('Failed to read .ini file for display')
298 305 parsed_ini = {}
299 306
300 307 resp = {
301 308 'id': payload.get('id'),
302 309 'result': dict(
303 310 version=vcsserver.__version__,
304 311 config=parsed_ini,
305 312 payload=payload,
306 313 )
307 314 }
308 315 return resp
309 316
310 317 def _msgpack_renderer_factory(self, info):
311 318 def _render(value, system):
312 319 value = msgpack.packb(value)
313 320 request = system.get('request')
314 321 if request is not None:
315 322 response = request.response
316 323 ct = response.content_type
317 324 if ct == response.default_content_type:
318 325 response.content_type = 'application/x-msgpack'
319 326 return value
320 327 return _render
321 328
322 329 def set_env_from_config(self, environ, config):
323 330 dict_conf = {}
324 331 try:
325 332 for elem in config:
326 333 if elem[0] == 'rhodecode':
327 334 dict_conf = json.loads(elem[2])
328 335 break
329 336 except Exception:
330 337 log.exception('Failed to fetch SCM CONFIG')
331 338 return
332 339
333 340 username = dict_conf.get('username')
334 341 if username:
335 342 environ['REMOTE_USER'] = username
336 343         # mercurial specific, some extension APIs rely on this
337 344 environ['HGUSER'] = username
338 345
339 346 ip = dict_conf.get('ip')
340 347 if ip:
341 348 environ['REMOTE_HOST'] = ip
342 349
343 350 if _is_request_chunked(environ):
344 351 # set the compatibility flag for webob
345 352 environ['wsgi.input_terminated'] = True
346 353
347 354 def hg_proxy(self):
348 355 @wsgiapp
349 356 def _hg_proxy(environ, start_response):
350 357 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
351 358 return app(environ, start_response)
352 359 return _hg_proxy
353 360
354 361 def git_proxy(self):
355 362 @wsgiapp
356 363 def _git_proxy(environ, start_response):
357 364 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
358 365 return app(environ, start_response)
359 366 return _git_proxy
360 367
361 368 def hg_stream(self):
362 369 if self._use_echo_app:
363 370 @wsgiapp
364 371 def _hg_stream(environ, start_response):
365 372 app = EchoApp('fake_path', 'fake_name', None)
366 373 return app(environ, start_response)
367 374 return _hg_stream
368 375 else:
369 376 @wsgiapp
370 377 def _hg_stream(environ, start_response):
371 378 log.debug('http-app: handling hg stream')
372 379 repo_path = environ['HTTP_X_RC_REPO_PATH']
373 380 repo_name = environ['HTTP_X_RC_REPO_NAME']
374 381 packed_config = base64.b64decode(
375 382 environ['HTTP_X_RC_REPO_CONFIG'])
376 383 config = msgpack.unpackb(packed_config)
377 384 app = scm_app.create_hg_wsgi_app(
378 385 repo_path, repo_name, config)
379 386
380 387 # Consistent path information for hgweb
381 388 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
382 389 environ['REPO_NAME'] = repo_name
383 390 self.set_env_from_config(environ, config)
384 391
385 392 log.debug('http-app: starting app handler '
386 393 'with %s and process request', app)
387 394 return app(environ, ResponseFilter(start_response))
388 395 return _hg_stream
389 396
390 397 def git_stream(self):
391 398 if self._use_echo_app:
392 399 @wsgiapp
393 400 def _git_stream(environ, start_response):
394 401 app = EchoApp('fake_path', 'fake_name', None)
395 402 return app(environ, start_response)
396 403 return _git_stream
397 404 else:
398 405 @wsgiapp
399 406 def _git_stream(environ, start_response):
400 407 log.debug('http-app: handling git stream')
401 408 repo_path = environ['HTTP_X_RC_REPO_PATH']
402 409 repo_name = environ['HTTP_X_RC_REPO_NAME']
403 410 packed_config = base64.b64decode(
404 411 environ['HTTP_X_RC_REPO_CONFIG'])
405 412 config = msgpack.unpackb(packed_config)
406 413
407 414 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
408 415 self.set_env_from_config(environ, config)
409 416
410 417 content_type = environ.get('CONTENT_TYPE', '')
411 418
412 419 path = environ['PATH_INFO']
413 420 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
414 421 log.debug(
415 422 'LFS: Detecting if request `%s` is LFS server path based '
416 423 'on content type:`%s`, is_lfs:%s',
417 424 path, content_type, is_lfs_request)
418 425
419 426 if not is_lfs_request:
420 427 # fallback detection by path
421 428 if GIT_LFS_PROTO_PAT.match(path):
422 429 is_lfs_request = True
423 430 log.debug(
424 431 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
425 432 path, is_lfs_request)
426 433
427 434 if is_lfs_request:
428 435 app = scm_app.create_git_lfs_wsgi_app(
429 436 repo_path, repo_name, config)
430 437 else:
431 438 app = scm_app.create_git_wsgi_app(
432 439 repo_path, repo_name, config)
433 440
434 441 log.debug('http-app: starting app handler '
435 442 'with %s and process request', app)
436 443
437 444 return app(environ, start_response)
438 445
439 446 return _git_stream
440 447
441 448 def is_vcs_view(self, context, request):
442 449 """
443 450 View predicate that returns true if given backend is supported by
444 451 defined remotes.
445 452 """
446 453 backend = request.matchdict.get('backend')
447 454 return backend in self._remotes
448 455
449 456 def handle_vcs_exception(self, exception, request):
450 457 _vcs_kind = getattr(exception, '_vcs_kind', '')
451 458 if _vcs_kind == 'repo_locked':
452 459 # Get custom repo-locked status code if present.
453 460 status_code = request.headers.get('X-RC-Locked-Status-Code')
454 461 return HTTPRepoLocked(
455 462 title=exception.message, status_code=status_code)
456 463
457 464 # Re-raise exception if we can not handle it.
458 465 log.exception(
459 466 'error occurred handling this request for path: %s', request.path)
460 467 raise exception
461 468
462 469
463 470 class ResponseFilter(object):
464 471
465 472 def __init__(self, start_response):
466 473 self._start_response = start_response
467 474
468 475 def __call__(self, status, response_headers, exc_info=None):
469 476 headers = tuple(
470 477 (h, v) for h, v in response_headers
471 478 if not wsgiref.util.is_hop_by_hop(h))
472 479 return self._start_response(status, headers, exc_info)
473 480
474 481
475 482 def main(global_config, **settings):
476 483 if MercurialFactory:
477 484 hgpatches.patch_largefiles_capabilities()
478 485 hgpatches.patch_subrepo_type_mapping()
479 486 app = HTTPApplication(settings=settings, global_config=global_config)
480 487 return app.wsgi_app()
@@ -1,19 +1,20 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 WIRE_ENCODING = 'UTF-8'
19 19 GIT_EXECUTABLE = 'git'
20 BINARY_DIR = ''
@@ -1,677 +1,689 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 from urllib2 import URLError
22 22 import logging
23 23 import posixpath as vcspath
24 24 import StringIO
25 25 import urllib
26 26 import traceback
27 27
28 28 import svn.client
29 29 import svn.core
30 30 import svn.delta
31 31 import svn.diff
32 32 import svn.fs
33 33 import svn.repos
34 34
35 from vcsserver import svn_diff, exceptions, subprocessio
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 36 from vcsserver.base import RepoFactory, raise_from_original
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 # Set of svn compatible version flags.
42 42 # Compare with subversion/svnadmin/svnadmin.c
43 43 svn_compatible_versions = set([
44 44 'pre-1.4-compatible',
45 45 'pre-1.5-compatible',
46 46 'pre-1.6-compatible',
47 47 'pre-1.8-compatible',
48 48 'pre-1.9-compatible',
49 49 ])
50 50
51 51 svn_compatible_versions_map = {
52 52 'pre-1.4-compatible': '1.3',
53 53 'pre-1.5-compatible': '1.4',
54 54 'pre-1.6-compatible': '1.5',
55 55 'pre-1.8-compatible': '1.7',
56 56 'pre-1.9-compatible': '1.8',
57 57 }
58 58
59 59
60 60 def reraise_safe_exceptions(func):
61 61 """Decorator for converting svn exceptions to something neutral."""
62 62 def wrapper(*args, **kwargs):
63 63 try:
64 64 return func(*args, **kwargs)
65 65 except Exception as e:
66 66 if not hasattr(e, '_vcs_kind'):
67 67 log.exception("Unhandled exception in hg remote call")
68 68 raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class SubversionFactory(RepoFactory):
74 74
75 75 def _create_repo(self, wire, create, compatible_version):
76 76 path = svn.core.svn_path_canonicalize(wire['path'])
77 77 if create:
78 78 fs_config = {'compatible-version': '1.9'}
79 79 if compatible_version:
80 80 if compatible_version not in svn_compatible_versions:
81 81 raise Exception('Unknown SVN compatible version "{}"'
82 82 .format(compatible_version))
83 83 fs_config['compatible-version'] = \
84 84 svn_compatible_versions_map[compatible_version]
85 85
86 86 log.debug('Create SVN repo with config "%s"', fs_config)
87 87 repo = svn.repos.create(path, "", "", None, fs_config)
88 88 else:
89 89 repo = svn.repos.open(path)
90 90
91 91 log.debug('Got SVN object: %s', repo)
92 92 return repo
93 93
94 94 def repo(self, wire, create=False, compatible_version=None):
95 95 def create_new_repo():
96 96 return self._create_repo(wire, create, compatible_version)
97 97
98 98 return self._repo(wire, create_new_repo)
99 99
100 100
101 101 NODE_TYPE_MAPPING = {
102 102 svn.core.svn_node_file: 'file',
103 103 svn.core.svn_node_dir: 'dir',
104 104 }
105 105
106 106
107 107 class SvnRemote(object):
108 108
109 109 def __init__(self, factory, hg_factory=None):
110 110 self._factory = factory
111 111 # TODO: Remove once we do not use internal Mercurial objects anymore
112 112 # for subversion
113 113 self._hg_factory = hg_factory
114 114
115 115 @reraise_safe_exceptions
116 116 def discover_svn_version(self):
117 117 try:
118 118 import svn.core
119 119 svn_ver = svn.core.SVN_VERSION
120 120 except ImportError:
121 121 svn_ver = None
122 122 return svn_ver
123 123
124 124 def check_url(self, url, config_items):
125 125 # this can throw exception if not installed, but we detect this
126 126 from hgsubversion import svnrepo
127 127
128 128 baseui = self._hg_factory._create_config(config_items)
129 129         # the uuid function gets a valid UUID only from a proper repo,
130 130         # otherwise it throws an exception
131 131 try:
132 132 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 133 except Exception:
134 134 tb = traceback.format_exc()
135 135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
136 136 raise URLError(
137 137 '"%s" is not a valid Subversion source url.' % (url, ))
138 138 return True
139 139
140 140 def is_path_valid_repository(self, wire, path):
141 141
142 142 # NOTE(marcink): short-circuit the check for an SVN repo;
143 143 # repos.open can be expensive, so use one cheap precondition
144 144 # first: a valid repo must contain a 'format' file
145 145
146 146 if not os.path.isfile(os.path.join(path, 'format')):
147 147 return False
148 148
149 149 try:
150 150 svn.repos.open(path)
151 151 except svn.core.SubversionException:
152 152 tb = traceback.format_exc()
153 153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
154 154 return False
155 155 return True
156 156
157 157 @reraise_safe_exceptions
158 158 def verify(self, wire):
159 159 repo_path = wire['path']
160 160 if not self.is_path_valid_repository(wire, repo_path):
161 161 raise Exception(
162 162 "Path %s is not a valid Subversion repository." % repo_path)
163 163
164 164 cmd = ['svnadmin', 'info', repo_path]
165 165 stdout, stderr = subprocessio.run_command(cmd)
166 166 return stdout
167 167
168 168 def lookup(self, wire, revision):
169 169 if revision not in [-1, None, 'HEAD']:
170 170 raise NotImplementedError
171 171 repo = self._factory.repo(wire)
172 172 fs_ptr = svn.repos.fs(repo)
173 173 head = svn.fs.youngest_rev(fs_ptr)
174 174 return head
175 175
176 176 def lookup_interval(self, wire, start_ts, end_ts):
177 177 repo = self._factory.repo(wire)
178 178 fsobj = svn.repos.fs(repo)
179 179 start_rev = None
180 180 end_rev = None
181 181 if start_ts:
182 182 start_ts_svn = apr_time_t(start_ts)
183 183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
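            # dated_revision resolves the youngest revision at (or before) the
            # given time, so +1 makes the range start just after start_ts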
184 184 else:
185 185 start_rev = 1
186 186 if end_ts:
187 187 end_ts_svn = apr_time_t(end_ts)
188 188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
189 189 else:
190 190 end_rev = svn.fs.youngest_rev(fsobj)
191 191 return start_rev, end_rev
192 192
193 193 def revision_properties(self, wire, revision):
194 194 repo = self._factory.repo(wire)
195 195 fs_ptr = svn.repos.fs(repo)
196 196 return svn.fs.revision_proplist(fs_ptr, revision)
197 197
198 198 def revision_changes(self, wire, revision):
199 199
200 200 repo = self._factory.repo(wire)
201 201 fsobj = svn.repos.fs(repo)
202 202 rev_root = svn.fs.revision_root(fsobj, revision)
203 203
204 204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
205 205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
206 206 base_dir = ""
207 207 send_deltas = False
208 208 svn.repos.replay2(
209 209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
210 210 editor_ptr, editor_baton, None)
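        # replaying the revision through the ChangeCollector editor fills
        # editor.changes with a {path: change} mapping, classified below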
211 211
212 212 added = []
213 213 changed = []
214 214 removed = []
215 215
216 216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
217 217 for path, change in editor.changes.iteritems():
218 218 # TODO: Decide what to do with directory nodes. Subversion can add
219 219 # empty directories.
220 220
221 221 if change.item_kind == svn.core.svn_node_dir:
222 222 continue
223 223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
224 224 added.append(path)
225 225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
226 226 svn.repos.CHANGE_ACTION_REPLACE]:
227 227 changed.append(path)
228 228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
229 229 removed.append(path)
230 230 else:
231 231 raise NotImplementedError(
232 232 "Action %s not supported on path %s" % (
233 233 change.action, path))
234 234
235 235 changes = {
236 236 'added': added,
237 237 'changed': changed,
238 238 'removed': removed,
239 239 }
240 240 return changes
241 241
242 242 def node_history(self, wire, path, revision, limit):
243 243 cross_copies = False
244 244 repo = self._factory.repo(wire)
245 245 fsobj = svn.repos.fs(repo)
246 246 rev_root = svn.fs.revision_root(fsobj, revision)
247 247
248 248 history_revisions = []
249 249 history = svn.fs.node_history(rev_root, path)
250 250 history = svn.fs.history_prev(history, cross_copies)
251 251 while history:
252 252 __, node_revision = svn.fs.history_location(history)
253 253 history_revisions.append(node_revision)
254 254 if limit and len(history_revisions) >= limit:
255 255 break
256 256 history = svn.fs.history_prev(history, cross_copies)
257 257 return history_revisions
258 258
259 259 def node_properties(self, wire, path, revision):
260 260 repo = self._factory.repo(wire)
261 261 fsobj = svn.repos.fs(repo)
262 262 rev_root = svn.fs.revision_root(fsobj, revision)
263 263 return svn.fs.node_proplist(rev_root, path)
264 264
265 265 def file_annotate(self, wire, path, revision):
266 266 abs_path = 'file://' + urllib.pathname2url(
267 267 vcspath.join(wire['path'], path))
268 268 file_uri = svn.core.svn_path_canonicalize(abs_path)
269 269
270 270 start_rev = svn_opt_revision_value_t(0)
271 271 peg_rev = svn_opt_revision_value_t(revision)
272 272 end_rev = peg_rev
273 273
274 274 annotations = []
275 275
276 276 def receiver(line_no, revision, author, date, line, pool):
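            # blame callback: keep only (line_no, revision, line); the author,
            # date and pool arguments are not used here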
277 277 annotations.append((line_no, revision, line))
278 278
279 279 # TODO: Cannot use blame5, missing typemap function in the swig code
280 280 try:
281 281 svn.client.blame2(
282 282 file_uri, peg_rev, start_rev, end_rev,
283 283 receiver, svn.client.create_context())
284 284 except svn.core.SubversionException as exc:
285 285 log.exception("Error during blame operation.")
286 286 raise Exception(
287 287 "Blame not supported or file does not exist at path %s. "
288 288 "Error %s." % (path, exc))
289 289
290 290 return annotations
291 291
292 292 def get_node_type(self, wire, path, rev=None):
293 293 repo = self._factory.repo(wire)
294 294 fs_ptr = svn.repos.fs(repo)
295 295 if rev is None:
296 296 rev = svn.fs.youngest_rev(fs_ptr)
297 297 root = svn.fs.revision_root(fs_ptr, rev)
298 298 node = svn.fs.check_path(root, path)
299 299 return NODE_TYPE_MAPPING.get(node, None)
300 300
301 301 def get_nodes(self, wire, path, revision=None):
302 302 repo = self._factory.repo(wire)
303 303 fsobj = svn.repos.fs(repo)
304 304 if revision is None:
305 305 revision = svn.fs.youngest_rev(fsobj)
306 306 root = svn.fs.revision_root(fsobj, revision)
307 307 entries = svn.fs.dir_entries(root, path)
308 308 result = []
309 309 for entry_path, entry_info in entries.iteritems():
310 310 result.append(
311 311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
312 312 return result
313 313
314 314 def get_file_content(self, wire, path, rev=None):
315 315 repo = self._factory.repo(wire)
316 316 fsobj = svn.repos.fs(repo)
317 317 if rev is None:
318 318 rev = svn.fs.youngest_revision(fsobj)
319 319 root = svn.fs.revision_root(fsobj, rev)
320 320 content = svn.core.Stream(svn.fs.file_contents(root, path))
321 321 return content.read()
322 322
323 323 def get_file_size(self, wire, path, revision=None):
324 324 repo = self._factory.repo(wire)
325 325 fsobj = svn.repos.fs(repo)
326 326 if revision is None:
327 327 revision = svn.fs.youngest_revision(fsobj)
328 328 root = svn.fs.revision_root(fsobj, revision)
329 329 size = svn.fs.file_length(root, path)
330 330 return size
331 331
332 332 def create_repository(self, wire, compatible_version=None):
333 333 log.info('Creating Subversion repository in path "%s"', wire['path'])
334 334 self._factory.repo(wire, create=True,
335 335 compatible_version=compatible_version)
336 336
337 337 def import_remote_repository(self, wire, src_url):
338 338 repo_path = wire['path']
339 339 if not self.is_path_valid_repository(wire, repo_path):
340 340 raise Exception(
341 341 "Path %s is not a valid Subversion repository." % repo_path)
342 342
343 343 # TODO: johbo: URL checks ?
344 344 import subprocess
345 345 rdump = subprocess.Popen(
346 346 ['svnrdump', 'dump', '--non-interactive', src_url],
347 347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
348 348 load = subprocess.Popen(
349 349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
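        # svnrdump streams the dump over the pipe straight into `svnadmin load`,
        # so the full dump never has to be written to disk first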
350 350
351 351 # TODO: johbo: This can be a very long operation, might be better
352 352 # to track some kind of status and provide an api to check if the
353 353 # import is done.
354 354 rdump.wait()
355 355 load.wait()
356 356
357 357 if rdump.returncode != 0:
358 358 errors = rdump.stderr.read()
359 359 log.error('svnrdump dump failed: statuscode %s: message: %s',
360 360 rdump.returncode, errors)
361 361 reason = 'UNKNOWN'
362 362 if 'svnrdump: E230001:' in errors:
363 363 reason = 'INVALID_CERTIFICATE'
364 364 raise Exception(
365 365 'Failed to dump the remote repository from %s.' % src_url,
366 366 reason)
367 367 if load.returncode != 0:
368 368 raise Exception(
369 369 'Failed to load the dump of remote repository from %s.' %
370 370 (src_url, ))
371 371
372 372 def commit(self, wire, message, author, timestamp, updated, removed):
373 373 assert isinstance(message, str)
374 374 assert isinstance(author, str)
375 375
376 376 repo = self._factory.repo(wire)
377 377 fsobj = svn.repos.fs(repo)
378 378
379 379 rev = svn.fs.youngest_rev(fsobj)
380 380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
381 381 txn_root = svn.fs.txn_root(txn)
382 382
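        # each entry in `updated`/`removed` is expected to be a dict with at
        # least a 'path' key (plus 'content' and optional 'properties' for
        # updates), judging by the TxnNodeProcessor helper further below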
383 383 for node in updated:
384 384 TxnNodeProcessor(node, txn_root).update()
385 385 for node in removed:
386 386 TxnNodeProcessor(node, txn_root).remove()
387 387
388 388 commit_id = svn.repos.fs_commit_txn(repo, txn)
389 389
390 390 if timestamp:
391 391 apr_time = apr_time_t(timestamp)
392 392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
393 393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
394 394
395 395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
396 396 return commit_id
397 397
398 398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
399 399 ignore_whitespace=False, context=3):
400 400
401 401 wire.update(cache=False)
402 402 repo = self._factory.repo(wire)
403 403 diff_creator = SvnDiffer(
404 404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
405 405 try:
406 406 return diff_creator.generate_diff()
407 407 except svn.core.SubversionException as e:
408 408 log.exception(
409 409 "Error during diff operation operation. "
410 410 "Path might not exist %s, %s" % (path1, path2))
411 411 return ""
412 412
413 413 @reraise_safe_exceptions
414 414 def is_large_file(self, wire, path):
415 415 return False
416 416
417 @reraise_safe_exceptions
418 def install_hooks(self, wire, force=False):
419 from vcsserver.hook_utils import install_svn_hooks
420 repo_path = wire['path']
421 binary_dir = settings.BINARY_DIR
422 executable = None
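        # when a binaries dir is configured, point the generated hook scripts
        # at the python interpreter shipped there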
423 if binary_dir:
424 executable = os.path.join(binary_dir, 'python')
425 return install_svn_hooks(
426 repo_path, executable=executable, force_create=force)
427
417 428
418 429 class SvnDiffer(object):
419 430 """
420 431 Utility to create diffs based on difflib and the Subversion api
421 432 """
422 433
423 434 binary_content = False
424 435
425 436 def __init__(
426 437 self, repo, src_rev, src_path, tgt_rev, tgt_path,
427 438 ignore_whitespace, context):
428 439 self.repo = repo
429 440 self.ignore_whitespace = ignore_whitespace
430 441 self.context = context
431 442
432 443 fsobj = svn.repos.fs(repo)
433 444
434 445 self.tgt_rev = tgt_rev
435 446 self.tgt_path = tgt_path or ''
436 447 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
437 448 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
438 449
439 450 self.src_rev = src_rev
440 451 self.src_path = src_path or self.tgt_path
441 452 self.src_root = svn.fs.revision_root(fsobj, src_rev)
442 453 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
443 454
444 455 self._validate()
445 456
446 457 def _validate(self):
447 458 if (self.tgt_kind != svn.core.svn_node_none and
448 459 self.src_kind != svn.core.svn_node_none and
449 460 self.src_kind != self.tgt_kind):
450 461 # TODO: johbo: proper error handling
451 462 raise Exception(
452 463 "Source and target are not compatible for diff generation. "
453 464 "Source type: %s, target type: %s" %
454 465 (self.src_kind, self.tgt_kind))
455 466
456 467 def generate_diff(self):
457 468 buf = StringIO.StringIO()
458 469 if self.tgt_kind == svn.core.svn_node_dir:
459 470 self._generate_dir_diff(buf)
460 471 else:
461 472 self._generate_file_diff(buf)
462 473 return buf.getvalue()
463 474
464 475 def _generate_dir_diff(self, buf):
465 476 editor = DiffChangeEditor()
466 477 editor_ptr, editor_baton = svn.delta.make_editor(editor)
467 478 svn.repos.dir_delta2(
468 479 self.src_root,
469 480 self.src_path,
470 481 '', # src_entry
471 482 self.tgt_root,
472 483 self.tgt_path,
473 484 editor_ptr, editor_baton,
474 485 authorization_callback_allow_all,
475 486 False, # text_deltas
476 487 svn.core.svn_depth_infinity, # depth
477 488 False, # entry_props
478 489 False, # ignore_ancestry
479 490 )
480 491
481 492 for path, __, change in sorted(editor.changes):
482 493 self._generate_node_diff(
483 494 buf, change, path, self.tgt_path, path, self.src_path)
484 495
485 496 def _generate_file_diff(self, buf):
486 497 change = None
487 498 if self.src_kind == svn.core.svn_node_none:
488 499 change = "add"
489 500 elif self.tgt_kind == svn.core.svn_node_none:
490 501 change = "delete"
491 502 tgt_base, tgt_path = vcspath.split(self.tgt_path)
492 503 src_base, src_path = vcspath.split(self.src_path)
493 504 self._generate_node_diff(
494 505 buf, change, tgt_path, tgt_base, src_path, src_base)
495 506
496 507 def _generate_node_diff(
497 508 self, buf, change, tgt_path, tgt_base, src_path, src_base):
498 509
499 510 if self.src_rev == self.tgt_rev and tgt_base == src_base:
500 511 # keep behaviour consistent with git/hg: return an empty diff
501 512 # when comparing the same revision
502 513 return
503 514
504 515 tgt_full_path = vcspath.join(tgt_base, tgt_path)
505 516 src_full_path = vcspath.join(src_base, src_path)
506 517
507 518 self.binary_content = False
508 519 mime_type = self._get_mime_type(tgt_full_path)
509 520
510 521 if mime_type and not mime_type.startswith('text'):
511 522 self.binary_content = True
512 523 buf.write("=" * 67 + '\n')
513 524 buf.write("Cannot display: file marked as a binary type.\n")
514 525 buf.write("svn:mime-type = %s\n" % mime_type)
515 526 buf.write("Index: %s\n" % (tgt_path, ))
516 527 buf.write("=" * 67 + '\n')
517 528 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
518 529 'tgt_path': tgt_path})
519 530
520 531 if change == 'add':
521 532 # TODO: johbo: SVN is missing a zero here compared to git
522 533 buf.write("new file mode 10644\n")
523 534
524 535 # TODO(marcink): introduce binary detection of svn patches
525 536 # if self.binary_content:
526 537 # buf.write('GIT binary patch\n')
527 538
528 539 buf.write("--- /dev/null\t(revision 0)\n")
529 540 src_lines = []
530 541 else:
531 542 if change == 'delete':
532 543 buf.write("deleted file mode 10644\n")
533 544
534 545 # TODO(marcink): introduce binary detection of svn patches
535 546 # if self.binary_content:
536 547 # buf.write('GIT binary patch\n')
537 548
538 549 buf.write("--- a/%s\t(revision %s)\n" % (
539 550 src_path, self.src_rev))
540 551 src_lines = self._svn_readlines(self.src_root, src_full_path)
541 552
542 553 if change == 'delete':
543 554 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
544 555 tgt_lines = []
545 556 else:
546 557 buf.write("+++ b/%s\t(revision %s)\n" % (
547 558 tgt_path, self.tgt_rev))
548 559 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
549 560
550 561 if not self.binary_content:
551 562 udiff = svn_diff.unified_diff(
552 563 src_lines, tgt_lines, context=self.context,
553 564 ignore_blank_lines=self.ignore_whitespace,
554 565 ignore_case=False,
555 566 ignore_space_changes=self.ignore_whitespace)
556 567 buf.writelines(udiff)
557 568
558 569 def _get_mime_type(self, path):
559 570 try:
560 571 mime_type = svn.fs.node_prop(
561 572 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
562 573 except svn.core.SubversionException:
563 574 mime_type = svn.fs.node_prop(
564 575 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
565 576 return mime_type
566 577
567 578 def _svn_readlines(self, fs_root, node_path):
568 579 if self.binary_content:
569 580 return []
570 581 node_kind = svn.fs.check_path(fs_root, node_path)
571 582 if node_kind not in (
572 583 svn.core.svn_node_file, svn.core.svn_node_symlink):
573 584 return []
574 585 content = svn.core.Stream(
575 586 svn.fs.file_contents(fs_root, node_path)).read()
576 587 return content.splitlines(True)
577 588
578 589
590
579 591 class DiffChangeEditor(svn.delta.Editor):
580 592 """
581 593 Records changes between two given revisions
582 594 """
583 595
584 596 def __init__(self):
585 597 self.changes = []
586 598
587 599 def delete_entry(self, path, revision, parent_baton, pool=None):
588 600 self.changes.append((path, None, 'delete'))
589 601
590 602 def add_file(
591 603 self, path, parent_baton, copyfrom_path, copyfrom_revision,
592 604 file_pool=None):
593 605 self.changes.append((path, 'file', 'add'))
594 606
595 607 def open_file(self, path, parent_baton, base_revision, file_pool=None):
596 608 self.changes.append((path, 'file', 'change'))
597 609
598 610
599 611 def authorization_callback_allow_all(root, path, pool):
600 612 return True
601 613
602 614
603 615 class TxnNodeProcessor(object):
604 616 """
605 617 Utility to process the change of one node within a transaction root.
606 618
607 619 It encapsulates the knowledge of how to add, update or remove
608 620 a node for a given transaction root. The purpose is to support the method
609 621 `SvnRemote.commit`.
610 622 """
611 623
612 624 def __init__(self, node, txn_root):
613 625 assert isinstance(node['path'], str)
614 626
615 627 self.node = node
616 628 self.txn_root = txn_root
617 629
618 630 def update(self):
619 631 self._ensure_parent_dirs()
620 632 self._add_file_if_node_does_not_exist()
621 633 self._update_file_content()
622 634 self._update_file_properties()
623 635
624 636 def remove(self):
625 637 svn.fs.delete(self.txn_root, self.node['path'])
626 638 # TODO: Clean up directory if empty
627 639
628 640 def _ensure_parent_dirs(self):
629 641 curdir = vcspath.dirname(self.node['path'])
630 642 dirs_to_create = []
631 643 while not self._svn_path_exists(curdir):
632 644 dirs_to_create.append(curdir)
633 645 curdir = vcspath.dirname(curdir)
634 646
635 647 for curdir in reversed(dirs_to_create):
636 648 log.debug('Creating missing directory "%s"', curdir)
637 649 svn.fs.make_dir(self.txn_root, curdir)
638 650
639 651 def _svn_path_exists(self, path):
640 652 path_status = svn.fs.check_path(self.txn_root, path)
641 653 return path_status != svn.core.svn_node_none
642 654
643 655 def _add_file_if_node_does_not_exist(self):
644 656 kind = svn.fs.check_path(self.txn_root, self.node['path'])
645 657 if kind == svn.core.svn_node_none:
646 658 svn.fs.make_file(self.txn_root, self.node['path'])
647 659
648 660 def _update_file_content(self):
649 661 assert isinstance(self.node['content'], str)
650 662 handler, baton = svn.fs.apply_textdelta(
651 663 self.txn_root, self.node['path'], None, None)
652 664 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
653 665
654 666 def _update_file_properties(self):
655 667 properties = self.node.get('properties', {})
656 668 for key, value in properties.iteritems():
657 669 svn.fs.change_node_prop(
658 670 self.txn_root, self.node['path'], key, value)
659 671
660 672
661 673 def apr_time_t(timestamp):
662 674 """
663 675 Convert a Python timestamp into APR timestamp type apr_time_t
664 676 """
665 677 return timestamp * 1E6
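# e.g. apr_time_t(1514764800) -> 1.5147648e+15 (seconds -> microseconds since
# the epoch); note the result is a float because of the 1E6 literal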
666 678
667 679
668 680 def svn_opt_revision_value_t(num):
669 681 """
670 682 Put `num` into a `svn_opt_revision_value_t` structure.
671 683 """
672 684 value = svn.core.svn_opt_revision_value_t()
673 685 value.number = num
674 686 revision = svn.core.svn_opt_revision_t()
675 687 revision.kind = svn.core.svn_opt_revision_number
676 688 revision.value = value
677 689 return revision
@@ -1,57 +1,58 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
23 23 def pytest_addoption(parser):
24 24 parser.addoption(
25 25 '--repeat', type=int, default=100,
26 26 help="Number of repetitions in performance tests.")
27 27
28 28
29 29 @pytest.fixture(scope='session')
30 30 def repeat(request):
31 31 """
32 32 The number of repetitions is based on this fixture.
33 33
34 34 Slower calls may divide it by 10 or 100. It is chosen so that the
35 35 tests are not too slow in our default test suite.
36 36 """
37 37 return request.config.getoption('--repeat')
38 38
39 39
40 40 @pytest.fixture(scope='session')
41 41 def vcsserver_port(request):
42 42 port = get_available_port()
43 43 print 'Using vcsserver port %s' % (port, )
44 44 return port
45 45
46 46
47 47 def get_available_port():
48 48 family = socket.AF_INET
49 49 socktype = socket.SOCK_STREAM
50 50 host = '127.0.0.1'
51 51
52 52 mysocket = socket.socket(family, socktype)
53 53 mysocket.bind((host, 0))
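    # binding to port 0 lets the OS pick a free ephemeral port for us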
54 54 port = mysocket.getsockname()[1]
55 55 mysocket.close()
56 56 del mysocket
57 57 return port
58
@@ -1,71 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
25 25 class ContextINI(object):
26 26 """
27 27 Allows creating a new test.ini file as a copy of an existing one with edited
28 28 data. If the existing file is not present, a new one is created. Example usage::
29 29
30 30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 31 print 'vcsserver --config=%s' % new_test_ini_path
32 32 """
33 33
34 34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 35 destroy=True):
36 36 self.ini_file_path = ini_file_path
37 37 self.ini_params = ini_params
38 38 self.new_path = None
39 39 self.new_path_prefix = new_file_prefix or 'test'
40 40 self.destroy = destroy
41 41
42 42 def __enter__(self):
43 43 _, pref = tempfile.mkstemp()
44 44 loc = tempfile.gettempdir()
45 45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 46 pref, self.new_path_prefix, self.ini_file_path))
47 47
48 48 # copy ini file and modify according to the params, if we re-use a file
49 49 if os.path.isfile(self.ini_file_path):
50 50 shutil.copy(self.ini_file_path, self.new_path)
51 51 else:
52 52 # create new dump file for configObj to write to.
53 53 with open(self.new_path, 'wb'):
54 54 pass
55 55
56 56 config = configobj.ConfigObj(
57 57 self.new_path, file_error=True, write_empty_values=True)
58 58
59 59 for data in self.ini_params:
60 60 section, ini_params = data.items()[0]
61 61 key, val = ini_params.items()[0]
62 62 if section not in config:
63 63 config[section] = {}
64 64 config[section][key] = val
65 65
66 66 config.write()
67 67 return self.new_path
68 68
69 69 def __exit__(self, exc_type, exc_val, exc_tb):
70 70 if self.destroy:
71 71 os.remove(self.new_path)
72
73
74 def no_newline_id_generator(test_name):
75 """
76 Generates a test name without space, newline or tab characters. Used for
77 nicer output of test progress.
78 """
79 org_name = test_name
80 test_name = test_name\
81 .replace('\n', '_N') \
82 .replace('\r', '_N') \
83 .replace('\t', '_T') \
84 .replace(' ', '_S')
85
86 return test_name or 'test-with-empty-name'
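A hedged usage sketch (editor's illustration, not part of the change; the parametrize values are invented): this helper is the kind of callable pytest accepts as `ids=` for `pytest.mark.parametrize`, e.g.:

import pytest

@pytest.mark.parametrize(
    'value', ['line one\nline two', 'tab\there'],
    ids=no_newline_id_generator)
def test_ids_stay_on_a_single_line(value):
    # the generated ids would be 'line_Sone_Nline_Stwo' and 'tab_There'
    assert value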
@@ -1,75 +1,82 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import logging
18 18
19 19 log = logging.getLogger(__name__)
20 20
21 21
22 22 def safe_int(val, default=None):
23 23 """
24 24 Returns int() of val; if val is not convertible to int, the default
25 25 is returned instead.
26 26
27 27 :param val:
28 28 :param default:
29 29 """
30 30
31 31 try:
32 32 val = int(val)
33 33 except (ValueError, TypeError):
34 34 val = default
35 35
36 36 return val
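# e.g. safe_int('12') -> 12, safe_int('12.3') -> None, safe_int(None, 0) -> 0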
37 37
38 38
39 39 def safe_str(unicode_, to_encoding=['utf8']):
40 40 """
41 41 safe str function. Does a few tricks to turn unicode_ into a string
42 42
43 43 In case of UnicodeEncodeError, we try to return it with the encoding detected
44 44 by the chardet library; if that fails, fall back to a string with errors replaced
45 45
46 46 :param unicode_: unicode to encode
47 47 :rtype: str
48 48 :returns: str object
49 49 """
50 50
51 51 # if it's not a basestring, cast to str
52 52 if not isinstance(unicode_, basestring):
53 53 return str(unicode_)
54 54
55 55 if isinstance(unicode_, str):
56 56 return unicode_
57 57
58 58 if not isinstance(to_encoding, (list, tuple)):
59 59 to_encoding = [to_encoding]
60 60
61 61 for enc in to_encoding:
62 62 try:
63 63 return unicode_.encode(enc)
64 64 except UnicodeEncodeError:
65 65 pass
66 66
67 67 try:
68 68 import chardet
69 69 encoding = chardet.detect(unicode_)['encoding']
70 70 if encoding is None:
71 71 raise UnicodeEncodeError()
72 72
73 73 return unicode_.encode(encoding)
74 74 except (ImportError, UnicodeEncodeError):
75 75 return unicode_.encode(to_encoding[0], 'replace')
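# e.g. safe_str(u'caf\xe9') -> 'caf\xc3\xa9' (utf8 bytes), safe_str(42) -> '42'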
76
77
78 class AttributeDict(dict):
79 def __getattr__(self, attr):
80 return self.get(attr, None)
81 __setattr__ = dict.__setitem__
82 __delattr__ = dict.__delitem__
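# e.g. d = AttributeDict(name='svn'); d.name -> 'svn'; d.missing -> None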