svn: added support for hooks management of git and subversion....
marcink - r407:34976bc5 default
@@ -0,0 +1,154 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20 import re
21 import os
22 import sys
23 import datetime
24 import logging
25 import pkg_resources
26
27 import vcsserver
28
29 log = logging.getLogger(__name__)
30
31
32 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
33 """
34 Creates RhodeCode pre-receive and post-receive hooks inside a git repository
35
36 :param repo_path: path to repository
37 :param executable: binary executable to put in the hooks
38 :param force_create: Create even if same name hook exists
39 """
40 executable = executable or sys.executable
41 hooks_path = os.path.join(repo_path, 'hooks')
42 if not bare:
43 hooks_path = os.path.join(repo_path, '.git', 'hooks')
44 if not os.path.isdir(hooks_path):
45 os.makedirs(hooks_path, mode=0777)
46
47 tmpl_post = pkg_resources.resource_string(
48 'vcsserver', '/'.join(
49 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
50 tmpl_pre = pkg_resources.resource_string(
51 'vcsserver', '/'.join(
52 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
53
54 path = '' # not used for now
55 timestamp = datetime.datetime.utcnow().isoformat()
56
57 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
58 log.debug('Installing git hook in repo %s', repo_path)
59 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
60 _rhodecode_hook = check_rhodecode_hook(_hook_file)
61
62 if _rhodecode_hook or force_create:
63 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
64 try:
65 with open(_hook_file, 'wb') as f:
66 template = template.replace(
67 '_TMPL_', vcsserver.__version__)
68 template = template.replace('_DATE_', timestamp)
69 template = template.replace('_ENV_', executable)
70 template = template.replace('_PATH_', path)
71 f.write(template)
72 os.chmod(_hook_file, 0755)
73 except IOError:
74 log.exception('error writing hook file %s', _hook_file)
75 else:
76 log.debug('skipping writing hook file')
77
78 return True
79
80
81 def install_svn_hooks(repo_path, executable=None, force_create=False):
82 """
83 Creates RhodeCode hooks inside an svn repository
84
85 :param repo_path: path to repository
86 :param executable: binary executable to put in the hooks
87 :param force_create: Create even if same name hook exists
88 """
89 executable = executable or sys.executable
90 hooks_path = os.path.join(repo_path, 'hooks')
91 if not os.path.isdir(hooks_path):
92 os.makedirs(hooks_path, mode=0777)
93
94 tmpl_post = pkg_resources.resource_string(
95 'vcsserver', '/'.join(
96 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
97 tmpl_pre = pkg_resources.resource_string(
98 'vcsserver', '/'.join(
99 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
100
101 path = '' # not used for now
102 timestamp = datetime.datetime.utcnow().isoformat()
103
104 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
105 log.debug('Installing svn hook in repo %s', repo_path)
106 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
107 _rhodecode_hook = check_rhodecode_hook(_hook_file)
108
109 if _rhodecode_hook or force_create:
110 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
111
112 try:
113 with open(_hook_file, 'wb') as f:
114 template = template.replace(
115 '_TMPL_', vcsserver.__version__)
116 template = template.replace('_DATE_', timestamp)
117 template = template.replace('_ENV_', executable)
118 template = template.replace('_PATH_', path)
119
120 f.write(template)
121 os.chmod(_hook_file, 0755)
122 except IOError:
123 log.exception('error writing hook file %s', _hook_file)
124 else:
125 log.debug('skipping writing hook file')
126
127 return True
128
129
130 def check_rhodecode_hook(hook_path):
131 """
132 Check if the hook was created by RhodeCode
133 """
134 if not os.path.exists(hook_path):
135 return True
136
137 log.debug('hook exists, checking if it is from rhodecode')
138 hook_content = read_hook_content(hook_path)
139 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
140 if matches:
141 try:
142 version = matches.groups()[0]
143 log.debug('got version %s from hooks.', version)
144 return True
145 except Exception:
146 log.exception("Exception while reading the hook version.")
147
148 return False
149
150
151 def read_hook_content(hook_path):
152 with open(hook_path, 'rb') as f:
153 content = f.read()
154 return content
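
For context, a minimal usage sketch of the vcsserver.hook_utils module introduced above; the repository paths are hypothetical examples and not part of this commit. The install functions return True and render the bundled templates into the repository's hooks directory:

    from vcsserver import hook_utils

    # bare git repository: hooks go directly under <repo>/hooks (path is an example)
    hook_utils.install_git_hooks('/srv/repos/demo.git', bare=True)

    # working-copy git repository: hooks go under <repo>/.git/hooks
    hook_utils.install_git_hooks('/srv/checkouts/demo', bare=False)

    # subversion repository: writes pre-commit and post-commit hooks
    hook_utils.install_svn_hooks('/srv/repos/demo-svn', force_create=True)

    # returns True only for a missing hook or one carrying an RC_HOOK_VER marker,
    # so foreign hooks are not overwritten unless force_create is passed
    hook_utils.check_rhodecode_hook('/srv/repos/demo.git/hooks/post-receive')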
@@ -0,0 +1,51 b''
1 #!_ENV_
2 import os
3 import sys
4 path_adjust = [_PATH_]
5
6 if path_adjust:
7 sys.path = path_adjust
8
9 try:
10 from vcsserver import hooks
11 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
14 print traceback.format_exc()
15 hooks = None
16
17
18 # TIMESTAMP: _DATE_
19 RC_HOOK_VER = '_TMPL_'
20
21
22 def main():
23 if hooks is None:
24 # exit with success if we cannot import vcsserver.hooks !!
25 # this allows pushing to this repo even without rhodecode
26 sys.exit(0)
27
28 if os.environ.get('RC_SKIP_HOOKS'):
29 sys.exit(0)
30
31 repo_path = os.getcwd()
32 push_data = sys.stdin.readlines()
33 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
34 # os.environ here was populated by the subprocess call that runs git;
35 # git then executes this hook with that environment.
36 # It carries extra info from the rhodecode system, such as the
37 # IP or the username from basic-auth.
38 try:
39 result = hooks.git_post_receive(repo_path, push_data, os.environ)
40 sys.exit(result)
41 except Exception as error:
42 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
45 print 'ERROR:', error
46 sys.exit(1)
47 sys.exit(0)
48
49
50 if __name__ == '__main__':
51 main()
@@ -0,0 +1,51 b''
1 #!_ENV_
2 import os
3 import sys
4 path_adjust = [_PATH_]
5
6 if path_adjust:
7 sys.path = path_adjust
8
9 try:
10 from vcsserver import hooks
11 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
14 print traceback.format_exc()
15 hooks = None
16
17
18 # TIMESTAMP: _DATE_
19 RC_HOOK_VER = '_TMPL_'
20
21
22 def main():
23 if hooks is None:
24 # exit with success if we cannot import vcsserver.hooks !!
25 # this allows pushing to this repo even without rhodecode
26 sys.exit(0)
27
28 if os.environ.get('RC_SKIP_HOOKS'):
29 sys.exit(0)
30
31 repo_path = os.getcwd()
32 push_data = sys.stdin.readlines()
33 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
34 # os.environ here was populated by the subprocess call that runs git;
35 # git then executes this hook with that environment.
36 # It carries extra info from the rhodecode system, such as the
37 # IP or the username from basic-auth.
38 try:
39 result = hooks.git_pre_receive(repo_path, push_data, os.environ)
40 sys.exit(result)
41 except Exception as error:
42 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
45 print 'ERROR:', error
46 sys.exit(1)
47 sys.exit(0)
48
49
50 if __name__ == '__main__':
51 main()
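
Both git hook templates above collect the ref updates that git writes to the hook's stdin: one line per updated ref of the form '<old-sha> <new-sha> <ref-name>'. A small illustrative parser (not part of this commit) showing the shape of push_data:

    # illustrative only: each line of push_data looks like
    # 'aaaa... bbbb... refs/heads/master'
    def parse_push_data(lines):
        updates = []
        for line in lines:
            old_rev, new_rev, ref = line.strip().split(' ', 2)
            updates.append({'old_rev': old_rev, 'new_rev': new_rev, 'ref': ref})
        return updates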
@@ -0,0 +1,50 b''
1 #!_ENV_
2
3 import os
4 import sys
5 path_adjust = [_PATH_]
6
7 if path_adjust:
8 sys.path = path_adjust
9
10 try:
11 from vcsserver import hooks
12 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
15 print traceback.format_exc()
16 hooks = None
17
18
19 # TIMESTAMP: _DATE_
20 RC_HOOK_VER = '_TMPL_'
21
22
23 def main():
24 if hooks is None:
25 # exit with success if we cannot import vcsserver.hooks !!
26 # this allows pushing to this repo even without rhodecode
27 sys.exit(0)
28
29 if os.environ.get('RC_SKIP_HOOKS'):
30 sys.exit(0)
31 repo_path = os.getcwd()
32 push_data = sys.argv[1:]
33
34 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
35
36 try:
37 result = hooks.svn_post_commit(repo_path, push_data, os.environ)
38 sys.exit(result)
39 except Exception as error:
40 # TODO: johbo: Improve handling of this special case
41 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
42 raise
43 print 'ERROR:', error
44 sys.exit(1)
45 sys.exit(0)
46
47
48
49 if __name__ == '__main__':
50 main()
@@ -0,0 +1,52 b''
1 #!_ENV_
2
3 import os
4 import sys
5 path_adjust = [_PATH_]
6
7 if path_adjust:
8 sys.path = path_adjust
9
10 try:
11 from vcsserver import hooks
12 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
15 print traceback.format_exc()
16 hooks = None
17
18
19 # TIMESTAMP: _DATE_
20 RC_HOOK_VER = '_TMPL_'
21
22
23 def main():
24 if os.environ.get('SSH_READ_ONLY') == '1':
25 sys.stderr.write('Only read-only access is allowed')
26 sys.exit(1)
27
28 if hooks is None:
29 # exit with success if we cannot import vcsserver.hooks !!
30 # this allows pushing to this repo even without rhodecode
31 sys.exit(0)
32 if os.environ.get('RC_SKIP_HOOKS'):
33 sys.exit(0)
34 repo_path = os.getcwd()
35 push_data = sys.argv[1:]
36
37 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
38
39 try:
40 result = hooks.svn_pre_commit(repo_path, push_data, os.environ)
41 sys.exit(result)
42 except Exception as error:
43 # TODO: johbo: Improve handling of this special case
44 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
45 raise
46 print 'ERROR:', error
47 sys.exit(1)
48 sys.exit(0)
49
50
51 if __name__ == '__main__':
52 main()
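
The svn templates differ from the git ones in how they receive their input: Subversion passes the data as command-line arguments rather than on stdin, so push_data above is sys.argv[1:]. By convention the post-commit hook is invoked with the repository path and the committed revision, and the pre-commit hook with the repository path and the transaction name; the paths and numbers below are illustrative only:

    # post-commit: <repo>/hooks/post-commit  /srv/repos/demo-svn  42
    # pre-commit:  <repo>/hooks/pre-commit   /srv/repos/demo-svn  42-abc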
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,206 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import sys
20 import stat
21 import pytest
22 import vcsserver
23 import tempfile
24 from vcsserver import hook_utils
25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.utils import AttributeDict
27
28
29 class TestCheckRhodecodeHook(object):
30
31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 with open(hook, 'wb') as f:
34 f.write('dummy test')
35 result = hook_utils.check_rhodecode_hook(hook)
36 assert result is False
37
38 def test_returns_true_when_no_hook_file_found(self, tmpdir):
39 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
40 result = hook_utils.check_rhodecode_hook(hook)
41 assert result
42
43 @pytest.mark.parametrize("file_content, expected_result", [
44 ("RC_HOOK_VER = '3.3.3'\n", True),
45 ("RC_HOOK = '3.3.3'\n", False),
46 ], ids=no_newline_id_generator)
47 def test_signatures(self, file_content, expected_result, tmpdir):
48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 with open(hook, 'wb') as f:
50 f.write(file_content)
51
52 result = hook_utils.check_rhodecode_hook(hook)
53
54 assert result is expected_result
55
56
57 class BaseInstallHooks(object):
58 HOOK_FILES = ()
59
60 def _check_hook_file_mode(self, file_path):
61 assert os.path.exists(file_path), 'path %s missing' % file_path
62 stat_info = os.stat(file_path)
63
64 file_mode = stat.S_IMODE(stat_info.st_mode)
65 expected_mode = int('755', 8)
66 assert expected_mode == file_mode
67
68 def _check_hook_file_content(self, file_path, executable):
69 executable = executable or sys.executable
70 with open(file_path, 'rt') as hook_file:
71 content = hook_file.read()
72
73 expected_env = '#!{}'.format(executable)
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 vcsserver.__version__)
76 assert content.strip().startswith(expected_env)
77 assert expected_rc_version in content
78
79 def _create_fake_hook(self, file_path, content):
80 with open(file_path, 'w') as hook_file:
81 hook_file.write(content)
82
83 def create_dummy_repo(self, repo_type):
84 tmpdir = tempfile.mkdtemp()
85 repo = AttributeDict()
86 if repo_type == 'git':
87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 os.makedirs(repo.path)
89 os.makedirs(os.path.join(repo.path, 'hooks'))
90 repo.bare = True
91
92 elif repo_type == 'svn':
93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 os.makedirs(repo.path)
95 os.makedirs(os.path.join(repo.path, 'hooks'))
96
97 return repo
98
99 def check_hooks(self, repo_path, repo_bare=True):
100 for file_name in self.HOOK_FILES:
101 if repo_bare:
102 file_path = os.path.join(repo_path, 'hooks', file_name)
103 else:
104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 self._check_hook_file_mode(file_path)
106 self._check_hook_file_content(file_path, sys.executable)
107
108
109 class TestInstallGitHooks(BaseInstallHooks):
110 HOOK_FILES = ('pre-receive', 'post-receive')
111
112 def test_hooks_are_installed(self):
113 repo = self.create_dummy_repo('git')
114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 assert result
116 self.check_hooks(repo.path, repo.bare)
117
118 def test_hooks_are_replaced(self):
119 repo = self.create_dummy_repo('git')
120 hooks_path = os.path.join(repo.path, 'hooks')
121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 self._create_fake_hook(
123 file_path, content="RC_HOOK_VER = 'abcde'\n")
124
125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 assert result
127 self.check_hooks(repo.path, repo.bare)
128
129 def test_non_rc_hooks_are_not_replaced(self):
130 repo = self.create_dummy_repo('git')
131 hooks_path = os.path.join(repo.path, 'hooks')
132 non_rc_content = 'echo "non rc hook"\n'
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 self._create_fake_hook(
135 file_path, content=non_rc_content)
136
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 assert result
139
140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 with open(file_path, 'rt') as hook_file:
142 content = hook_file.read()
143 assert content == non_rc_content
144
145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 repo = self.create_dummy_repo('git')
147 hooks_path = os.path.join(repo.path, 'hooks')
148 non_rc_content = 'echo "non rc hook"\n'
149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 self._create_fake_hook(
151 file_path, content=non_rc_content)
152
153 result = hook_utils.install_git_hooks(
154 repo.path, repo.bare, force_create=True)
155 assert result
156 self.check_hooks(repo.path, repo.bare)
157
158
159 class TestInstallSvnHooks(BaseInstallHooks):
160 HOOK_FILES = ('pre-commit', 'post-commit')
161
162 def test_hooks_are_installed(self):
163 repo = self.create_dummy_repo('svn')
164 result = hook_utils.install_svn_hooks(repo.path)
165 assert result
166 self.check_hooks(repo.path)
167
168 def test_hooks_are_replaced(self):
169 repo = self.create_dummy_repo('svn')
170 hooks_path = os.path.join(repo.path, 'hooks')
171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 self._create_fake_hook(
173 file_path, content="RC_HOOK_VER = 'abcde'\n")
174
175 result = hook_utils.install_svn_hooks(repo.path)
176 assert result
177 self.check_hooks(repo.path)
178
179 def test_non_rc_hooks_are_not_replaced(self):
180 repo = self.create_dummy_repo('svn')
181 hooks_path = os.path.join(repo.path, 'hooks')
182 non_rc_content = 'echo "non rc hook"\n'
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 self._create_fake_hook(
185 file_path, content=non_rc_content)
186
187 result = hook_utils.install_svn_hooks(repo.path)
188 assert result
189
190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 with open(file_path, 'rt') as hook_file:
192 content = hook_file.read()
193 assert content == non_rc_content
194
195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 repo = self.create_dummy_repo('svn')
197 hooks_path = os.path.join(repo.path, 'hooks')
198 non_rc_content = 'echo "non rc hook"\n'
199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 self._create_fake_hook(
201 file_path, content=non_rc_content)
202
203 result = hook_utils.install_svn_hooks(
204 repo.path, force_create=True)
205 assert result
205 self.check_hooks(repo.path)
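
The test module above can be exercised on its own with pytest; a minimal sketch, assuming the file lands in the vcsserver tests package (the exact path is not shown in this view):

    # the module path below is a guess based on the imports above
    import pytest
    pytest.main(['-x', 'vcsserver/tests/test_hook_utils.py'])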
@@ -1,13 +1,16 b''
1 # top level files
2 include *.rst
3 include *.txt
4
5 # package extras
6 include vcsserver/VERSION
7
8 # all config files
9 recursive-include configs *
10
11 # hook templates
12 recursive-include vcsserver/hook_utils/hook_templates *
13
14 # skip any tests files
15 recursive-exclude vcsserver/tests *
16
@@ -1,79 +1,83 b''
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6 [app:main]
7 use = egg:rhodecode-vcsserver
8
9 pyramid.default_locale_name = en
10 pyramid.includes =
11
12 # default locale used by VCS systems
13 locale = en_US.UTF-8
14
15 # cache regions, please don't change
16 beaker.cache.regions = repo_object
17 beaker.cache.repo_object.type = memorylru
18 beaker.cache.repo_object.max_items = 100
19 # cache auto-expires after N seconds
20 beaker.cache.repo_object.expire = 300
21 beaker.cache.repo_object.enabled = true
22
23 # path to binaries for vcsserver, it should be set by the installer
24 # at installation time, e.g /home/user/vcsserver-1/profile/bin
25 core.binary_dir = ""
26
27 [server:main]
28 ## COMMON ##
29 host = 0.0.0.0
30 port = 9900
31
32 use = egg:waitress#main
33
34
35 ################################
36 ### LOGGING CONFIGURATION ####
37 ################################
38 [loggers]
39 keys = root, vcsserver, beaker
40
41 [handlers]
42 keys = console
43
44 [formatters]
45 keys = generic
46
47 #############
48 ## LOGGERS ##
49 #############
50 [logger_root]
51 level = NOTSET
52 handlers = console
53
54 [logger_vcsserver]
55 level = DEBUG
56 handlers =
57 qualname = vcsserver
58 propagate = 1
59
60 [logger_beaker]
61 level = DEBUG
62 handlers =
63 qualname = beaker
64 propagate = 1
65
66
67 ##############
68 ## HANDLERS ##
69 ##############
70
71 [handler_console]
72 class = StreamHandler
73 args = (sys.stderr,)
74 level = DEBUG
75 formatter = generic
76
77 ################
78 ## FORMATTERS ##
79 ################
80
81 [formatter_generic]
82 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
83 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,102 +1,106 b''
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 127.0.0.1
10 port = 9900
11
12
13 ##########################
14 ## GUNICORN WSGI SERVER ##
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
21 ## process name
22 proc_name = rhodecode_vcsserver
23 ## type of worker class, currently `sync` is the only option allowed.
24 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
27 ## max number of requests that worker will handle before being gracefully
28 ## restarted, could prevent memory leaks
29 max_requests = 1000
30 max_requests_jitter = 30
31 ## amount of time a worker can spend with handling a request before it
32 ## gets killed and restarted. Set to 6hrs
33 timeout = 21600
34
35
36 [app:main]
37 use = egg:rhodecode-vcsserver
38
39 pyramid.default_locale_name = en
40 pyramid.includes =
41
42 ## default locale used by VCS systems
42 # default locale used by VCS systems
43 locale = en_US.UTF-8
44
45 # cache regions, please don't change
46 beaker.cache.regions = repo_object
47 beaker.cache.repo_object.type = memorylru
48 beaker.cache.repo_object.max_items = 100
49 # cache auto-expires after N seconds
50 beaker.cache.repo_object.expire = 300
51 beaker.cache.repo_object.enabled = true
52
53 # path to binaries for vcsserver, it should be set by the installer
54 # at installation time, e.g /home/user/vcsserver-1/profile/bin
55 core.binary_dir = ""
56
57
58 ################################
59 ### LOGGING CONFIGURATION ####
60 ################################
61 [loggers]
62 keys = root, vcsserver, beaker
63
64 [handlers]
65 keys = console
66
67 [formatters]
68 keys = generic
69
70 #############
71 ## LOGGERS ##
72 #############
73 [logger_root]
74 level = NOTSET
75 handlers = console
76
77 [logger_vcsserver]
78 level = DEBUG
79 handlers =
80 qualname = vcsserver
81 propagate = 1
82
83 [logger_beaker]
84 level = DEBUG
85 handlers =
86 qualname = beaker
87 propagate = 1
88
89
90 ##############
91 ## HANDLERS ##
92 ##############
93
94 [handler_console]
95 class = StreamHandler
96 args = (sys.stderr,)
97 level = DEBUG
98 formatter = generic
99
100 ################
101 ## FORMATTERS ##
102 ################
103
104 [formatter_generic]
105 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
102 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
106 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,663 +1,669 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 from dulwich import index, objects
28 from dulwich import index, objects
29 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.errors import (
30 from dulwich.errors import (
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 MissingCommitError, ObjectMissing, HangupException,
32 MissingCommitError, ObjectMissing, HangupException,
33 UnexpectedCommandError)
33 UnexpectedCommandError)
34 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.server import update_server_info
35 from dulwich.server import update_server_info
36
36
37 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver.utils import safe_str
38 from vcsserver.utils import safe_str
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.hgcompat import (
40 from vcsserver.hgcompat import (
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 from vcsserver.git_lfs.lib import LFSOidStore
42 from vcsserver.git_lfs.lib import LFSOidStore
43
43
44 DIR_STAT = stat.S_IFDIR
44 DIR_STAT = stat.S_IFDIR
45 FILE_MODE = stat.S_IFMT
45 FILE_MODE = stat.S_IFMT
46 GIT_LINK = objects.S_IFGITLINK
46 GIT_LINK = objects.S_IFGITLINK
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 def reraise_safe_exceptions(func):
51 def reraise_safe_exceptions(func):
52 """Converts Dulwich exceptions to something neutral."""
52 """Converts Dulwich exceptions to something neutral."""
53 @wraps(func)
53 @wraps(func)
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 ObjectMissing) as e:
58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e.message)
60 except (HangupException, UnexpectedCommandError) as e:
60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e.message)
62 except Exception as e:
62 except Exception as e:
63 # NOTE(marcink): becuase of how dulwich handles some exceptions
63 # NOTE(marcink): becuase of how dulwich handles some exceptions
64 # (KeyError on empty repos), we cannot track this and catch all
64 # (KeyError on empty repos), we cannot track this and catch all
65 # exceptions, it's an exceptions from other handlers
65 # exceptions, it's an exceptions from other handlers
66 #if not hasattr(e, '_vcs_kind'):
66 #if not hasattr(e, '_vcs_kind'):
67 #log.exception("Unhandled exception in git remote call")
67 #log.exception("Unhandled exception in git remote call")
68 #raise_from_original(exceptions.UnhandledException)
68 #raise_from_original(exceptions.UnhandledException)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class Repo(DulwichRepo):
73 class Repo(DulwichRepo):
74 """
74 """
75 A wrapper for dulwich Repo class.
75 A wrapper for dulwich Repo class.
76
76
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 "Too many open files" error. We need to close all opened file descriptors
78 "Too many open files" error. We need to close all opened file descriptors
79 once the repo object is destroyed.
79 once the repo object is destroyed.
80
80
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 to 0.12.0 +
82 to 0.12.0 +
83 """
83 """
84 def __del__(self):
84 def __del__(self):
85 if hasattr(self, 'object_store'):
85 if hasattr(self, 'object_store'):
86 self.close()
86 self.close()
87
87
88
88
89 class GitFactory(RepoFactory):
89 class GitFactory(RepoFactory):
90
90
91 def _create_repo(self, wire, create):
91 def _create_repo(self, wire, create):
92 repo_path = str_to_dulwich(wire['path'])
92 repo_path = str_to_dulwich(wire['path'])
93 return Repo(repo_path)
93 return Repo(repo_path)
94
94
95
95
96 class GitRemote(object):
96 class GitRemote(object):
97
97
98 def __init__(self, factory):
98 def __init__(self, factory):
99 self._factory = factory
99 self._factory = factory
100
100
101 self._bulk_methods = {
101 self._bulk_methods = {
102 "author": self.commit_attribute,
102 "author": self.commit_attribute,
103 "date": self.get_object_attrs,
103 "date": self.get_object_attrs,
104 "message": self.commit_attribute,
104 "message": self.commit_attribute,
105 "parents": self.commit_attribute,
105 "parents": self.commit_attribute,
106 "_commit": self.revision,
106 "_commit": self.revision,
107 }
107 }
108
108
109 def _wire_to_config(self, wire):
109 def _wire_to_config(self, wire):
110 if 'config' in wire:
110 if 'config' in wire:
111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 return {}
112 return {}
113
113
114 def _assign_ref(self, wire, ref, commit_id):
114 def _assign_ref(self, wire, ref, commit_id):
115 repo = self._factory.repo(wire)
115 repo = self._factory.repo(wire)
116 repo[ref] = commit_id
116 repo[ref] = commit_id
117
117
118 @reraise_safe_exceptions
118 @reraise_safe_exceptions
119 def add_object(self, wire, content):
119 def add_object(self, wire, content):
120 repo = self._factory.repo(wire)
120 repo = self._factory.repo(wire)
121 blob = objects.Blob()
121 blob = objects.Blob()
122 blob.set_raw_string(content)
122 blob.set_raw_string(content)
123 repo.object_store.add_object(blob)
123 repo.object_store.add_object(blob)
124 return blob.id
124 return blob.id
125
125
126 @reraise_safe_exceptions
126 @reraise_safe_exceptions
127 def assert_correct_path(self, wire):
127 def assert_correct_path(self, wire):
128 path = wire.get('path')
128 path = wire.get('path')
129 try:
129 try:
130 self._factory.repo(wire)
130 self._factory.repo(wire)
131 except NotGitRepository as e:
131 except NotGitRepository as e:
132 tb = traceback.format_exc()
132 tb = traceback.format_exc()
133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 return False
134 return False
135
135
136 return True
136 return True
137
137
138 @reraise_safe_exceptions
138 @reraise_safe_exceptions
139 def bare(self, wire):
139 def bare(self, wire):
140 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
141 return repo.bare
141 return repo.bare
142
142
143 @reraise_safe_exceptions
143 @reraise_safe_exceptions
144 def blob_as_pretty_string(self, wire, sha):
144 def blob_as_pretty_string(self, wire, sha):
145 repo = self._factory.repo(wire)
145 repo = self._factory.repo(wire)
146 return repo[sha].as_pretty_string()
146 return repo[sha].as_pretty_string()
147
147
148 @reraise_safe_exceptions
148 @reraise_safe_exceptions
149 def blob_raw_length(self, wire, sha):
149 def blob_raw_length(self, wire, sha):
150 repo = self._factory.repo(wire)
150 repo = self._factory.repo(wire)
151 blob = repo[sha]
151 blob = repo[sha]
152 return blob.raw_length()
152 return blob.raw_length()
153
153
154 def _parse_lfs_pointer(self, raw_content):
154 def _parse_lfs_pointer(self, raw_content):
155
155
156 spec_string = 'version https://git-lfs.github.com/spec'
156 spec_string = 'version https://git-lfs.github.com/spec'
157 if raw_content and raw_content.startswith(spec_string):
157 if raw_content and raw_content.startswith(spec_string):
158 pattern = re.compile(r"""
158 pattern = re.compile(r"""
159 (?:\n)?
159 (?:\n)?
160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 ^size[ ](?P<oid_size>[0-9]+)\n
162 ^size[ ](?P<oid_size>[0-9]+)\n
163 (?:\n)?
163 (?:\n)?
164 """, re.VERBOSE | re.MULTILINE)
164 """, re.VERBOSE | re.MULTILINE)
165 match = pattern.match(raw_content)
165 match = pattern.match(raw_content)
166 if match:
166 if match:
167 return match.groupdict()
167 return match.groupdict()
168
168
169 return {}
169 return {}
170
170
171 @reraise_safe_exceptions
171 @reraise_safe_exceptions
172 def is_large_file(self, wire, sha):
172 def is_large_file(self, wire, sha):
173 repo = self._factory.repo(wire)
173 repo = self._factory.repo(wire)
174 blob = repo[sha]
174 blob = repo[sha]
175 return self._parse_lfs_pointer(blob.as_raw_string())
175 return self._parse_lfs_pointer(blob.as_raw_string())
176
176
177 @reraise_safe_exceptions
177 @reraise_safe_exceptions
178 def in_largefiles_store(self, wire, oid):
178 def in_largefiles_store(self, wire, oid):
179 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
180 conf = self._wire_to_config(wire)
180 conf = self._wire_to_config(wire)
181
181
182 store_location = conf.get('vcs_git_lfs_store_location')
182 store_location = conf.get('vcs_git_lfs_store_location')
183 if store_location:
183 if store_location:
184 repo_name = repo.path
184 repo_name = repo.path
185 store = LFSOidStore(
185 store = LFSOidStore(
186 oid=oid, repo=repo_name, store_location=store_location)
186 oid=oid, repo=repo_name, store_location=store_location)
187 return store.has_oid()
187 return store.has_oid()
188
188
189 return False
189 return False
190
190
191 @reraise_safe_exceptions
191 @reraise_safe_exceptions
192 def store_path(self, wire, oid):
192 def store_path(self, wire, oid):
193 repo = self._factory.repo(wire)
193 repo = self._factory.repo(wire)
194 conf = self._wire_to_config(wire)
194 conf = self._wire_to_config(wire)
195
195
196 store_location = conf.get('vcs_git_lfs_store_location')
196 store_location = conf.get('vcs_git_lfs_store_location')
197 if store_location:
197 if store_location:
198 repo_name = repo.path
198 repo_name = repo.path
199 store = LFSOidStore(
199 store = LFSOidStore(
200 oid=oid, repo=repo_name, store_location=store_location)
200 oid=oid, repo=repo_name, store_location=store_location)
201 return store.oid_path
201 return store.oid_path
202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def bulk_request(self, wire, rev, pre_load):
205 def bulk_request(self, wire, rev, pre_load):
206 result = {}
206 result = {}
207 for attr in pre_load:
207 for attr in pre_load:
208 try:
208 try:
209 method = self._bulk_methods[attr]
209 method = self._bulk_methods[attr]
210 args = [wire, rev]
210 args = [wire, rev]
211 if attr == "date":
211 if attr == "date":
212 args.extend(["commit_time", "commit_timezone"])
212 args.extend(["commit_time", "commit_timezone"])
213 elif attr in ["author", "message", "parents"]:
213 elif attr in ["author", "message", "parents"]:
214 args.append(attr)
214 args.append(attr)
215 result[attr] = method(*args)
215 result[attr] = method(*args)
216 except KeyError:
216 except KeyError:
217 raise exceptions.VcsException(
217 raise exceptions.VcsException(
218 "Unknown bulk attribute: %s" % attr)
218 "Unknown bulk attribute: %s" % attr)
219 return result
219 return result
220
220
221 def _build_opener(self, url):
221 def _build_opener(self, url):
222 handlers = []
222 handlers = []
223 url_obj = url_parser(url)
223 url_obj = url_parser(url)
224 _, authinfo = url_obj.authinfo()
224 _, authinfo = url_obj.authinfo()
225
225
226 if authinfo:
226 if authinfo:
227 # create a password manager
227 # create a password manager
228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 passmgr.add_password(*authinfo)
229 passmgr.add_password(*authinfo)
230
230
231 handlers.extend((httpbasicauthhandler(passmgr),
231 handlers.extend((httpbasicauthhandler(passmgr),
232 httpdigestauthhandler(passmgr)))
232 httpdigestauthhandler(passmgr)))
233
233
234 return urllib2.build_opener(*handlers)
234 return urllib2.build_opener(*handlers)
235
235
236 @reraise_safe_exceptions
236 @reraise_safe_exceptions
237 def check_url(self, url, config):
237 def check_url(self, url, config):
238 url_obj = url_parser(url)
238 url_obj = url_parser(url)
239 test_uri, _ = url_obj.authinfo()
239 test_uri, _ = url_obj.authinfo()
240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 url_obj.query = obfuscate_qs(url_obj.query)
241 url_obj.query = obfuscate_qs(url_obj.query)
242 cleaned_uri = str(url_obj)
242 cleaned_uri = str(url_obj)
243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244
244
245 if not test_uri.endswith('info/refs'):
245 if not test_uri.endswith('info/refs'):
246 test_uri = test_uri.rstrip('/') + '/info/refs'
246 test_uri = test_uri.rstrip('/') + '/info/refs'
247
247
248 o = self._build_opener(url)
248 o = self._build_opener(url)
249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250
250
251 q = {"service": 'git-upload-pack'}
251 q = {"service": 'git-upload-pack'}
252 qs = '?%s' % urllib.urlencode(q)
252 qs = '?%s' % urllib.urlencode(q)
253 cu = "%s%s" % (test_uri, qs)
253 cu = "%s%s" % (test_uri, qs)
254 req = urllib2.Request(cu, None, {})
254 req = urllib2.Request(cu, None, {})
255
255
256 try:
256 try:
257 log.debug("Trying to open URL %s", cleaned_uri)
257 log.debug("Trying to open URL %s", cleaned_uri)
258 resp = o.open(req)
258 resp = o.open(req)
259 if resp.code != 200:
259 if resp.code != 200:
260 raise exceptions.URLError('Return Code is not 200')
260 raise exceptions.URLError('Return Code is not 200')
261 except Exception as e:
261 except Exception as e:
262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 # means it cannot be cloned
263 # means it cannot be cloned
264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265
265
266 # now detect if it's proper git repo
266 # now detect if it's proper git repo
267 gitdata = resp.read()
267 gitdata = resp.read()
268 if 'service=git-upload-pack' in gitdata:
268 if 'service=git-upload-pack' in gitdata:
269 pass
269 pass
270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 # old style git can return some other format !
271 # old style git can return some other format !
272 pass
272 pass
273 else:
273 else:
274 raise exceptions.URLError(
274 raise exceptions.URLError(
275 "url [%s] does not look like an git" % (cleaned_uri,))
275 "url [%s] does not look like an git" % (cleaned_uri,))
276
276
277 return True
277 return True
278
278
279 @reraise_safe_exceptions
279 @reraise_safe_exceptions
280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 remote_refs = self.fetch(wire, url, apply_refs=False)
281 remote_refs = self.fetch(wire, url, apply_refs=False)
282 repo = self._factory.repo(wire)
282 repo = self._factory.repo(wire)
283 if isinstance(valid_refs, list):
283 if isinstance(valid_refs, list):
284 valid_refs = tuple(valid_refs)
284 valid_refs = tuple(valid_refs)
285
285
286 for k in remote_refs:
286 for k in remote_refs:
287 # only parse heads/tags and skip so called deferred tags
287 # only parse heads/tags and skip so called deferred tags
288 if k.startswith(valid_refs) and not k.endswith(deferred):
288 if k.startswith(valid_refs) and not k.endswith(deferred):
289 repo[k] = remote_refs[k]
289 repo[k] = remote_refs[k]
290
290
291 if update_after_clone:
291 if update_after_clone:
292 # we want to checkout HEAD
292 # we want to checkout HEAD
293 repo["HEAD"] = remote_refs["HEAD"]
293 repo["HEAD"] = remote_refs["HEAD"]
294 index.build_index_from_tree(repo.path, repo.index_path(),
294 index.build_index_from_tree(repo.path, repo.index_path(),
295 repo.object_store, repo["HEAD"].tree)
295 repo.object_store, repo["HEAD"].tree)
296
296
297 # TODO: this is quite complex, check if that can be simplified
297 # TODO: this is quite complex, check if that can be simplified
298 @reraise_safe_exceptions
298 @reraise_safe_exceptions
299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 repo = self._factory.repo(wire)
300 repo = self._factory.repo(wire)
301 object_store = repo.object_store
301 object_store = repo.object_store
302
302
303 # Create tree and populates it with blobs
303 # Create tree and populates it with blobs
304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305
305
306 for node in updated:
306 for node in updated:
307 # Compute subdirs if needed
307 # Compute subdirs if needed
308 dirpath, nodename = vcspath.split(node['path'])
308 dirpath, nodename = vcspath.split(node['path'])
309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 parent = commit_tree
310 parent = commit_tree
311 ancestors = [('', parent)]
311 ancestors = [('', parent)]
312
312
313 # Tries to dig for the deepest existing tree
313 # Tries to dig for the deepest existing tree
314 while dirnames:
314 while dirnames:
315 curdir = dirnames.pop(0)
315 curdir = dirnames.pop(0)
316 try:
316 try:
317 dir_id = parent[curdir][1]
317 dir_id = parent[curdir][1]
318 except KeyError:
318 except KeyError:
319 # put curdir back into dirnames and stops
319 # put curdir back into dirnames and stops
320 dirnames.insert(0, curdir)
320 dirnames.insert(0, curdir)
321 break
321 break
322 else:
322 else:
323 # If found, updates parent
323 # If found, updates parent
324 parent = repo[dir_id]
324 parent = repo[dir_id]
325 ancestors.append((curdir, parent))
325 ancestors.append((curdir, parent))
326 # Now parent is deepest existing tree and we need to create
326 # Now parent is deepest existing tree and we need to create
327 # subtrees for dirnames (in reverse order)
327 # subtrees for dirnames (in reverse order)
328 # [this only applies for nodes from added]
328 # [this only applies for nodes from added]
329 new_trees = []
329 new_trees = []
330
330
331 blob = objects.Blob.from_string(node['content'])
331 blob = objects.Blob.from_string(node['content'])
332
332
333 if dirnames:
333 if dirnames:
334 # If there are trees which should be created we need to build
334 # If there are trees which should be created we need to build
335 # them now (in reverse order)
335 # them now (in reverse order)
336 reversed_dirnames = list(reversed(dirnames))
336 reversed_dirnames = list(reversed(dirnames))
337 curtree = objects.Tree()
337 curtree = objects.Tree()
338 curtree[node['node_path']] = node['mode'], blob.id
338 curtree[node['node_path']] = node['mode'], blob.id
339 new_trees.append(curtree)
339 new_trees.append(curtree)
340 for dirname in reversed_dirnames[:-1]:
340 for dirname in reversed_dirnames[:-1]:
341 newtree = objects.Tree()
341 newtree = objects.Tree()
342 newtree[dirname] = (DIR_STAT, curtree.id)
342 newtree[dirname] = (DIR_STAT, curtree.id)
343 new_trees.append(newtree)
343 new_trees.append(newtree)
344 curtree = newtree
344 curtree = newtree
345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 else:
346 else:
347 parent.add(
347 parent.add(
348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349
349
350 new_trees.append(parent)
350 new_trees.append(parent)
351 # Update ancestors
351 # Update ancestors
352 reversed_ancestors = reversed(
352 reversed_ancestors = reversed(
353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 for parent, tree, path in reversed_ancestors:
354 for parent, tree, path in reversed_ancestors:
355 parent[path] = (DIR_STAT, tree.id)
355 parent[path] = (DIR_STAT, tree.id)
356 object_store.add_object(tree)
356 object_store.add_object(tree)
357
357
358 object_store.add_object(blob)
358 object_store.add_object(blob)
359 for tree in new_trees:
359 for tree in new_trees:
360 object_store.add_object(tree)
360 object_store.add_object(tree)
361
361
362 for node_path in removed:
362 for node_path in removed:
363 paths = node_path.split('/')
363 paths = node_path.split('/')
364 tree = commit_tree
364 tree = commit_tree
365 trees = [tree]
365 trees = [tree]
366 # Traverse deep into the forest...
366 # Traverse deep into the forest...
367 for path in paths:
367 for path in paths:
368 try:
368 try:
369 obj = repo[tree[path][1]]
369 obj = repo[tree[path][1]]
370 if isinstance(obj, objects.Tree):
370 if isinstance(obj, objects.Tree):
371 trees.append(obj)
371 trees.append(obj)
372 tree = obj
372 tree = obj
373 except KeyError:
373 except KeyError:
374 break
374 break
375 # Cut down the blob and all rotten trees on the way back...
375 # Cut down the blob and all rotten trees on the way back...
376 for path, tree in reversed(zip(paths, trees)):
376 for path, tree in reversed(zip(paths, trees)):
377 del tree[path]
377 del tree[path]
378 if tree:
378 if tree:
379 # This tree still has elements - don't remove it or any
379 # This tree still has elements - don't remove it or any
380 # of it's parents
380 # of it's parents
381 break
381 break
382
382
383 object_store.add_object(commit_tree)
383 object_store.add_object(commit_tree)
384
384
385 # Create commit
385 # Create commit
386 commit = objects.Commit()
386 commit = objects.Commit()
387 commit.tree = commit_tree.id
387 commit.tree = commit_tree.id
388 for k, v in commit_data.iteritems():
388 for k, v in commit_data.iteritems():
389 setattr(commit, k, v)
389 setattr(commit, k, v)
390 object_store.add_object(commit)
390 object_store.add_object(commit)
391
391
392 ref = 'refs/heads/%s' % branch
392 ref = 'refs/heads/%s' % branch
393 repo.refs[ref] = commit.id
393 repo.refs[ref] = commit.id
394
394
395 return commit.id
395 return commit.id
396
396
397 @reraise_safe_exceptions
397 @reraise_safe_exceptions
398 def fetch(self, wire, url, apply_refs=True, refs=None):
398 def fetch(self, wire, url, apply_refs=True, refs=None):
399 if url != 'default' and '://' not in url:
399 if url != 'default' and '://' not in url:
400 client = LocalGitClient(url)
400 client = LocalGitClient(url)
401 else:
401 else:
402 url_obj = url_parser(url)
402 url_obj = url_parser(url)
403 o = self._build_opener(url)
403 o = self._build_opener(url)
404 url, _ = url_obj.authinfo()
404 url, _ = url_obj.authinfo()
405 client = HttpGitClient(base_url=url, opener=o)
405 client = HttpGitClient(base_url=url, opener=o)
406 repo = self._factory.repo(wire)
406 repo = self._factory.repo(wire)
407
407
408 determine_wants = repo.object_store.determine_wants_all
408 determine_wants = repo.object_store.determine_wants_all
409 if refs:
409 if refs:
410 def determine_wants_requested(references):
410 def determine_wants_requested(references):
411 return [references[r] for r in references if r in refs]
411 return [references[r] for r in references if r in refs]
412 determine_wants = determine_wants_requested
412 determine_wants = determine_wants_requested
413
413
414 try:
414 try:
415 remote_refs = client.fetch(
415 remote_refs = client.fetch(
416 path=url, target=repo, determine_wants=determine_wants)
416 path=url, target=repo, determine_wants=determine_wants)
417 except NotGitRepository as e:
417 except NotGitRepository as e:
418 log.warning(
418 log.warning(
419 'Trying to fetch from "%s" failed, not a Git repository.', url)
419 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 # Exception can contain unicode which we convert
420 # Exception can contain unicode which we convert
421 raise exceptions.AbortException(repr(e))
421 raise exceptions.AbortException(repr(e))
422
422
423 # mikhail: client.fetch() returns all the remote refs, but fetches only
423 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 # refs filtered by `determine_wants` function. We need to filter result
424 # refs filtered by `determine_wants` function. We need to filter result
425 # as well
425 # as well
426 if refs:
426 if refs:
427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428
428
429 if apply_refs:
429 if apply_refs:
430 # TODO: johbo: Needs proper test coverage with a git repository
430 # TODO: johbo: Needs proper test coverage with a git repository
431 # that contains a tag object, so that we would end up with
431 # that contains a tag object, so that we would end up with
432 # a peeled ref at this point.
432 # a peeled ref at this point.
433 PEELED_REF_MARKER = '^{}'
433 PEELED_REF_MARKER = '^{}'
434 for k in remote_refs:
434 for k in remote_refs:
435 if k.endswith(PEELED_REF_MARKER):
435 if k.endswith(PEELED_REF_MARKER):
436 log.info("Skipping peeled reference %s", k)
436 log.info("Skipping peeled reference %s", k)
437 continue
437 continue
438 repo[k] = remote_refs[k]
438 repo[k] = remote_refs[k]
439
439
440 if refs:
440 if refs:
441 # mikhail: explicitly set the head to the last ref.
441 # mikhail: explicitly set the head to the last ref.
442 repo['HEAD'] = remote_refs[refs[-1]]
442 repo['HEAD'] = remote_refs[refs[-1]]
443
443
444 # TODO: mikhail: should we return remote_refs here to be
444 # TODO: mikhail: should we return remote_refs here to be
445 # consistent?
445 # consistent?
446 else:
446 else:
447 return remote_refs
447 return remote_refs
448
448
449 @reraise_safe_exceptions
449 @reraise_safe_exceptions
450 def sync_push(self, wire, url, refs=None):
450 def sync_push(self, wire, url, refs=None):
451 if self.check_url(url, wire):
451 if self.check_url(url, wire):
452 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
453 self.run_git_command(
453 self.run_git_command(
454 wire, ['push', url, '--mirror'], fail_on_stderr=False,
454 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 _copts=['-c', 'core.askpass=""'],
455 _copts=['-c', 'core.askpass=""'],
456 extra_env={'GIT_TERMINAL_PROMPT': '0'})
456 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457
457
458 @reraise_safe_exceptions
458 @reraise_safe_exceptions
459 def get_remote_refs(self, wire, url):
459 def get_remote_refs(self, wire, url):
460 repo = Repo(url)
460 repo = Repo(url)
461 return repo.get_refs()
461 return repo.get_refs()
462
462
463 @reraise_safe_exceptions
463 @reraise_safe_exceptions
464 def get_description(self, wire):
464 def get_description(self, wire):
465 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
466 return repo.get_description()
466 return repo.get_description()
467
467
468 @reraise_safe_exceptions
468 @reraise_safe_exceptions
469 def get_file_history(self, wire, file_path, commit_id, limit):
469 def get_file_history(self, wire, file_path, commit_id, limit):
470 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
471 include = [commit_id]
471 include = [commit_id]
472 paths = [file_path]
472 paths = [file_path]
473
473
474 walker = repo.get_walker(include, paths=paths, max_entries=limit)
474 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 return [x.commit.id for x in walker]
475 return [x.commit.id for x in walker]
476
476
477 @reraise_safe_exceptions
477 @reraise_safe_exceptions
478 def get_missing_revs(self, wire, rev1, rev2, path2):
478 def get_missing_revs(self, wire, rev1, rev2, path2):
479 repo = self._factory.repo(wire)
479 repo = self._factory.repo(wire)
480 LocalGitClient(thin_packs=False).fetch(path2, repo)
480 LocalGitClient(thin_packs=False).fetch(path2, repo)
481
481
482 wire_remote = wire.copy()
482 wire_remote = wire.copy()
483 wire_remote['path'] = path2
483 wire_remote['path'] = path2
484 repo_remote = self._factory.repo(wire_remote)
484 repo_remote = self._factory.repo(wire_remote)
485 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
485 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486
486
487 revs = [
487 revs = [
488 x.commit.id
488 x.commit.id
489 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
489 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 return revs
490 return revs
491
491
492 @reraise_safe_exceptions
492 @reraise_safe_exceptions
493 def get_object(self, wire, sha):
493 def get_object(self, wire, sha):
494 repo = self._factory.repo(wire)
494 repo = self._factory.repo(wire)
495 obj = repo.get_object(sha)
495 obj = repo.get_object(sha)
496 commit_id = obj.id
496 commit_id = obj.id
497
497
498 if isinstance(obj, Tag):
498 if isinstance(obj, Tag):
499 commit_id = obj.object[1]
499 commit_id = obj.object[1]
500
500
501 return {
501 return {
502 'id': obj.id,
502 'id': obj.id,
503 'type': obj.type_name,
503 'type': obj.type_name,
504 'commit_id': commit_id
504 'commit_id': commit_id
505 }
505 }
506
506
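For reference, a hypothetical payload returned by get_object() for an annotated tag; the shas are made up. On a dulwich Tag, obj.object is a (type, sha) pair, which is why obj.object[1] above yields the tagged commit.

tag_payload = {
    'id': 'd3adb33f0000',        # sha of the tag object itself (made up)
    'type': 'tag',
    'commit_id': 'c0ffee120000',  # sha the tag points at, i.e. obj.object[1]
}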
507 @reraise_safe_exceptions
507 @reraise_safe_exceptions
508 def get_object_attrs(self, wire, sha, *attrs):
508 def get_object_attrs(self, wire, sha, *attrs):
509 repo = self._factory.repo(wire)
509 repo = self._factory.repo(wire)
510 obj = repo.get_object(sha)
510 obj = repo.get_object(sha)
511 return list(getattr(obj, a) for a in attrs)
511 return list(getattr(obj, a) for a in attrs)
512
512
513 @reraise_safe_exceptions
513 @reraise_safe_exceptions
514 def get_refs(self, wire):
514 def get_refs(self, wire):
515 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
516 result = {}
516 result = {}
517 for ref, sha in repo.refs.as_dict().items():
517 for ref, sha in repo.refs.as_dict().items():
518 peeled_sha = repo.get_peeled(ref)
518 peeled_sha = repo.get_peeled(ref)
519 result[ref] = peeled_sha
519 result[ref] = peeled_sha
520 return result
520 return result
521
521
522 @reraise_safe_exceptions
522 @reraise_safe_exceptions
523 def get_refs_path(self, wire):
523 def get_refs_path(self, wire):
524 repo = self._factory.repo(wire)
524 repo = self._factory.repo(wire)
525 return repo.refs.path
525 return repo.refs.path
526
526
527 @reraise_safe_exceptions
527 @reraise_safe_exceptions
528 def head(self, wire):
528 def head(self, wire):
529 repo = self._factory.repo(wire)
529 repo = self._factory.repo(wire)
530 return repo.head()
530 return repo.head()
531
531
532 @reraise_safe_exceptions
532 @reraise_safe_exceptions
533 def init(self, wire):
533 def init(self, wire):
534 repo_path = str_to_dulwich(wire['path'])
534 repo_path = str_to_dulwich(wire['path'])
535 self.repo = Repo.init(repo_path)
535 self.repo = Repo.init(repo_path)
536
536
537 @reraise_safe_exceptions
537 @reraise_safe_exceptions
538 def init_bare(self, wire):
538 def init_bare(self, wire):
539 repo_path = str_to_dulwich(wire['path'])
539 repo_path = str_to_dulwich(wire['path'])
540 self.repo = Repo.init_bare(repo_path)
540 self.repo = Repo.init_bare(repo_path)
541
541
542 @reraise_safe_exceptions
542 @reraise_safe_exceptions
543 def revision(self, wire, rev):
543 def revision(self, wire, rev):
544 repo = self._factory.repo(wire)
544 repo = self._factory.repo(wire)
545 obj = repo[rev]
545 obj = repo[rev]
546 obj_data = {
546 obj_data = {
547 'id': obj.id,
547 'id': obj.id,
548 }
548 }
549 try:
549 try:
550 obj_data['tree'] = obj.tree
550 obj_data['tree'] = obj.tree
551 except AttributeError:
551 except AttributeError:
552 pass
552 pass
553 return obj_data
553 return obj_data
554
554
555 @reraise_safe_exceptions
555 @reraise_safe_exceptions
556 def commit_attribute(self, wire, rev, attr):
556 def commit_attribute(self, wire, rev, attr):
557 repo = self._factory.repo(wire)
557 repo = self._factory.repo(wire)
558 obj = repo[rev]
558 obj = repo[rev]
559 return getattr(obj, attr)
559 return getattr(obj, attr)
560
560
561 @reraise_safe_exceptions
561 @reraise_safe_exceptions
562 def set_refs(self, wire, key, value):
562 def set_refs(self, wire, key, value):
563 repo = self._factory.repo(wire)
563 repo = self._factory.repo(wire)
564 repo.refs[key] = value
564 repo.refs[key] = value
565
565
566 @reraise_safe_exceptions
566 @reraise_safe_exceptions
567 def remove_ref(self, wire, key):
567 def remove_ref(self, wire, key):
568 repo = self._factory.repo(wire)
568 repo = self._factory.repo(wire)
569 del repo.refs[key]
569 del repo.refs[key]
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def tree_changes(self, wire, source_id, target_id):
572 def tree_changes(self, wire, source_id, target_id):
573 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
574 source = repo[source_id].tree if source_id else None
574 source = repo[source_id].tree if source_id else None
575 target = repo[target_id].tree
575 target = repo[target_id].tree
576 result = repo.object_store.tree_changes(source, target)
576 result = repo.object_store.tree_changes(source, target)
577 return list(result)
577 return list(result)
578
578
579 @reraise_safe_exceptions
579 @reraise_safe_exceptions
580 def tree_items(self, wire, tree_id):
580 def tree_items(self, wire, tree_id):
581 repo = self._factory.repo(wire)
581 repo = self._factory.repo(wire)
582 tree = repo[tree_id]
582 tree = repo[tree_id]
583
583
584 result = []
584 result = []
585 for item in tree.iteritems():
585 for item in tree.iteritems():
586 item_sha = item.sha
586 item_sha = item.sha
587 item_mode = item.mode
587 item_mode = item.mode
588
588
589 if FILE_MODE(item_mode) == GIT_LINK:
589 if FILE_MODE(item_mode) == GIT_LINK:
590 item_type = "link"
590 item_type = "link"
591 else:
591 else:
592 item_type = repo[item_sha].type_name
592 item_type = repo[item_sha].type_name
593
593
594 result.append((item.path, item_mode, item_sha, item_type))
594 result.append((item.path, item_mode, item_sha, item_type))
595 return result
595 return result
596
596
597 @reraise_safe_exceptions
597 @reraise_safe_exceptions
598 def update_server_info(self, wire):
598 def update_server_info(self, wire):
599 repo = self._factory.repo(wire)
599 repo = self._factory.repo(wire)
600 update_server_info(repo)
600 update_server_info(repo)
601
601
602 @reraise_safe_exceptions
602 @reraise_safe_exceptions
603 def discover_git_version(self):
603 def discover_git_version(self):
604 stdout, _ = self.run_git_command(
604 stdout, _ = self.run_git_command(
605 {}, ['--version'], _bare=True, _safe=True)
605 {}, ['--version'], _bare=True, _safe=True)
606 prefix = 'git version'
606 prefix = 'git version'
607 if stdout.startswith(prefix):
607 if stdout.startswith(prefix):
608 stdout = stdout[len(prefix):]
608 stdout = stdout[len(prefix):]
609 return stdout.strip()
609 return stdout.strip()
610
610
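A standalone illustration of the prefix trimming performed by discover_git_version(); the version string is only an example.

stdout = 'git version 2.19.2'
prefix = 'git version'
if stdout.startswith(prefix):
    stdout = stdout[len(prefix):]
assert stdout.strip() == '2.19.2'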
611 @reraise_safe_exceptions
611 @reraise_safe_exceptions
612 def run_git_command(self, wire, cmd, **opts):
612 def run_git_command(self, wire, cmd, **opts):
613 path = wire.get('path', None)
613 path = wire.get('path', None)
614
614
615 if path and os.path.isdir(path):
615 if path and os.path.isdir(path):
616 opts['cwd'] = path
616 opts['cwd'] = path
617
617
618 if '_bare' in opts:
618 if '_bare' in opts:
619 _copts = []
619 _copts = []
620 del opts['_bare']
620 del opts['_bare']
621 else:
621 else:
622 _copts = ['-c', 'core.quotepath=false', ]
622 _copts = ['-c', 'core.quotepath=false', ]
623 safe_call = False
623 safe_call = False
624 if '_safe' in opts:
624 if '_safe' in opts:
625 # no exc on failure
625 # no exc on failure
626 del opts['_safe']
626 del opts['_safe']
627 safe_call = True
627 safe_call = True
628
628
629 if '_copts' in opts:
629 if '_copts' in opts:
630 _copts.extend(opts['_copts'] or [])
630 _copts.extend(opts['_copts'] or [])
631 del opts['_copts']
631 del opts['_copts']
632
632
633 gitenv = os.environ.copy()
633 gitenv = os.environ.copy()
634 gitenv.update(opts.pop('extra_env', {}))
634 gitenv.update(opts.pop('extra_env', {}))
635 # GIT_DIR has to be removed from the environment, otherwise git would
635 # operate on it instead of the repository given via cwd
636 if 'GIT_DIR' in gitenv:
636 if 'GIT_DIR' in gitenv:
637 del gitenv['GIT_DIR']
637 del gitenv['GIT_DIR']
638 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
638 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
639
639
640 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
640 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
641
641
642 try:
642 try:
643 _opts = {'env': gitenv, 'shell': False}
643 _opts = {'env': gitenv, 'shell': False}
644 _opts.update(opts)
644 _opts.update(opts)
645 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
645 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
646
646
647 return ''.join(p), ''.join(p.error)
647 return ''.join(p), ''.join(p.error)
648 except (EnvironmentError, OSError) as err:
648 except (EnvironmentError, OSError) as err:
649 cmd = ' '.join(cmd) # human friendly CMD
649 cmd = ' '.join(cmd) # human friendly CMD
650 tb_err = ("Couldn't run git command (%s).\n"
650 tb_err = ("Couldn't run git command (%s).\n"
651 "Original error was:%s\n" % (cmd, err))
651 "Original error was:%s\n" % (cmd, err))
652 log.exception(tb_err)
652 log.exception(tb_err)
653 if safe_call:
653 if safe_call:
654 return '', err
654 return '', err
655 else:
655 else:
656 raise exceptions.VcsException(tb_err)
656 raise exceptions.VcsException(tb_err)
657
657
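A simplified sketch of what run_git_command() above assembles before executing: the git binary, the '-c' config options and the command itself, run against a sanitized environment. This uses plain subprocess instead of subprocessio, and the binary path and URL are placeholders.

import os
import subprocess

def run_git_simplified(git_bin, cmd, copts=None, extra_env=None, cwd=None):
    # build [git, -c, key=val, ...] + cmd and clean the environment,
    # mirroring the steps in run_git_command() above
    copts = copts or ['-c', 'core.quotepath=false']
    env = os.environ.copy()
    env.update(extra_env or {})
    env.pop('GIT_DIR', None)            # same GIT_DIR cleanup as above
    env['GIT_CONFIG_NOGLOBAL'] = '1'
    full_cmd = [git_bin] + copts + cmd
    p = subprocess.Popen(full_cmd, env=env, cwd=cwd, shell=False,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out, err

# e.g. run_git_simplified('git', ['push', 'https://example.com/repo.git', '--mirror'],
#                         extra_env={'GIT_TERMINAL_PROMPT': '0'})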
658 @reraise_safe_exceptions
659 def install_hooks(self, wire, force=False):
660 from vcsserver.hook_utils import install_git_hooks
661 repo = self._factory.repo(wire)
662 return install_git_hooks(repo.path, repo.bare, force_create=force)
663
658
664
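A hypothetical direct invocation of the helper that the new install_hooks() endpoint delegates to; the repository path is made up, and the second positional argument is the bare-repository flag, as passed above.

from vcsserver.hook_utils import install_git_hooks

repo_path = '/srv/repos/example.git'   # placeholder path
install_git_hooks(repo_path, True, force_create=True)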
659 def str_to_dulwich(value):
665 def str_to_dulwich(value):
660 """
666 """
661 Dulwich 0.10.1a requires `unicode` objects to be passed in.
667 Dulwich 0.10.1a requires `unicode` objects to be passed in.
662 """
668 """
663 return value.decode(settings.WIRE_ENCODING)
669 return value.decode(settings.WIRE_ENCODING)
@@ -1,771 +1,776 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 baseui.setconfig('ui', 'paginate', 'never')
56 baseui.setconfig('ui', 'paginate', 'never')
57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # signal in a non-main thread, thus generating a ValueError.
58 # signal in a non-main thread, thus generating a ValueError.
59 baseui.setconfig('worker', 'numcpus', 1)
59 baseui.setconfig('worker', 'numcpus', 1)
60
60
61 # If there is no config for the largefiles extension, we explicitly disable
62 # it here. This overrides settings from the repository's hgrc file. Recent
63 # mercurial versions enable largefiles in hgrc when cloning from a
64 # largefiles repo.
65 if not baseui.hasconfig('extensions', 'largefiles'):
65 if not baseui.hasconfig('extensions', 'largefiles'):
66 log.debug('Explicitly disable largefiles extension for repo.')
66 log.debug('Explicitly disable largefiles extension for repo.')
67 baseui.setconfig('extensions', 'largefiles', '!')
67 baseui.setconfig('extensions', 'largefiles', '!')
68
68
69 return baseui
69 return baseui
70
70
71
71
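A usage sketch for make_ui_from_config(), assuming mercurial is importable: repo_config is an iterable of (section, option, value) tuples, consumed exactly as in the loop above; the values shown are examples only.

repo_config = [
    ('ui', 'username', 'admin@example.com'),
    ('extensions', 'largefiles', '!'),
    ('phases', 'publish', 'False'),
]
baseui = make_ui_from_config(repo_config)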
72 def reraise_safe_exceptions(func):
72 def reraise_safe_exceptions(func):
73 """Decorator for converting mercurial exceptions to something neutral."""
73 """Decorator for converting mercurial exceptions to something neutral."""
74 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
75 try:
75 try:
76 return func(*args, **kwargs)
76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
77 except (Abort, InterventionRequired):
78 raise_from_original(exceptions.AbortException)
78 raise_from_original(exceptions.AbortException)
79 except RepoLookupError:
79 except RepoLookupError:
80 raise_from_original(exceptions.LookupException)
80 raise_from_original(exceptions.LookupException)
81 except RequirementError:
81 except RequirementError:
82 raise_from_original(exceptions.RequirementException)
82 raise_from_original(exceptions.RequirementException)
83 except RepoError:
83 except RepoError:
84 raise_from_original(exceptions.VcsException)
84 raise_from_original(exceptions.VcsException)
85 except LookupError:
85 except LookupError:
86 raise_from_original(exceptions.LookupException)
86 raise_from_original(exceptions.LookupException)
87 except Exception as e:
87 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException)
91 raise
91 raise
92 return wrapper
92 return wrapper
93
93
94
94
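A generic, self-contained rendering of the same decorator idea; the real implementation above additionally uses raise_from_original to preserve the original traceback, which this sketch omits.

import functools

def reraise_as(exc_class):
    # translate any error raised by the wrapped call into one
    # transport-safe exception type
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                raise exc_class(repr(e))
        return wrapper
    return decorator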
95 class MercurialFactory(RepoFactory):
95 class MercurialFactory(RepoFactory):
96
96
97 def _create_config(self, config, hooks=True):
97 def _create_config(self, config, hooks=True):
98 if not hooks:
98 if not hooks:
99 hooks_to_clean = frozenset((
99 hooks_to_clean = frozenset((
100 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 new_config = []
102 new_config = []
103 for section, option, value in config:
103 for section, option, value in config:
104 if section == 'hooks' and option in hooks_to_clean:
104 if section == 'hooks' and option in hooks_to_clean:
105 continue
105 continue
106 new_config.append((section, option, value))
106 new_config.append((section, option, value))
107 config = new_config
107 config = new_config
108
108
109 baseui = make_ui_from_config(config)
109 baseui = make_ui_from_config(config)
110 return baseui
110 return baseui
111
111
112 def _create_repo(self, wire, create):
112 def _create_repo(self, wire, create):
113 baseui = self._create_config(wire["config"])
113 baseui = self._create_config(wire["config"])
114 return localrepository(baseui, wire["path"], create)
114 return localrepository(baseui, wire["path"], create)
115
115
116
116
117 class HgRemote(object):
117 class HgRemote(object):
118
118
119 def __init__(self, factory):
119 def __init__(self, factory):
120 self._factory = factory
120 self._factory = factory
121
121
122 self._bulk_methods = {
122 self._bulk_methods = {
123 "affected_files": self.ctx_files,
123 "affected_files": self.ctx_files,
124 "author": self.ctx_user,
124 "author": self.ctx_user,
125 "branch": self.ctx_branch,
125 "branch": self.ctx_branch,
126 "children": self.ctx_children,
126 "children": self.ctx_children,
127 "date": self.ctx_date,
127 "date": self.ctx_date,
128 "message": self.ctx_description,
128 "message": self.ctx_description,
129 "parents": self.ctx_parents,
129 "parents": self.ctx_parents,
130 "status": self.ctx_status,
130 "status": self.ctx_status,
131 "obsolete": self.ctx_obsolete,
131 "obsolete": self.ctx_obsolete,
132 "phase": self.ctx_phase,
132 "phase": self.ctx_phase,
133 "hidden": self.ctx_hidden,
133 "hidden": self.ctx_hidden,
134 "_file_paths": self.ctx_list,
134 "_file_paths": self.ctx_list,
135 }
135 }
136
136
137 @reraise_safe_exceptions
137 @reraise_safe_exceptions
138 def discover_hg_version(self):
138 def discover_hg_version(self):
139 from mercurial import util
139 from mercurial import util
140 return util.version()
140 return util.version()
141
141
142 @reraise_safe_exceptions
142 @reraise_safe_exceptions
143 def archive_repo(self, archive_path, mtime, file_info, kind):
143 def archive_repo(self, archive_path, mtime, file_info, kind):
144 if kind == "tgz":
144 if kind == "tgz":
145 archiver = archival.tarit(archive_path, mtime, "gz")
145 archiver = archival.tarit(archive_path, mtime, "gz")
146 elif kind == "tbz2":
146 elif kind == "tbz2":
147 archiver = archival.tarit(archive_path, mtime, "bz2")
147 archiver = archival.tarit(archive_path, mtime, "bz2")
148 elif kind == 'zip':
148 elif kind == 'zip':
149 archiver = archival.zipit(archive_path, mtime)
149 archiver = archival.zipit(archive_path, mtime)
150 else:
150 else:
151 raise exceptions.ArchiveException(
151 raise exceptions.ArchiveException(
152 'Remote does not support: "%s".' % kind)
152 'Remote does not support: "%s".' % kind)
153
153
154 for f_path, f_mode, f_is_link, f_content in file_info:
154 for f_path, f_mode, f_is_link, f_content in file_info:
155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 archiver.done()
156 archiver.done()
157
157
158 @reraise_safe_exceptions
158 @reraise_safe_exceptions
159 def bookmarks(self, wire):
159 def bookmarks(self, wire):
160 repo = self._factory.repo(wire)
160 repo = self._factory.repo(wire)
161 return dict(repo._bookmarks)
161 return dict(repo._bookmarks)
162
162
163 @reraise_safe_exceptions
163 @reraise_safe_exceptions
164 def branches(self, wire, normal, closed):
164 def branches(self, wire, normal, closed):
165 repo = self._factory.repo(wire)
165 repo = self._factory.repo(wire)
166 iter_branches = repo.branchmap().iterbranches()
166 iter_branches = repo.branchmap().iterbranches()
167 bt = {}
167 bt = {}
168 for branch_name, _heads, tip, is_closed in iter_branches:
168 for branch_name, _heads, tip, is_closed in iter_branches:
169 if normal and not is_closed:
169 if normal and not is_closed:
170 bt[branch_name] = tip
170 bt[branch_name] = tip
171 if closed and is_closed:
171 if closed and is_closed:
172 bt[branch_name] = tip
172 bt[branch_name] = tip
173
173
174 return bt
174 return bt
175
175
176 @reraise_safe_exceptions
176 @reraise_safe_exceptions
177 def bulk_request(self, wire, rev, pre_load):
177 def bulk_request(self, wire, rev, pre_load):
178 result = {}
178 result = {}
179 for attr in pre_load:
179 for attr in pre_load:
180 try:
180 try:
181 method = self._bulk_methods[attr]
181 method = self._bulk_methods[attr]
182 result[attr] = method(wire, rev)
182 result[attr] = method(wire, rev)
183 except KeyError:
183 except KeyError:
184 raise exceptions.VcsException(
184 raise exceptions.VcsException(
185 'Unknown bulk attribute: "%s"' % attr)
185 'Unknown bulk attribute: "%s"' % attr)
186 return result
186 return result
187
187
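A self-contained sketch of the name-to-callable dispatch used by bulk_request(); the attribute names and return values are stand-ins for the real ctx_* methods registered in _bulk_methods.

bulk_methods = {
    'author': lambda rev: 'alice',    # stand-in for ctx_user
    'branch': lambda rev: 'default',  # stand-in for ctx_branch
}

def bulk(rev, pre_load):
    result = {}
    for attr in pre_load:
        try:
            result[attr] = bulk_methods[attr](rev)
        except KeyError:
            raise ValueError('Unknown bulk attribute: "%s"' % attr)
    return result

# bulk('tip', ['author', 'branch']) -> {'author': 'alice', 'branch': 'default'}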
188 @reraise_safe_exceptions
188 @reraise_safe_exceptions
189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 clone(baseui, source, dest, noupdate=not update_after_clone)
191 clone(baseui, source, dest, noupdate=not update_after_clone)
192
192
193 @reraise_safe_exceptions
193 @reraise_safe_exceptions
194 def commitctx(
194 def commitctx(
195 self, wire, message, parents, commit_time, commit_timezone,
195 self, wire, message, parents, commit_time, commit_timezone,
196 user, files, extra, removed, updated):
196 user, files, extra, removed, updated):
197
197
198 def _filectxfn(_repo, memctx, path):
198 def _filectxfn(_repo, memctx, path):
199 """
199 """
200 Marks the given path as added/changed/removed in the given _repo. This
201 is used by mercurial's internal commit function.
202 """
202 """
203
203
204 # check if this path is removed
204 # check if this path is removed
205 if path in removed:
205 if path in removed:
206 # returning None is a way to mark node for removal
206 # returning None is a way to mark node for removal
207 return None
207 return None
208
208
209 # check if this path is added
209 # check if this path is added
210 for node in updated:
210 for node in updated:
211 if node['path'] == path:
211 if node['path'] == path:
212 return memfilectx(
212 return memfilectx(
213 _repo,
213 _repo,
214 path=node['path'],
214 path=node['path'],
215 data=node['content'],
215 data=node['content'],
216 islink=False,
216 islink=False,
217 isexec=bool(node['mode'] & stat.S_IXUSR),
217 isexec=bool(node['mode'] & stat.S_IXUSR),
218 copied=False,
218 copied=False,
219 memctx=memctx)
219 memctx=memctx)
220
220
221 raise exceptions.AbortException(
221 raise exceptions.AbortException(
222 "Given path haven't been marked as added, "
222 "Given path haven't been marked as added, "
223 "changed or removed (%s)" % path)
223 "changed or removed (%s)" % path)
224
224
225 repo = self._factory.repo(wire)
225 repo = self._factory.repo(wire)
226
226
227 commit_ctx = memctx(
227 commit_ctx = memctx(
228 repo=repo,
228 repo=repo,
229 parents=parents,
229 parents=parents,
230 text=message,
230 text=message,
231 files=files,
231 files=files,
232 filectxfn=_filectxfn,
232 filectxfn=_filectxfn,
233 user=user,
233 user=user,
234 date=(commit_time, commit_timezone),
234 date=(commit_time, commit_timezone),
235 extra=extra)
235 extra=extra)
236
236
237 n = repo.commitctx(commit_ctx)
237 n = repo.commitctx(commit_ctx)
238 new_id = hex(n)
238 new_id = hex(n)
239
239
240 return new_id
240 return new_id
241
241
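Hypothetical shapes of the commitctx() arguments consumed by _filectxfn above; paths, contents and modes are made up, but the keys match what the code reads.

updated = [
    {'path': 'README.rst', 'content': 'hello\n', 'mode': 0100644},
    {'path': 'run.sh', 'content': '#!/bin/sh\n', 'mode': 0100755},  # executable bit set
]
removed = ['old_file.txt']
files = ['README.rst', 'run.sh', 'old_file.txt']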
242 @reraise_safe_exceptions
242 @reraise_safe_exceptions
243 def ctx_branch(self, wire, revision):
243 def ctx_branch(self, wire, revision):
244 repo = self._factory.repo(wire)
244 repo = self._factory.repo(wire)
245 ctx = repo[revision]
245 ctx = repo[revision]
246 return ctx.branch()
246 return ctx.branch()
247
247
248 @reraise_safe_exceptions
248 @reraise_safe_exceptions
249 def ctx_children(self, wire, revision):
249 def ctx_children(self, wire, revision):
250 repo = self._factory.repo(wire)
250 repo = self._factory.repo(wire)
251 ctx = repo[revision]
251 ctx = repo[revision]
252 return [child.rev() for child in ctx.children()]
252 return [child.rev() for child in ctx.children()]
253
253
254 @reraise_safe_exceptions
254 @reraise_safe_exceptions
255 def ctx_date(self, wire, revision):
255 def ctx_date(self, wire, revision):
256 repo = self._factory.repo(wire)
256 repo = self._factory.repo(wire)
257 ctx = repo[revision]
257 ctx = repo[revision]
258 return ctx.date()
258 return ctx.date()
259
259
260 @reraise_safe_exceptions
260 @reraise_safe_exceptions
261 def ctx_description(self, wire, revision):
261 def ctx_description(self, wire, revision):
262 repo = self._factory.repo(wire)
262 repo = self._factory.repo(wire)
263 ctx = repo[revision]
263 ctx = repo[revision]
264 return ctx.description()
264 return ctx.description()
265
265
266 @reraise_safe_exceptions
266 @reraise_safe_exceptions
267 def ctx_diff(
267 def ctx_diff(
268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
270 ctx = repo[revision]
270 ctx = repo[revision]
271 result = ctx.diff(
271 result = ctx.diff(
272 git=git, ignore_whitespace=ignore_whitespace, context=context)
272 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 return list(result)
273 return list(result)
274
274
275 @reraise_safe_exceptions
275 @reraise_safe_exceptions
276 def ctx_files(self, wire, revision):
276 def ctx_files(self, wire, revision):
277 repo = self._factory.repo(wire)
277 repo = self._factory.repo(wire)
278 ctx = repo[revision]
278 ctx = repo[revision]
279 return ctx.files()
279 return ctx.files()
280
280
281 @reraise_safe_exceptions
281 @reraise_safe_exceptions
282 def ctx_list(self, path, revision):
282 def ctx_list(self, path, revision):
283 repo = self._factory.repo(path)
283 repo = self._factory.repo(path)
284 ctx = repo[revision]
284 ctx = repo[revision]
285 return list(ctx)
285 return list(ctx)
286
286
287 @reraise_safe_exceptions
287 @reraise_safe_exceptions
288 def ctx_parents(self, wire, revision):
288 def ctx_parents(self, wire, revision):
289 repo = self._factory.repo(wire)
289 repo = self._factory.repo(wire)
290 ctx = repo[revision]
290 ctx = repo[revision]
291 return [parent.rev() for parent in ctx.parents()]
291 return [parent.rev() for parent in ctx.parents()]
292
292
293 @reraise_safe_exceptions
293 @reraise_safe_exceptions
294 def ctx_phase(self, wire, revision):
294 def ctx_phase(self, wire, revision):
295 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
296 ctx = repo[revision]
296 ctx = repo[revision]
297 # public=0, draft=1, secret=2
298 return ctx.phase()
298 return ctx.phase()
299
299
300 @reraise_safe_exceptions
300 @reraise_safe_exceptions
301 def ctx_obsolete(self, wire, revision):
301 def ctx_obsolete(self, wire, revision):
302 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
303 ctx = repo[revision]
303 ctx = repo[revision]
304 return ctx.obsolete()
304 return ctx.obsolete()
305
305
306 @reraise_safe_exceptions
306 @reraise_safe_exceptions
307 def ctx_hidden(self, wire, revision):
307 def ctx_hidden(self, wire, revision):
308 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
309 ctx = repo[revision]
309 ctx = repo[revision]
310 return ctx.hidden()
310 return ctx.hidden()
311
311
312 @reraise_safe_exceptions
312 @reraise_safe_exceptions
313 def ctx_substate(self, wire, revision):
313 def ctx_substate(self, wire, revision):
314 repo = self._factory.repo(wire)
314 repo = self._factory.repo(wire)
315 ctx = repo[revision]
315 ctx = repo[revision]
316 return ctx.substate
316 return ctx.substate
317
317
318 @reraise_safe_exceptions
318 @reraise_safe_exceptions
319 def ctx_status(self, wire, revision):
319 def ctx_status(self, wire, revision):
320 repo = self._factory.repo(wire)
320 repo = self._factory.repo(wire)
321 ctx = repo[revision]
321 ctx = repo[revision]
322 status = repo[ctx.p1().node()].status(other=ctx.node())
322 status = repo[ctx.p1().node()].status(other=ctx.node())
323 # the status object (an odd, custom named tuple in mercurial) is not
324 # correctly serializable; we turn it into a list, as the underlying
325 # API expects a list
326 return list(status)
326 return list(status)
327
327
328 @reraise_safe_exceptions
328 @reraise_safe_exceptions
329 def ctx_user(self, wire, revision):
329 def ctx_user(self, wire, revision):
330 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
331 ctx = repo[revision]
331 ctx = repo[revision]
332 return ctx.user()
332 return ctx.user()
333
333
334 @reraise_safe_exceptions
334 @reraise_safe_exceptions
335 def check_url(self, url, config):
335 def check_url(self, url, config):
336 _proto = None
336 _proto = None
337 if '+' in url[:url.find('://')]:
337 if '+' in url[:url.find('://')]:
338 _proto = url[0:url.find('+')]
338 _proto = url[0:url.find('+')]
339 url = url[url.find('+') + 1:]
339 url = url[url.find('+') + 1:]
340 handlers = []
340 handlers = []
341 url_obj = url_parser(url)
341 url_obj = url_parser(url)
342 test_uri, authinfo = url_obj.authinfo()
342 test_uri, authinfo = url_obj.authinfo()
343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 url_obj.query = obfuscate_qs(url_obj.query)
344 url_obj.query = obfuscate_qs(url_obj.query)
345
345
346 cleaned_uri = str(url_obj)
346 cleaned_uri = str(url_obj)
347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348
348
349 if authinfo:
349 if authinfo:
350 # create a password manager
350 # create a password manager
351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 passmgr.add_password(*authinfo)
352 passmgr.add_password(*authinfo)
353
353
354 handlers.extend((httpbasicauthhandler(passmgr),
354 handlers.extend((httpbasicauthhandler(passmgr),
355 httpdigestauthhandler(passmgr)))
355 httpdigestauthhandler(passmgr)))
356
356
357 o = urllib2.build_opener(*handlers)
357 o = urllib2.build_opener(*handlers)
358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 ('Accept', 'application/mercurial-0.1')]
359 ('Accept', 'application/mercurial-0.1')]
360
360
361 q = {"cmd": 'between'}
361 q = {"cmd": 'between'}
362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 qs = '?%s' % urllib.urlencode(q)
363 qs = '?%s' % urllib.urlencode(q)
364 cu = "%s%s" % (test_uri, qs)
364 cu = "%s%s" % (test_uri, qs)
365 req = urllib2.Request(cu, None, {})
365 req = urllib2.Request(cu, None, {})
366
366
367 try:
367 try:
368 log.debug("Trying to open URL %s", cleaned_uri)
368 log.debug("Trying to open URL %s", cleaned_uri)
369 resp = o.open(req)
369 resp = o.open(req)
370 if resp.code != 200:
370 if resp.code != 200:
371 raise exceptions.URLError('Return Code is not 200')
371 raise exceptions.URLError('Return Code is not 200')
372 except Exception as e:
372 except Exception as e:
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 # means it cannot be cloned
374 # means it cannot be cloned
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376
376
377 # now check if it's a proper hg repo, but don't do it for svn
377 # now check if it's a proper hg repo, but don't do it for svn
378 try:
378 try:
379 if _proto == 'svn':
379 if _proto == 'svn':
380 pass
380 pass
381 else:
381 else:
382 # check for pure hg repos
382 # check for pure hg repos
383 log.debug(
383 log.debug(
384 "Verifying if URL is a Mercurial repository: %s",
384 "Verifying if URL is a Mercurial repository: %s",
385 cleaned_uri)
385 cleaned_uri)
386 httppeer(make_ui_from_config(config), url).lookup('tip')
386 httppeer(make_ui_from_config(config), url).lookup('tip')
387 except Exception as e:
387 except Exception as e:
388 log.warning("URL is not a valid Mercurial repository: %s",
388 log.warning("URL is not a valid Mercurial repository: %s",
389 cleaned_uri)
389 cleaned_uri)
390 raise exceptions.URLError(
390 raise exceptions.URLError(
391 "url [%s] does not look like an hg repo org_exc: %s"
391 "url [%s] does not look like an hg repo org_exc: %s"
392 % (cleaned_uri, e))
392 % (cleaned_uri, e))
393
393
394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
395 return True
395 return True
396
396
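A standalone illustration of the scheme-prefix handling at the top of check_url(); the URL is an example.

url = 'svn+http://example.com/repo'
_proto = None
if '+' in url[:url.find('://')]:
    _proto = url[0:url.find('+')]
    url = url[url.find('+') + 1:]
assert (_proto, url) == ('svn', 'http://example.com/repo')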
397 @reraise_safe_exceptions
397 @reraise_safe_exceptions
398 def diff(
398 def diff(
399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
400 context):
400 context):
401 repo = self._factory.repo(wire)
401 repo = self._factory.repo(wire)
402
402
403 if file_filter:
403 if file_filter:
404 match_filter = match(file_filter[0], '', [file_filter[1]])
404 match_filter = match(file_filter[0], '', [file_filter[1]])
405 else:
405 else:
406 match_filter = file_filter
406 match_filter = file_filter
407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
408
408
409 try:
409 try:
410 return "".join(patch.diff(
410 return "".join(patch.diff(
411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 except RepoLookupError:
412 except RepoLookupError:
413 raise exceptions.LookupException()
413 raise exceptions.LookupException()
414
414
415 @reraise_safe_exceptions
415 @reraise_safe_exceptions
416 def file_history(self, wire, revision, path, limit):
416 def file_history(self, wire, revision, path, limit):
417 repo = self._factory.repo(wire)
417 repo = self._factory.repo(wire)
418
418
419 ctx = repo[revision]
419 ctx = repo[revision]
420 fctx = ctx.filectx(path)
420 fctx = ctx.filectx(path)
421
421
422 def history_iter():
422 def history_iter():
423 limit_rev = fctx.rev()
423 limit_rev = fctx.rev()
424 for obj in reversed(list(fctx.filelog())):
424 for obj in reversed(list(fctx.filelog())):
425 obj = fctx.filectx(obj)
425 obj = fctx.filectx(obj)
426 if limit_rev >= obj.rev():
426 if limit_rev >= obj.rev():
427 yield obj
427 yield obj
428
428
429 history = []
429 history = []
430 for cnt, obj in enumerate(history_iter()):
430 for cnt, obj in enumerate(history_iter()):
431 if limit and cnt >= limit:
431 if limit and cnt >= limit:
432 break
432 break
433 history.append(hex(obj.node()))
433 history.append(hex(obj.node()))
434
434
435 return [x for x in history]
435 return [x for x in history]
436
436
437 @reraise_safe_exceptions
437 @reraise_safe_exceptions
438 def file_history_untill(self, wire, revision, path, limit):
438 def file_history_untill(self, wire, revision, path, limit):
439 repo = self._factory.repo(wire)
439 repo = self._factory.repo(wire)
440 ctx = repo[revision]
440 ctx = repo[revision]
441 fctx = ctx.filectx(path)
441 fctx = ctx.filectx(path)
442
442
443 file_log = list(fctx.filelog())
443 file_log = list(fctx.filelog())
444 if limit:
444 if limit:
445 # Limit to the last n items
445 # Limit to the last n items
446 file_log = file_log[-limit:]
446 file_log = file_log[-limit:]
447
447
448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def fctx_annotate(self, wire, revision, path):
451 def fctx_annotate(self, wire, revision, path):
452 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
453 ctx = repo[revision]
453 ctx = repo[revision]
454 fctx = ctx.filectx(path)
454 fctx = ctx.filectx(path)
455
455
456 result = []
456 result = []
457 for i, (a_line, content) in enumerate(fctx.annotate()):
457 for i, (a_line, content) in enumerate(fctx.annotate()):
458 ln_no = i + 1
458 ln_no = i + 1
459 sha = hex(a_line.fctx.node())
459 sha = hex(a_line.fctx.node())
460 result.append((ln_no, sha, content))
460 result.append((ln_no, sha, content))
461 return result
461 return result
462
462
463 @reraise_safe_exceptions
463 @reraise_safe_exceptions
464 def fctx_data(self, wire, revision, path):
464 def fctx_data(self, wire, revision, path):
465 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
466 ctx = repo[revision]
466 ctx = repo[revision]
467 fctx = ctx.filectx(path)
467 fctx = ctx.filectx(path)
468 return fctx.data()
468 return fctx.data()
469
469
470 @reraise_safe_exceptions
470 @reraise_safe_exceptions
471 def fctx_flags(self, wire, revision, path):
471 def fctx_flags(self, wire, revision, path):
472 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
473 ctx = repo[revision]
473 ctx = repo[revision]
474 fctx = ctx.filectx(path)
474 fctx = ctx.filectx(path)
475 return fctx.flags()
475 return fctx.flags()
476
476
477 @reraise_safe_exceptions
477 @reraise_safe_exceptions
478 def fctx_size(self, wire, revision, path):
478 def fctx_size(self, wire, revision, path):
479 repo = self._factory.repo(wire)
479 repo = self._factory.repo(wire)
480 ctx = repo[revision]
480 ctx = repo[revision]
481 fctx = ctx.filectx(path)
481 fctx = ctx.filectx(path)
482 return fctx.size()
482 return fctx.size()
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def get_all_commit_ids(self, wire, name):
485 def get_all_commit_ids(self, wire, name):
486 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
487 revs = repo.filtered(name).changelog.index
487 revs = repo.filtered(name).changelog.index
488 return map(lambda x: hex(x[7]), revs)[:-1]
488 return map(lambda x: hex(x[7]), revs)[:-1]
489
489
490 @reraise_safe_exceptions
490 @reraise_safe_exceptions
491 def get_config_value(self, wire, section, name, untrusted=False):
491 def get_config_value(self, wire, section, name, untrusted=False):
492 repo = self._factory.repo(wire)
492 repo = self._factory.repo(wire)
493 return repo.ui.config(section, name, untrusted=untrusted)
493 return repo.ui.config(section, name, untrusted=untrusted)
494
494
495 @reraise_safe_exceptions
495 @reraise_safe_exceptions
496 def get_config_bool(self, wire, section, name, untrusted=False):
496 def get_config_bool(self, wire, section, name, untrusted=False):
497 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
498 return repo.ui.configbool(section, name, untrusted=untrusted)
498 return repo.ui.configbool(section, name, untrusted=untrusted)
499
499
500 @reraise_safe_exceptions
500 @reraise_safe_exceptions
501 def get_config_list(self, wire, section, name, untrusted=False):
501 def get_config_list(self, wire, section, name, untrusted=False):
502 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
503 return repo.ui.configlist(section, name, untrusted=untrusted)
503 return repo.ui.configlist(section, name, untrusted=untrusted)
504
504
505 @reraise_safe_exceptions
505 @reraise_safe_exceptions
506 def is_large_file(self, wire, path):
506 def is_large_file(self, wire, path):
507 return largefiles.lfutil.isstandin(path)
507 return largefiles.lfutil.isstandin(path)
508
508
509 @reraise_safe_exceptions
509 @reraise_safe_exceptions
510 def in_largefiles_store(self, wire, sha):
510 def in_largefiles_store(self, wire, sha):
511 repo = self._factory.repo(wire)
511 repo = self._factory.repo(wire)
512 return largefiles.lfutil.instore(repo, sha)
512 return largefiles.lfutil.instore(repo, sha)
513
513
514 @reraise_safe_exceptions
514 @reraise_safe_exceptions
515 def in_user_cache(self, wire, sha):
515 def in_user_cache(self, wire, sha):
516 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
517 return largefiles.lfutil.inusercache(repo.ui, sha)
517 return largefiles.lfutil.inusercache(repo.ui, sha)
518
518
519 @reraise_safe_exceptions
519 @reraise_safe_exceptions
520 def store_path(self, wire, sha):
520 def store_path(self, wire, sha):
521 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
522 return largefiles.lfutil.storepath(repo, sha)
522 return largefiles.lfutil.storepath(repo, sha)
523
523
524 @reraise_safe_exceptions
524 @reraise_safe_exceptions
525 def link(self, wire, sha, path):
525 def link(self, wire, sha, path):
526 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
527 largefiles.lfutil.link(
527 largefiles.lfutil.link(
528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529
529
530 @reraise_safe_exceptions
530 @reraise_safe_exceptions
531 def localrepository(self, wire, create=False):
531 def localrepository(self, wire, create=False):
532 self._factory.repo(wire, create=create)
532 self._factory.repo(wire, create=create)
533
533
534 @reraise_safe_exceptions
534 @reraise_safe_exceptions
535 def lookup(self, wire, revision, both):
535 def lookup(self, wire, revision, both):
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 if isinstance(revision, float):
537 if isinstance(revision, float):
538 revision = long(revision)
538 revision = long(revision)
539 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
540 try:
540 try:
541 ctx = repo[revision]
541 ctx = repo[revision]
542 except RepoLookupError:
542 except RepoLookupError:
543 raise exceptions.LookupException(revision)
543 raise exceptions.LookupException(revision)
544 except LookupError as e:
544 except LookupError as e:
545 raise exceptions.LookupException(e.name)
545 raise exceptions.LookupException(e.name)
546
546
547 if not both:
547 if not both:
548 return ctx.hex()
548 return ctx.hex()
549
549
550 ctx = repo[ctx.hex()]
550 ctx = repo[ctx.hex()]
551 return ctx.hex(), ctx.rev()
551 return ctx.hex(), ctx.rev()
552
552
553 @reraise_safe_exceptions
553 @reraise_safe_exceptions
554 def pull(self, wire, url, commit_ids=None):
554 def pull(self, wire, url, commit_ids=None):
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556 # Disable any prompts for this repo
556 # Disable any prompts for this repo
557 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
557 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
558
558
559 remote = peer(repo, {}, url)
559 remote = peer(repo, {}, url)
560 # Disable any prompts for this remote
560 # Disable any prompts for this remote
561 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
561 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
562
562
563 if commit_ids:
563 if commit_ids:
564 commit_ids = [bin(commit_id) for commit_id in commit_ids]
564 commit_ids = [bin(commit_id) for commit_id in commit_ids]
565
565
566 return exchange.pull(
566 return exchange.pull(
567 repo, remote, heads=commit_ids, force=None).cgresult
567 repo, remote, heads=commit_ids, force=None).cgresult
568
568
569 @reraise_safe_exceptions
569 @reraise_safe_exceptions
570 def sync_push(self, wire, url):
570 def sync_push(self, wire, url):
571 if self.check_url(url, wire['config']):
571 if self.check_url(url, wire['config']):
572 repo = self._factory.repo(wire)
572 repo = self._factory.repo(wire)
573
573
574 # Disable any prompts for this repo
574 # Disable any prompts for this repo
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576
576
577 bookmarks = dict(repo._bookmarks).keys()
577 bookmarks = dict(repo._bookmarks).keys()
578 remote = peer(repo, {}, url)
578 remote = peer(repo, {}, url)
579 # Disable any prompts for this remote
579 # Disable any prompts for this remote
580 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
580 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
581
581
582 return exchange.push(
582 return exchange.push(
583 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
583 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
584
584
585 @reraise_safe_exceptions
585 @reraise_safe_exceptions
586 def revision(self, wire, rev):
586 def revision(self, wire, rev):
587 repo = self._factory.repo(wire)
587 repo = self._factory.repo(wire)
588 ctx = repo[rev]
588 ctx = repo[rev]
589 return ctx.rev()
589 return ctx.rev()
590
590
591 @reraise_safe_exceptions
591 @reraise_safe_exceptions
592 def rev_range(self, wire, filter):
592 def rev_range(self, wire, filter):
593 repo = self._factory.repo(wire)
593 repo = self._factory.repo(wire)
594 revisions = [rev for rev in revrange(repo, filter)]
594 revisions = [rev for rev in revrange(repo, filter)]
595 return revisions
595 return revisions
596
596
597 @reraise_safe_exceptions
597 @reraise_safe_exceptions
598 def rev_range_hash(self, wire, node):
598 def rev_range_hash(self, wire, node):
599 repo = self._factory.repo(wire)
599 repo = self._factory.repo(wire)
600
600
601 def get_revs(repo, rev_opt):
601 def get_revs(repo, rev_opt):
602 if rev_opt:
602 if rev_opt:
603 revs = revrange(repo, rev_opt)
603 revs = revrange(repo, rev_opt)
604 if len(revs) == 0:
604 if len(revs) == 0:
605 return (nullrev, nullrev)
605 return (nullrev, nullrev)
606 return max(revs), min(revs)
606 return max(revs), min(revs)
607 else:
607 else:
608 return len(repo) - 1, 0
608 return len(repo) - 1, 0
609
609
610 stop, start = get_revs(repo, [node + ':'])
610 stop, start = get_revs(repo, [node + ':'])
611 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
611 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
612 return revs
612 return revs
613
613
614 @reraise_safe_exceptions
614 @reraise_safe_exceptions
615 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
615 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
616 other_path = kwargs.pop('other_path', None)
616 other_path = kwargs.pop('other_path', None)
617
617
618 # case when we want to compare two independent repositories
618 # case when we want to compare two independent repositories
619 if other_path and other_path != wire["path"]:
619 if other_path and other_path != wire["path"]:
620 baseui = self._factory._create_config(wire["config"])
620 baseui = self._factory._create_config(wire["config"])
621 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
621 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
622 else:
622 else:
623 repo = self._factory.repo(wire)
623 repo = self._factory.repo(wire)
624 return list(repo.revs(rev_spec, *args))
624 return list(repo.revs(rev_spec, *args))
625
625
626 @reraise_safe_exceptions
626 @reraise_safe_exceptions
627 def strip(self, wire, revision, update, backup):
627 def strip(self, wire, revision, update, backup):
628 repo = self._factory.repo(wire)
628 repo = self._factory.repo(wire)
629 ctx = repo[revision]
629 ctx = repo[revision]
630 hgext_strip(
630 hgext_strip(
631 repo.baseui, repo, ctx.node(), update=update, backup=backup)
631 repo.baseui, repo, ctx.node(), update=update, backup=backup)
632
632
633 @reraise_safe_exceptions
633 @reraise_safe_exceptions
634 def verify(self, wire,):
634 def verify(self, wire,):
635 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
636 baseui = self._factory._create_config(wire['config'])
636 baseui = self._factory._create_config(wire['config'])
637 baseui.setconfig('ui', 'quiet', 'false')
637 baseui.setconfig('ui', 'quiet', 'false')
638 output = io.BytesIO()
638 output = io.BytesIO()
639
639
640 def write(data, **unused_kwargs):
640 def write(data, **unused_kwargs):
641 output.write(data)
641 output.write(data)
642 baseui.write = write
642 baseui.write = write
643
643
644 repo.ui = baseui
644 repo.ui = baseui
645 verify.verify(repo)
645 verify.verify(repo)
646 return output.getvalue()
646 return output.getvalue()
647
647
648 @reraise_safe_exceptions
648 @reraise_safe_exceptions
649 def tag(self, wire, name, revision, message, local, user,
649 def tag(self, wire, name, revision, message, local, user,
650 tag_time, tag_timezone):
650 tag_time, tag_timezone):
651 repo = self._factory.repo(wire)
651 repo = self._factory.repo(wire)
652 ctx = repo[revision]
652 ctx = repo[revision]
653 node = ctx.node()
653 node = ctx.node()
654
654
655 date = (tag_time, tag_timezone)
655 date = (tag_time, tag_timezone)
656 try:
656 try:
657 hg_tag.tag(repo, name, node, message, local, user, date)
657 hg_tag.tag(repo, name, node, message, local, user, date)
658 except Abort as e:
658 except Abort as e:
659 log.exception("Tag operation aborted")
659 log.exception("Tag operation aborted")
660 # Exception can contain unicode which we convert
660 # Exception can contain unicode which we convert
661 raise exceptions.AbortException(repr(e))
661 raise exceptions.AbortException(repr(e))
662
662
663 @reraise_safe_exceptions
663 @reraise_safe_exceptions
664 def tags(self, wire):
664 def tags(self, wire):
665 repo = self._factory.repo(wire)
665 repo = self._factory.repo(wire)
666 return repo.tags()
666 return repo.tags()
667
667
668 @reraise_safe_exceptions
668 @reraise_safe_exceptions
669 def update(self, wire, node=None, clean=False):
669 def update(self, wire, node=None, clean=False):
670 repo = self._factory.repo(wire)
670 repo = self._factory.repo(wire)
671 baseui = self._factory._create_config(wire['config'])
671 baseui = self._factory._create_config(wire['config'])
672 commands.update(baseui, repo, node=node, clean=clean)
672 commands.update(baseui, repo, node=node, clean=clean)
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def identify(self, wire):
675 def identify(self, wire):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'])
677 baseui = self._factory._create_config(wire['config'])
678 output = io.BytesIO()
678 output = io.BytesIO()
679 baseui.write = output.write
679 baseui.write = output.write
680 # This is required to get a full node id
680 # This is required to get a full node id
681 baseui.debugflag = True
681 baseui.debugflag = True
682 commands.identify(baseui, repo, id=True)
682 commands.identify(baseui, repo, id=True)
683
683
684 return output.getvalue()
684 return output.getvalue()
685
685
686 @reraise_safe_exceptions
686 @reraise_safe_exceptions
687 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
687 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
688 hooks=True):
688 hooks=True):
689 repo = self._factory.repo(wire)
689 repo = self._factory.repo(wire)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
691
691
692 # Mercurial internally has a lot of logic that checks ONLY whether an
693 # option is defined, so we only pass the options that are actually set
694 opts = {}
694 opts = {}
695 if bookmark:
695 if bookmark:
696 opts['bookmark'] = bookmark
696 opts['bookmark'] = bookmark
697 if branch:
697 if branch:
698 opts['branch'] = branch
698 opts['branch'] = branch
699 if revision:
699 if revision:
700 opts['rev'] = revision
700 opts['rev'] = revision
701
701
702 commands.pull(baseui, repo, source, **opts)
702 commands.pull(baseui, repo, source, **opts)
703
703
704 @reraise_safe_exceptions
704 @reraise_safe_exceptions
705 def heads(self, wire, branch=None):
705 def heads(self, wire, branch=None):
706 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
707 baseui = self._factory._create_config(wire['config'])
707 baseui = self._factory._create_config(wire['config'])
708 output = io.BytesIO()
708 output = io.BytesIO()
709
709
710 def write(data, **unused_kwargs):
710 def write(data, **unused_kwargs):
711 output.write(data)
711 output.write(data)
712
712
713 baseui.write = write
713 baseui.write = write
714 if branch:
714 if branch:
715 args = [branch]
715 args = [branch]
716 else:
716 else:
717 args = []
717 args = []
718 commands.heads(baseui, repo, template='{node} ', *args)
718 commands.heads(baseui, repo, template='{node} ', *args)
719
719
720 return output.getvalue()
720 return output.getvalue()
721
721
722 @reraise_safe_exceptions
722 @reraise_safe_exceptions
723 def ancestor(self, wire, revision1, revision2):
723 def ancestor(self, wire, revision1, revision2):
724 repo = self._factory.repo(wire)
724 repo = self._factory.repo(wire)
725 changelog = repo.changelog
725 changelog = repo.changelog
726 lookup = repo.lookup
726 lookup = repo.lookup
727 a = changelog.ancestor(lookup(revision1), lookup(revision2))
727 a = changelog.ancestor(lookup(revision1), lookup(revision2))
728 return hex(a)
728 return hex(a)
729
729
730 @reraise_safe_exceptions
730 @reraise_safe_exceptions
731 def push(self, wire, revisions, dest_path, hooks=True,
731 def push(self, wire, revisions, dest_path, hooks=True,
732 push_branches=False):
732 push_branches=False):
733 repo = self._factory.repo(wire)
733 repo = self._factory.repo(wire)
734 baseui = self._factory._create_config(wire['config'], hooks=hooks)
734 baseui = self._factory._create_config(wire['config'], hooks=hooks)
735 commands.push(baseui, repo, dest=dest_path, rev=revisions,
735 commands.push(baseui, repo, dest=dest_path, rev=revisions,
736 new_branch=push_branches)
736 new_branch=push_branches)
737
737
738 @reraise_safe_exceptions
738 @reraise_safe_exceptions
739 def merge(self, wire, revision):
739 def merge(self, wire, revision):
740 repo = self._factory.repo(wire)
740 repo = self._factory.repo(wire)
741 baseui = self._factory._create_config(wire['config'])
741 baseui = self._factory._create_config(wire['config'])
742 repo.ui.setconfig('ui', 'merge', 'internal:dump')
742 repo.ui.setconfig('ui', 'merge', 'internal:dump')
743
743
744 # When sub repositories are used, mercurial prompts the user in
744 # When sub repositories are used, mercurial prompts the user in
745 # case of merge conflicts or different sub repository sources. By
745 # case of merge conflicts or different sub repository sources. By
746 # setting the interactive flag to `False` mercurial doesn't prompt the
746 # setting the interactive flag to `False` mercurial doesn't prompt the
747 # user but instead uses a default value.
747 # user but instead uses a default value.
748 repo.ui.setconfig('ui', 'interactive', False)
748 repo.ui.setconfig('ui', 'interactive', False)
749
749
750 commands.merge(baseui, repo, rev=revision)
750 commands.merge(baseui, repo, rev=revision)
751
751
752 @reraise_safe_exceptions
752 @reraise_safe_exceptions
753 def commit(self, wire, message, username, close_branch=False):
753 def commit(self, wire, message, username, close_branch=False):
754 repo = self._factory.repo(wire)
754 repo = self._factory.repo(wire)
755 baseui = self._factory._create_config(wire['config'])
755 baseui = self._factory._create_config(wire['config'])
756 repo.ui.setconfig('ui', 'username', username)
756 repo.ui.setconfig('ui', 'username', username)
757 commands.commit(baseui, repo, message=message, close_branch=close_branch)
757 commands.commit(baseui, repo, message=message, close_branch=close_branch)
758
758
759 @reraise_safe_exceptions
759 @reraise_safe_exceptions
760 def rebase(self, wire, source=None, dest=None, abort=False):
760 def rebase(self, wire, source=None, dest=None, abort=False):
761 repo = self._factory.repo(wire)
761 repo = self._factory.repo(wire)
762 baseui = self._factory._create_config(wire['config'])
762 baseui = self._factory._create_config(wire['config'])
763 repo.ui.setconfig('ui', 'merge', 'internal:dump')
763 repo.ui.setconfig('ui', 'merge', 'internal:dump')
764 rebase.rebase(
764 rebase.rebase(
765 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
765 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
766
766
767 @reraise_safe_exceptions
767 @reraise_safe_exceptions
768 def bookmark(self, wire, bookmark, revision=None):
768 def bookmark(self, wire, bookmark, revision=None):
769 repo = self._factory.repo(wire)
769 repo = self._factory.repo(wire)
770 baseui = self._factory._create_config(wire['config'])
770 baseui = self._factory._create_config(wire['config'])
771 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
771 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
772
773 @reraise_safe_exceptions
774 def install_hooks(self, wire, force=False):
775 # we don't need any special hooks for Mercurial
776 pass
@@ -1,467 +1,541 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import json
24 import logging
23 import logging
25 import collections
24 import collections
26 import importlib
25 import importlib
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
52 log.error('Connection failed on %s', connection)
53 raise
50 response = connection.getresponse()
54 response = connection.getresponse()
51 return json.loads(response.read())
55 return json.loads(response.read())
52
56
53 def _serialize(self, hook_name, extras):
57 def _serialize(self, hook_name, extras):
54 data = {
58 data = {
55 'method': hook_name,
59 'method': hook_name,
56 'extras': extras
60 'extras': extras
57 }
61 }
58 return json.dumps(data)
62 return json.dumps(data)
59
63
60
64
61 class HooksDummyClient(object):
65 class HooksDummyClient(object):
62 def __init__(self, hooks_module):
66 def __init__(self, hooks_module):
63 self._hooks_module = importlib.import_module(hooks_module)
67 self._hooks_module = importlib.import_module(hooks_module)
64
68
65 def __call__(self, hook_name, extras):
69 def __call__(self, hook_name, extras):
66 with self._hooks_module.Hooks() as hooks:
70 with self._hooks_module.Hooks() as hooks:
67 return getattr(hooks, hook_name)(extras)
71 return getattr(hooks, hook_name)(extras)
68
72
69
73
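# A minimal usage sketch of the HTTP callback client above; the host:port
# and the extras keys are illustrative assumptions, not values defined in
# this module.
def _example_hook_client_usage():
    extras = {'repository': 'my-repo', 'username': 'admin'}  # assumed keys
    http_client = HooksHttpClient('127.0.0.1:9900')          # assumed host:port
    # the hooks daemon answers with a JSON document such as
    # {'status': 0, 'output': '', 'exception': None}
    return http_client('pre_push', extras)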
70 class RemoteMessageWriter(object):
74 class RemoteMessageWriter(object):
71 """Writer base class."""
75 """Writer base class."""
72 def write(self, message):
76 def write(self, message):
73 raise NotImplementedError()
77 raise NotImplementedError()
74
78
75
79
76 class HgMessageWriter(RemoteMessageWriter):
80 class HgMessageWriter(RemoteMessageWriter):
77 """Writer that knows how to send messages to mercurial clients."""
81 """Writer that knows how to send messages to mercurial clients."""
78
82
79 def __init__(self, ui):
83 def __init__(self, ui):
80 self.ui = ui
84 self.ui = ui
81
85
82 def write(self, message):
86 def write(self, message):
83 # TODO: Check why the quiet flag is set by default.
87 # TODO: Check why the quiet flag is set by default.
84 old = self.ui.quiet
88 old = self.ui.quiet
85 self.ui.quiet = False
89 self.ui.quiet = False
86 self.ui.status(message.encode('utf-8'))
90 self.ui.status(message.encode('utf-8'))
87 self.ui.quiet = old
91 self.ui.quiet = old
88
92
89
93
90 class GitMessageWriter(RemoteMessageWriter):
94 class GitMessageWriter(RemoteMessageWriter):
91 """Writer that knows how to send messages to git clients."""
95 """Writer that knows how to send messages to git clients."""
92
96
93 def __init__(self, stdout=None):
97 def __init__(self, stdout=None):
94 self.stdout = stdout or sys.stdout
98 self.stdout = stdout or sys.stdout
95
99
96 def write(self, message):
100 def write(self, message):
97 self.stdout.write(message.encode('utf-8'))
101 self.stdout.write(message.encode('utf-8'))
98
102
99
103
104 class SvnMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to svn clients."""
106
107 def __init__(self, stderr=None):
108 # SVN needs data sent to stderr for back-to-client messaging
109 self.stderr = stderr or sys.stderr
110
111 def write(self, message):
112 self.stderr.write(message.encode('utf-8'))
113
114
100 def _handle_exception(result):
115 def _handle_exception(result):
101 exception_class = result.get('exception')
116 exception_class = result.get('exception')
102 exception_traceback = result.get('exception_traceback')
117 exception_traceback = result.get('exception_traceback')
103
118
104 if exception_traceback:
119 if exception_traceback:
105 log.error('Got traceback from remote call:%s', exception_traceback)
120 log.error('Got traceback from remote call:%s', exception_traceback)
106
121
107 if exception_class == 'HTTPLockedRC':
122 if exception_class == 'HTTPLockedRC':
108 raise exceptions.RepositoryLockedException(*result['exception_args'])
123 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 elif exception_class == 'RepositoryError':
124 elif exception_class == 'RepositoryError':
110 raise exceptions.VcsException(*result['exception_args'])
125 raise exceptions.VcsException(*result['exception_args'])
111 elif exception_class:
126 elif exception_class:
112 raise Exception('Got remote exception "%s" with args "%s"' %
127 raise Exception('Got remote exception "%s" with args "%s"' %
113 (exception_class, result['exception_args']))
128 (exception_class, result['exception_args']))
114
129
115
130
116 def _get_hooks_client(extras):
131 def _get_hooks_client(extras):
117 if 'hooks_uri' in extras:
132 if 'hooks_uri' in extras:
118 protocol = extras.get('hooks_protocol')
133 protocol = extras.get('hooks_protocol')
119 return HooksHttpClient(extras['hooks_uri'])
134 return HooksHttpClient(extras['hooks_uri'])
120 else:
135 else:
121 return HooksDummyClient(extras['hooks_module'])
136 return HooksDummyClient(extras['hooks_module'])
122
137
123
138
124 def _call_hook(hook_name, extras, writer):
139 def _call_hook(hook_name, extras, writer):
125 hooks = _get_hooks_client(extras)
140 hooks_client = _get_hooks_client(extras)
126 result = hooks(hook_name, extras)
141 log.debug('Hooks, using client:%s', hooks_client)
142 result = hooks_client(hook_name, extras)
127 log.debug('Hooks got result: %s', result)
143 log.debug('Hooks got result: %s', result)
128 writer.write(result['output'])
144 writer.write(result['output'])
129 _handle_exception(result)
145 _handle_exception(result)
130
146
131 return result['status']
147 return result['status']
132
148
133
149
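# The hook clients above return a plain dict: _call_hook() relays 'output'
# through the matching *MessageWriter and returns 'status', while
# _handle_exception() inspects the exception fields. A sketch of the
# expected shape (the concrete values are illustrative assumptions):
EXAMPLE_HOOK_RESULT = {
    'status': 0,              # exit code handed back to the vcs client
    'output': '',             # message shown to the pushing/pulling user
    'exception': None,        # e.g. 'HTTPLockedRC' or 'RepositoryError'
    'exception_args': [],
    'exception_traceback': None,
}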
134 def _extras_from_ui(ui):
150 def _extras_from_ui(ui):
135 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
136 if not hook_data:
152 if not hook_data:
137 # maybe it's inside the environment?
153 # maybe it's inside the environment?
138 env_hook_data = os.environ.get('RC_SCM_DATA')
154 env_hook_data = os.environ.get('RC_SCM_DATA')
139 if env_hook_data:
155 if env_hook_data:
140 hook_data = env_hook_data
156 hook_data = env_hook_data
141
157
142 extras = {}
158 extras = {}
143 if hook_data:
159 if hook_data:
144 extras = json.loads(hook_data)
160 extras = json.loads(hook_data)
145 return extras
161 return extras
146
162
147
163
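# _extras_from_ui() looks for the RC_SCM_DATA blob first in the repo ui
# config and then in the process environment. A sketch of providing it via
# the environment; the keys inside the JSON document are assumptions:
def _example_export_scm_data():
    scm_data = {'username': 'admin', 'repository': 'my-repo',
                'hooks': ['push', 'pull']}  # assumed keys
    os.environ['RC_SCM_DATA'] = json.dumps(scm_data)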
148 def _rev_range_hash(repo, node):
164 def _rev_range_hash(repo, node):
149
165
150 commits = []
166 commits = []
151 for rev in xrange(repo[node], len(repo)):
167 for rev in xrange(repo[node], len(repo)):
152 ctx = repo[rev]
168 ctx = repo[rev]
153 commit_id = mercurial.node.hex(ctx.node())
169 commit_id = mercurial.node.hex(ctx.node())
154 branch = ctx.branch()
170 branch = ctx.branch()
155 commits.append((commit_id, branch))
171 commits.append((commit_id, branch))
156
172
157 return commits
173 return commits
158
174
159
175
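# A sketch of the value returned by _rev_range_hash(): (commit_id, branch)
# pairs from ``node`` up to the repository tip; the hashes and branch names
# below are illustrative assumptions.
EXAMPLE_REV_RANGE = [
    ('f2c5a1d0' * 5, 'default'),
    ('0badc0de' * 5, 'stable'),
]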
160 def repo_size(ui, repo, **kwargs):
176 def repo_size(ui, repo, **kwargs):
161 extras = _extras_from_ui(ui)
177 extras = _extras_from_ui(ui)
162 return _call_hook('repo_size', extras, HgMessageWriter(ui))
178 return _call_hook('repo_size', extras, HgMessageWriter(ui))
163
179
164
180
165 def pre_pull(ui, repo, **kwargs):
181 def pre_pull(ui, repo, **kwargs):
166 extras = _extras_from_ui(ui)
182 extras = _extras_from_ui(ui)
167 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
183 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
168
184
169
185
170 def pre_pull_ssh(ui, repo, **kwargs):
186 def pre_pull_ssh(ui, repo, **kwargs):
171 extras = _extras_from_ui(ui)
187 extras = _extras_from_ui(ui)
172 if extras and extras.get('SSH'):
188 if extras and extras.get('SSH'):
173 return pre_pull(ui, repo, **kwargs)
189 return pre_pull(ui, repo, **kwargs)
174 return 0
190 return 0
175
191
176
192
177 def post_pull(ui, repo, **kwargs):
193 def post_pull(ui, repo, **kwargs):
178 extras = _extras_from_ui(ui)
194 extras = _extras_from_ui(ui)
179 return _call_hook('post_pull', extras, HgMessageWriter(ui))
195 return _call_hook('post_pull', extras, HgMessageWriter(ui))
180
196
181
197
182 def post_pull_ssh(ui, repo, **kwargs):
198 def post_pull_ssh(ui, repo, **kwargs):
183 extras = _extras_from_ui(ui)
199 extras = _extras_from_ui(ui)
184 if extras and extras.get('SSH'):
200 if extras and extras.get('SSH'):
185 return post_pull(ui, repo, **kwargs)
201 return post_pull(ui, repo, **kwargs)
186 return 0
202 return 0
187
203
188
204
189 def pre_push(ui, repo, node=None, **kwargs):
205 def pre_push(ui, repo, node=None, **kwargs):
190 extras = _extras_from_ui(ui)
206 extras = _extras_from_ui(ui)
191
207
192 rev_data = []
208 rev_data = []
193 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
209 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
194 branches = collections.defaultdict(list)
210 branches = collections.defaultdict(list)
195 for commit_id, branch in _rev_range_hash(repo, node):
211 for commit_id, branch in _rev_range_hash(repo, node):
196 branches[branch].append(commit_id)
212 branches[branch].append(commit_id)
197
213
198 for branch, commits in branches.iteritems():
214 for branch, commits in branches.iteritems():
199 old_rev = kwargs.get('node_last') or commits[0]
215 old_rev = kwargs.get('node_last') or commits[0]
200 rev_data.append({
216 rev_data.append({
201 'old_rev': old_rev,
217 'old_rev': old_rev,
202 'new_rev': commits[-1],
218 'new_rev': commits[-1],
203 'ref': '',
219 'ref': '',
204 'type': 'branch',
220 'type': 'branch',
205 'name': branch,
221 'name': branch,
206 })
222 })
207
223
208 extras['commit_ids'] = rev_data
224 extras['commit_ids'] = rev_data
209 return _call_hook('pre_push', extras, HgMessageWriter(ui))
225 return _call_hook('pre_push', extras, HgMessageWriter(ui))
210
226
211
227
212 def pre_push_ssh(ui, repo, node=None, **kwargs):
228 def pre_push_ssh(ui, repo, node=None, **kwargs):
213 if _extras_from_ui(ui).get('SSH'):
229 if _extras_from_ui(ui).get('SSH'):
214 return pre_push(ui, repo, node, **kwargs)
230 return pre_push(ui, repo, node, **kwargs)
215
231
216 return 0
232 return 0
217
233
218
234
219 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
235 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
220 extras = _extras_from_ui(ui)
236 extras = _extras_from_ui(ui)
221 if extras.get('SSH'):
237 if extras.get('SSH'):
222 permission = extras['SSH_PERMISSIONS']
238 permission = extras['SSH_PERMISSIONS']
223
239
224 if 'repository.write' == permission or 'repository.admin' == permission:
240 if 'repository.write' == permission or 'repository.admin' == permission:
225 return 0
241 return 0
226
242
227 # non-zero ret code
243 # non-zero ret code
228 return 1
244 return 1
229
245
230 return 0
246 return 0
231
247
232
248
233 def post_push(ui, repo, node, **kwargs):
249 def post_push(ui, repo, node, **kwargs):
234 extras = _extras_from_ui(ui)
250 extras = _extras_from_ui(ui)
235
251
236 commit_ids = []
252 commit_ids = []
237 branches = []
253 branches = []
238 bookmarks = []
254 bookmarks = []
239 tags = []
255 tags = []
240
256
241 for commit_id, branch in _rev_range_hash(repo, node):
257 for commit_id, branch in _rev_range_hash(repo, node):
242 commit_ids.append(commit_id)
258 commit_ids.append(commit_id)
243 if branch not in branches:
259 if branch not in branches:
244 branches.append(branch)
260 branches.append(branch)
245
261
246 if hasattr(ui, '_rc_pushkey_branches'):
262 if hasattr(ui, '_rc_pushkey_branches'):
247 bookmarks = ui._rc_pushkey_branches
263 bookmarks = ui._rc_pushkey_branches
248
264
249 extras['commit_ids'] = commit_ids
265 extras['commit_ids'] = commit_ids
250 extras['new_refs'] = {
266 extras['new_refs'] = {
251 'branches': branches,
267 'branches': branches,
252 'bookmarks': bookmarks,
268 'bookmarks': bookmarks,
253 'tags': tags
269 'tags': tags
254 }
270 }
255
271
256 return _call_hook('post_push', extras, HgMessageWriter(ui))
272 return _call_hook('post_push', extras, HgMessageWriter(ui))
257
273
258
274
259 def post_push_ssh(ui, repo, node, **kwargs):
275 def post_push_ssh(ui, repo, node, **kwargs):
260 if _extras_from_ui(ui).get('SSH'):
276 if _extras_from_ui(ui).get('SSH'):
261 return post_push(ui, repo, node, **kwargs)
277 return post_push(ui, repo, node, **kwargs)
262 return 0
278 return 0
263
279
264
280
265 def key_push(ui, repo, **kwargs):
281 def key_push(ui, repo, **kwargs):
266 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
282 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
267 # store new bookmarks in our UI object, to be propagated later to post_push
283 # store new bookmarks in our UI object, to be propagated later to post_push
268 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
284 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
269 return
285 return
270
286
271
287
272 # backward compat
288 # backward compat
273 log_pull_action = post_pull
289 log_pull_action = post_pull
274
290
275 # backward compat
291 # backward compat
276 log_push_action = post_push
292 log_push_action = post_push
277
293
278
294
279 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
295 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
280 """
296 """
281 Old hook name: keep here for backward compatibility.
297 Old hook name: keep here for backward compatibility.
282
298
283 This is only required when the installed git hooks are not upgraded.
299 This is only required when the installed git hooks are not upgraded.
284 """
300 """
285 pass
301 pass
286
302
287
303
288 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
304 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
289 """
305 """
290 Old hook name: keep here for backward compatibility.
306 Old hook name: keep here for backward compatibility.
291
307
292 This is only required when the installed git hooks are not upgraded.
308 This is only required when the installed git hooks are not upgraded.
293 """
309 """
294 pass
310 pass
295
311
296
312
297 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
313 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
298
314
299
315
300 def git_pre_pull(extras):
316 def git_pre_pull(extras):
301 """
317 """
302 Pre pull hook.
318 Pre pull hook.
303
319
304 :param extras: dictionary containing the keys defined in simplevcs
320 :param extras: dictionary containing the keys defined in simplevcs
305 :type extras: dict
321 :type extras: dict
306
322
307 :return: status code of the hook. 0 for success.
323 :return: status code of the hook. 0 for success.
308 :rtype: int
324 :rtype: int
309 """
325 """
310 if 'pull' not in extras['hooks']:
326 if 'pull' not in extras['hooks']:
311 return HookResponse(0, '')
327 return HookResponse(0, '')
312
328
313 stdout = io.BytesIO()
329 stdout = io.BytesIO()
314 try:
330 try:
315 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
331 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
316 except Exception as error:
332 except Exception as error:
317 status = 128
333 status = 128
318 stdout.write('ERROR: %s\n' % str(error))
334 stdout.write('ERROR: %s\n' % str(error))
319
335
320 return HookResponse(status, stdout.getvalue())
336 return HookResponse(status, stdout.getvalue())
321
337
322
338
323 def git_post_pull(extras):
339 def git_post_pull(extras):
324 """
340 """
325 Post pull hook.
341 Post pull hook.
326
342
327 :param extras: dictionary containing the keys defined in simplevcs
343 :param extras: dictionary containing the keys defined in simplevcs
328 :type extras: dict
344 :type extras: dict
329
345
330 :return: status code of the hook. 0 for success.
346 :return: status code of the hook. 0 for success.
331 :rtype: int
347 :rtype: int
332 """
348 """
333 if 'pull' not in extras['hooks']:
349 if 'pull' not in extras['hooks']:
334 return HookResponse(0, '')
350 return HookResponse(0, '')
335
351
336 stdout = io.BytesIO()
352 stdout = io.BytesIO()
337 try:
353 try:
338 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
354 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
339 except Exception as error:
355 except Exception as error:
340 status = 128
356 status = 128
341 stdout.write('ERROR: %s\n' % error)
357 stdout.write('ERROR: %s\n' % error)
342
358
343 return HookResponse(status, stdout.getvalue())
359 return HookResponse(status, stdout.getvalue())
344
360
345
361
346 def _parse_git_ref_lines(revision_lines):
362 def _parse_git_ref_lines(revision_lines):
347 rev_data = []
363 rev_data = []
348 for revision_line in revision_lines or []:
364 for revision_line in revision_lines or []:
349 old_rev, new_rev, ref = revision_line.strip().split(' ')
365 old_rev, new_rev, ref = revision_line.strip().split(' ')
350 ref_data = ref.split('/', 2)
366 ref_data = ref.split('/', 2)
351 if ref_data[1] in ('tags', 'heads'):
367 if ref_data[1] in ('tags', 'heads'):
352 rev_data.append({
368 rev_data.append({
353 'old_rev': old_rev,
369 'old_rev': old_rev,
354 'new_rev': new_rev,
370 'new_rev': new_rev,
355 'ref': ref,
371 'ref': ref,
356 'type': ref_data[1],
372 'type': ref_data[1],
357 'name': ref_data[2],
373 'name': ref_data[2],
358 })
374 })
359 return rev_data
375 return rev_data
360
376
361
377
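# A usage sketch of _parse_git_ref_lines() with made-up hashes; git feeds
# "<old-sha> <new-sha> <ref>" lines like these to the receive hooks on stdin.
def _example_parse_ref_lines():
    lines = [
        '0' * 40 + ' ' + 'a' * 40 + ' refs/heads/master',
        'b' * 40 + ' ' + 'c' * 40 + ' refs/tags/v1.0.0',
    ]
    # -> [{'old_rev': '00..', 'new_rev': 'aa..', 'ref': 'refs/heads/master',
    #      'type': 'heads', 'name': 'master'},
    #     {..., 'type': 'tags', 'name': 'v1.0.0'}]
    return _parse_git_ref_lines(lines)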
362 def git_pre_receive(unused_repo_path, revision_lines, env):
378 def git_pre_receive(unused_repo_path, revision_lines, env):
363 """
379 """
364 Pre push hook.
380 Pre push hook.
365
381
366 :param env: environment with the `RC_SCM_DATA` JSON holding the keys defined in simplevcs
382 :param env: environment with the `RC_SCM_DATA` JSON holding the keys defined in simplevcs
367 :type env: dict
383 :type env: dict
368
384
369 :return: status code of the hook. 0 for success.
385 :return: status code of the hook. 0 for success.
370 :rtype: int
386 :rtype: int
371 """
387 """
372 extras = json.loads(env['RC_SCM_DATA'])
388 extras = json.loads(env['RC_SCM_DATA'])
373 rev_data = _parse_git_ref_lines(revision_lines)
389 rev_data = _parse_git_ref_lines(revision_lines)
374 if 'push' not in extras['hooks']:
390 if 'push' not in extras['hooks']:
375 return 0
391 return 0
376 extras['commit_ids'] = rev_data
392 extras['commit_ids'] = rev_data
377 return _call_hook('pre_push', extras, GitMessageWriter())
393 return _call_hook('pre_push', extras, GitMessageWriter())
378
394
379
395
380 def git_post_receive(unused_repo_path, revision_lines, env):
396 def git_post_receive(unused_repo_path, revision_lines, env):
381 """
397 """
382 Post push hook.
398 Post push hook.
383
399
384 :param env: environment with the `RC_SCM_DATA` JSON holding the keys defined in simplevcs
400 :param env: environment with the `RC_SCM_DATA` JSON holding the keys defined in simplevcs
385 :type env: dict
401 :type env: dict
386
402
387 :return: status code of the hook. 0 for success.
403 :return: status code of the hook. 0 for success.
388 :rtype: int
404 :rtype: int
389 """
405 """
390 extras = json.loads(env['RC_SCM_DATA'])
406 extras = json.loads(env['RC_SCM_DATA'])
391 if 'push' not in extras['hooks']:
407 if 'push' not in extras['hooks']:
392 return 0
408 return 0
393
409
394 rev_data = _parse_git_ref_lines(revision_lines)
410 rev_data = _parse_git_ref_lines(revision_lines)
395
411
396 git_revs = []
412 git_revs = []
397
413
398 # N.B.(skreft): it is ok to just call git, as git before calling a
414 # N.B.(skreft): it is ok to just call git, as git before calling a
399 # subcommand sets the PATH environment variable so that it points to the
415 # subcommand sets the PATH environment variable so that it points to the
400 # correct version of the git executable.
416 # correct version of the git executable.
401 empty_commit_id = '0' * 40
417 empty_commit_id = '0' * 40
402 branches = []
418 branches = []
403 tags = []
419 tags = []
404 for push_ref in rev_data:
420 for push_ref in rev_data:
405 type_ = push_ref['type']
421 type_ = push_ref['type']
406
422
407 if type_ == 'heads':
423 if type_ == 'heads':
408 if push_ref['old_rev'] == empty_commit_id:
424 if push_ref['old_rev'] == empty_commit_id:
409 # starting new branch case
425 # starting new branch case
410 if push_ref['name'] not in branches:
426 if push_ref['name'] not in branches:
411 branches.append(push_ref['name'])
427 branches.append(push_ref['name'])
412
428
413 # Fix up head revision if needed
429 # Fix up head revision if needed
414 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
430 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
415 try:
431 try:
416 subprocessio.run_command(cmd, env=os.environ.copy())
432 subprocessio.run_command(cmd, env=os.environ.copy())
417 except Exception:
433 except Exception:
418 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
434 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
419 'refs/heads/%s' % push_ref['name']]
435 'refs/heads/%s' % push_ref['name']]
420 print("Setting default branch to %s" % push_ref['name'])
436 print("Setting default branch to %s" % push_ref['name'])
421 subprocessio.run_command(cmd, env=os.environ.copy())
437 subprocessio.run_command(cmd, env=os.environ.copy())
422
438
423 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
439 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
424 '--format=%(refname)', 'refs/heads/*']
440 '--format=%(refname)', 'refs/heads/*']
425 stdout, stderr = subprocessio.run_command(
441 stdout, stderr = subprocessio.run_command(
426 cmd, env=os.environ.copy())
442 cmd, env=os.environ.copy())
427 heads = stdout
443 heads = stdout
428 heads = heads.replace(push_ref['ref'], '')
444 heads = heads.replace(push_ref['ref'], '')
429 heads = ' '.join(head for head in heads.splitlines() if head)
445 heads = ' '.join(head for head in heads.splitlines() if head)
430 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
446 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
431 '--pretty=format:%H', '--', push_ref['new_rev'],
447 '--pretty=format:%H', '--', push_ref['new_rev'],
432 '--not', heads]
448 '--not', heads]
433 stdout, stderr = subprocessio.run_command(
449 stdout, stderr = subprocessio.run_command(
434 cmd, env=os.environ.copy())
450 cmd, env=os.environ.copy())
435 git_revs.extend(stdout.splitlines())
451 git_revs.extend(stdout.splitlines())
436 elif push_ref['new_rev'] == empty_commit_id:
452 elif push_ref['new_rev'] == empty_commit_id:
437 # delete branch case
453 # delete branch case
438 git_revs.append('delete_branch=>%s' % push_ref['name'])
454 git_revs.append('delete_branch=>%s' % push_ref['name'])
439 else:
455 else:
440 if push_ref['name'] not in branches:
456 if push_ref['name'] not in branches:
441 branches.append(push_ref['name'])
457 branches.append(push_ref['name'])
442
458
443 cmd = [settings.GIT_EXECUTABLE, 'log',
459 cmd = [settings.GIT_EXECUTABLE, 'log',
444 '{old_rev}..{new_rev}'.format(**push_ref),
460 '{old_rev}..{new_rev}'.format(**push_ref),
445 '--reverse', '--pretty=format:%H']
461 '--reverse', '--pretty=format:%H']
446 stdout, stderr = subprocessio.run_command(
462 stdout, stderr = subprocessio.run_command(
447 cmd, env=os.environ.copy())
463 cmd, env=os.environ.copy())
448 git_revs.extend(stdout.splitlines())
464 git_revs.extend(stdout.splitlines())
449 elif type_ == 'tags':
465 elif type_ == 'tags':
450 if push_ref['name'] not in tags:
466 if push_ref['name'] not in tags:
451 tags.append(push_ref['name'])
467 tags.append(push_ref['name'])
452 git_revs.append('tag=>%s' % push_ref['name'])
468 git_revs.append('tag=>%s' % push_ref['name'])
453
469
454 extras['commit_ids'] = git_revs
470 extras['commit_ids'] = git_revs
455 extras['new_refs'] = {
471 extras['new_refs'] = {
456 'branches': branches,
472 'branches': branches,
457 'bookmarks': [],
473 'bookmarks': [],
458 'tags': tags,
474 'tags': tags,
459 }
475 }
460
476
461 if 'repo_size' in extras['hooks']:
477 if 'repo_size' in extras['hooks']:
462 try:
478 try:
463 _call_hook('repo_size', extras, GitMessageWriter())
479 _call_hook('repo_size', extras, GitMessageWriter())
464 except Exception:
480 except Exception:
465 pass
481 pass
466
482
467 return _call_hook('post_push', extras, GitMessageWriter())
483 return _call_hook('post_push', extras, GitMessageWriter())
484
485
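# git_post_receive() flattens the pushed refs into ``extras['commit_ids']``.
# Besides plain commit hashes it uses two marker formats, so a push that
# deletes a branch and creates a tag could yield (illustrative values):
EXAMPLE_GIT_REVS = [
    'a' * 40,                    # commit reachable from an updated branch
    'delete_branch=>feature-x',  # branch deletion marker
    'tag=>v1.0.0',               # new tag marker
]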
486 def svn_pre_commit(repo_path, commit_data, env):
487 path, txn_id = commit_data
488 branches = []
489 tags = []
490
491 cmd = ['svnlook', 'pget',
492 '-t', txn_id,
493 '--revprop', path, 'rc-scm-extras']
494 stdout, stderr = subprocessio.run_command(
495 cmd, env=os.environ.copy())
496 extras = json.loads(base64.urlsafe_b64decode(stdout))
497
498 extras['commit_ids'] = []
499 extras['txn_id'] = txn_id
500 extras['new_refs'] = {
501 'branches': branches,
502 'bookmarks': [],
503 'tags': tags,
504 }
505 sys.stderr.write(str(extras))
506 return _call_hook('pre_push', extras, SvnMessageWriter())
507
508
509 def svn_post_commit(repo_path, commit_data, env):
510 """
511 commit_data is path, rev, txn_id
512 """
513 path, commit_id, txn_id = commit_data
514 branches = []
515 tags = []
516
517 cmd = ['svnlook', 'pget',
518 '-r', commit_id,
519 '--revprop', path, 'rc-scm-extras']
520 stdout, stderr = subprocessio.run_command(
521 cmd, env=os.environ.copy())
522
523 extras = json.loads(base64.urlsafe_b64decode(stdout))
524
525 extras['commit_ids'] = [commit_id]
526 extras['txn_id'] = txn_id
527 extras['new_refs'] = {
528 'branches': branches,
529 'bookmarks': [],
530 'tags': tags,
531 }
532
533 if 'repo_size' in extras['hooks']:
534 try:
535 _call_hook('repo_size', extras, SvnMessageWriter())
536 except Exception:
537 pass
538
539 return _call_hook('post_push', extras, SvnMessageWriter())
540
541
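# A sketch of how generated svn hook scripts might call into the functions
# above; the repository path, revision number and transaction id are
# illustrative assumptions. Both functions read the base64-encoded JSON
# ``rc-scm-extras`` revision property via ``svnlook pget``.
def _example_svn_hook_invocation():
    repo_path = '/srv/svn/my-repo'                                # assumed path
    pre = svn_pre_commit(repo_path, (repo_path, 'txn-123'), os.environ)
    post = svn_post_commit(repo_path, (repo_path, '42', 'txn-123'), os.environ)
    return pre, post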
@@ -1,480 +1,487 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import base64
19 import base64
20 import locale
20 import locale
21 import logging
21 import logging
22 import uuid
22 import uuid
23 import wsgiref.util
23 import wsgiref.util
24 import traceback
24 import traceback
25 from itertools import chain
25 from itertools import chain
26
26
27 import simplejson as json
27 import simplejson as json
28 import msgpack
28 import msgpack
29 from beaker.cache import CacheManager
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
30 from beaker.util import parse_cache_config_options
31 from pyramid.config import Configurator
31 from pyramid.config import Configurator
32 from pyramid.wsgi import wsgiapp
32 from pyramid.wsgi import wsgiapp
33 from pyramid.compat import configparser
33 from pyramid.compat import configparser
34
34
35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 from vcsserver.echo_stub.echo_app import EchoApp
38 from vcsserver.echo_stub.echo_app import EchoApp
39 from vcsserver.exceptions import HTTPRepoLocked
39 from vcsserver.exceptions import HTTPRepoLocked
40 from vcsserver.server import VcsServer
40 from vcsserver.server import VcsServer
41
41
42 try:
42 try:
43 from vcsserver.git import GitFactory, GitRemote
43 from vcsserver.git import GitFactory, GitRemote
44 except ImportError:
44 except ImportError:
45 GitFactory = None
45 GitFactory = None
46 GitRemote = None
46 GitRemote = None
47
47
48 try:
48 try:
49 from vcsserver.hg import MercurialFactory, HgRemote
49 from vcsserver.hg import MercurialFactory, HgRemote
50 except ImportError:
50 except ImportError:
51 MercurialFactory = None
51 MercurialFactory = None
52 HgRemote = None
52 HgRemote = None
53
53
54 try:
54 try:
55 from vcsserver.svn import SubversionFactory, SvnRemote
55 from vcsserver.svn import SubversionFactory, SvnRemote
56 except ImportError:
56 except ImportError:
57 SubversionFactory = None
57 SubversionFactory = None
58 SvnRemote = None
58 SvnRemote = None
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 def _is_request_chunked(environ):
63 def _is_request_chunked(environ):
64 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
64 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
65 return stream
65 return stream
66
66
67
67
68 class VCS(object):
68 class VCS(object):
69 def __init__(self, locale=None, cache_config=None):
69 def __init__(self, locale=None, cache_config=None):
70 self.locale = locale
70 self.locale = locale
71 self.cache_config = cache_config
71 self.cache_config = cache_config
72 self._configure_locale()
72 self._configure_locale()
73 self._initialize_cache()
73 self._initialize_cache()
74
74
75 if GitFactory and GitRemote:
75 if GitFactory and GitRemote:
76 git_repo_cache = self.cache.get_cache_region(
76 git_repo_cache = self.cache.get_cache_region(
77 'git', region='repo_object')
77 'git', region='repo_object')
78 git_factory = GitFactory(git_repo_cache)
78 git_factory = GitFactory(git_repo_cache)
79 self._git_remote = GitRemote(git_factory)
79 self._git_remote = GitRemote(git_factory)
80 else:
80 else:
81 log.info("Git client import failed")
81 log.info("Git client import failed")
82
82
83 if MercurialFactory and HgRemote:
83 if MercurialFactory and HgRemote:
84 hg_repo_cache = self.cache.get_cache_region(
84 hg_repo_cache = self.cache.get_cache_region(
85 'hg', region='repo_object')
85 'hg', region='repo_object')
86 hg_factory = MercurialFactory(hg_repo_cache)
86 hg_factory = MercurialFactory(hg_repo_cache)
87 self._hg_remote = HgRemote(hg_factory)
87 self._hg_remote = HgRemote(hg_factory)
88 else:
88 else:
89 log.info("Mercurial client import failed")
89 log.info("Mercurial client import failed")
90
90
91 if SubversionFactory and SvnRemote:
91 if SubversionFactory and SvnRemote:
92 svn_repo_cache = self.cache.get_cache_region(
92 svn_repo_cache = self.cache.get_cache_region(
93 'svn', region='repo_object')
93 'svn', region='repo_object')
94 svn_factory = SubversionFactory(svn_repo_cache)
94 svn_factory = SubversionFactory(svn_repo_cache)
95 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
95 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
99 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
96 else:
100 else:
97 log.info("Subversion client import failed")
101 log.info("Subversion client import failed")
98
102
99 self._vcsserver = VcsServer()
103 self._vcsserver = VcsServer()
100
104
101 def _initialize_cache(self):
105 def _initialize_cache(self):
102 cache_config = parse_cache_config_options(self.cache_config)
106 cache_config = parse_cache_config_options(self.cache_config)
103 log.info('Initializing beaker cache: %s' % cache_config)
107 log.info('Initializing beaker cache: %s' % cache_config)
104 self.cache = CacheManager(**cache_config)
108 self.cache = CacheManager(**cache_config)
105
109
106 def _configure_locale(self):
110 def _configure_locale(self):
107 if self.locale:
111 if self.locale:
108 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
112 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
109 else:
113 else:
110 log.info(
114 log.info(
111 'Configuring locale subsystem based on environment variables')
115 'Configuring locale subsystem based on environment variables')
112 try:
116 try:
113 # If self.locale is the empty string, then the locale
117 # If self.locale is the empty string, then the locale
114 # module will use the environment variables. See the
118 # module will use the environment variables. See the
115 # documentation of the package `locale`.
119 # documentation of the package `locale`.
116 locale.setlocale(locale.LC_ALL, self.locale)
120 locale.setlocale(locale.LC_ALL, self.locale)
117
121
118 language_code, encoding = locale.getlocale()
122 language_code, encoding = locale.getlocale()
119 log.info(
123 log.info(
120 'Locale set to language code "%s" with encoding "%s".',
124 'Locale set to language code "%s" with encoding "%s".',
121 language_code, encoding)
125 language_code, encoding)
122 except locale.Error:
126 except locale.Error:
123 log.exception(
127 log.exception(
124 'Cannot set locale, not configuring the locale system')
128 'Cannot set locale, not configuring the locale system')
125
129
126
130
127 class WsgiProxy(object):
131 class WsgiProxy(object):
128 def __init__(self, wsgi):
132 def __init__(self, wsgi):
129 self.wsgi = wsgi
133 self.wsgi = wsgi
130
134
131 def __call__(self, environ, start_response):
135 def __call__(self, environ, start_response):
132 input_data = environ['wsgi.input'].read()
136 input_data = environ['wsgi.input'].read()
133 input_data = msgpack.unpackb(input_data)
137 input_data = msgpack.unpackb(input_data)
134
138
135 error = None
139 error = None
136 try:
140 try:
137 data, status, headers = self.wsgi.handle(
141 data, status, headers = self.wsgi.handle(
138 input_data['environment'], input_data['input_data'],
142 input_data['environment'], input_data['input_data'],
139 *input_data['args'], **input_data['kwargs'])
143 *input_data['args'], **input_data['kwargs'])
140 except Exception as e:
144 except Exception as e:
141 data, status, headers = [], None, None
145 data, status, headers = [], None, None
142 error = {
146 error = {
143 'message': str(e),
147 'message': str(e),
144 '_vcs_kind': getattr(e, '_vcs_kind', None)
148 '_vcs_kind': getattr(e, '_vcs_kind', None)
145 }
149 }
146
150
147 start_response(200, {})
151 start_response(200, {})
148 return self._iterator(error, status, headers, data)
152 return self._iterator(error, status, headers, data)
149
153
150 def _iterator(self, error, status, headers, data):
154 def _iterator(self, error, status, headers, data):
151 initial_data = [
155 initial_data = [
152 error,
156 error,
153 status,
157 status,
154 headers,
158 headers,
155 ]
159 ]
156
160
157 for d in chain(initial_data, data):
161 for d in chain(initial_data, data):
158 yield msgpack.packb(d)
162 yield msgpack.packb(d)
159
163
160
164
161 class HTTPApplication(object):
165 class HTTPApplication(object):
162 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
166 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
163
167
164 remote_wsgi = remote_wsgi
168 remote_wsgi = remote_wsgi
165 _use_echo_app = False
169 _use_echo_app = False
166
170
167 def __init__(self, settings=None, global_config=None):
171 def __init__(self, settings=None, global_config=None):
168 self.config = Configurator(settings=settings)
172 self.config = Configurator(settings=settings)
169 self.global_config = global_config
173 self.global_config = global_config
170
174
171 locale = settings.get('locale', '') or 'en_US.UTF-8'
175 locale = settings.get('locale', '') or 'en_US.UTF-8'
172 vcs = VCS(locale=locale, cache_config=settings)
176 vcs = VCS(locale=locale, cache_config=settings)
173 self._remotes = {
177 self._remotes = {
174 'hg': vcs._hg_remote,
178 'hg': vcs._hg_remote,
175 'git': vcs._git_remote,
179 'git': vcs._git_remote,
176 'svn': vcs._svn_remote,
180 'svn': vcs._svn_remote,
177 'server': vcs._vcsserver,
181 'server': vcs._vcsserver,
178 }
182 }
179 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
183 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
180 self._use_echo_app = True
184 self._use_echo_app = True
181 log.warning("Using EchoApp for VCS operations.")
185 log.warning("Using EchoApp for VCS operations.")
182 self.remote_wsgi = remote_wsgi_stub
186 self.remote_wsgi = remote_wsgi_stub
183 self._configure_settings(settings)
187 self._configure_settings(settings)
184 self._configure()
188 self._configure()
185
189
186 def _configure_settings(self, app_settings):
190 def _configure_settings(self, app_settings):
187 """
191 """
188 Configure the settings module.
192 Configure the settings module.
189 """
193 """
190 git_path = app_settings.get('git_path', None)
194 git_path = app_settings.get('git_path', None)
191 if git_path:
195 if git_path:
192 settings.GIT_EXECUTABLE = git_path
196 settings.GIT_EXECUTABLE = git_path
197 binary_dir = app_settings.get('core.binary_dir', None)
198 if binary_dir:
199 settings.BINARY_DIR = binary_dir
193
200
194 def _configure(self):
201 def _configure(self):
195 self.config.add_renderer(
202 self.config.add_renderer(
196 name='msgpack',
203 name='msgpack',
197 factory=self._msgpack_renderer_factory)
204 factory=self._msgpack_renderer_factory)
198
205
199 self.config.add_route('service', '/_service')
206 self.config.add_route('service', '/_service')
200 self.config.add_route('status', '/status')
207 self.config.add_route('status', '/status')
201 self.config.add_route('hg_proxy', '/proxy/hg')
208 self.config.add_route('hg_proxy', '/proxy/hg')
202 self.config.add_route('git_proxy', '/proxy/git')
209 self.config.add_route('git_proxy', '/proxy/git')
203 self.config.add_route('vcs', '/{backend}')
210 self.config.add_route('vcs', '/{backend}')
204 self.config.add_route('stream_git', '/stream/git/*repo_name')
211 self.config.add_route('stream_git', '/stream/git/*repo_name')
205 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
212 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
206
213
207 self.config.add_view(
214 self.config.add_view(
208 self.status_view, route_name='status', renderer='json')
215 self.status_view, route_name='status', renderer='json')
209 self.config.add_view(
216 self.config.add_view(
210 self.service_view, route_name='service', renderer='msgpack')
217 self.service_view, route_name='service', renderer='msgpack')
211
218
212 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
219 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
213 self.config.add_view(self.git_proxy(), route_name='git_proxy')
220 self.config.add_view(self.git_proxy(), route_name='git_proxy')
214 self.config.add_view(
221 self.config.add_view(
215 self.vcs_view, route_name='vcs', renderer='msgpack',
222 self.vcs_view, route_name='vcs', renderer='msgpack',
216 custom_predicates=[self.is_vcs_view])
223 custom_predicates=[self.is_vcs_view])
217
224
218 self.config.add_view(self.hg_stream(), route_name='stream_hg')
225 self.config.add_view(self.hg_stream(), route_name='stream_hg')
219 self.config.add_view(self.git_stream(), route_name='stream_git')
226 self.config.add_view(self.git_stream(), route_name='stream_git')
220
227
221 def notfound(request):
228 def notfound(request):
222 return {'status': '404 NOT FOUND'}
229 return {'status': '404 NOT FOUND'}
223 self.config.add_notfound_view(notfound, renderer='json')
230 self.config.add_notfound_view(notfound, renderer='json')
224
231
225 self.config.add_view(self.handle_vcs_exception, context=Exception)
232 self.config.add_view(self.handle_vcs_exception, context=Exception)
226
233
227 self.config.add_tween(
234 self.config.add_tween(
228 'vcsserver.tweens.RequestWrapperTween',
235 'vcsserver.tweens.RequestWrapperTween',
229 )
236 )
230
237
231 def wsgi_app(self):
238 def wsgi_app(self):
232 return self.config.make_wsgi_app()
239 return self.config.make_wsgi_app()
233
240
234 def vcs_view(self, request):
241 def vcs_view(self, request):
235 remote = self._remotes[request.matchdict['backend']]
242 remote = self._remotes[request.matchdict['backend']]
236 payload = msgpack.unpackb(request.body, use_list=True)
243 payload = msgpack.unpackb(request.body, use_list=True)
237 method = payload.get('method')
244 method = payload.get('method')
238 params = payload.get('params')
245 params = payload.get('params')
239 wire = params.get('wire')
246 wire = params.get('wire')
240 args = params.get('args')
247 args = params.get('args')
241 kwargs = params.get('kwargs')
248 kwargs = params.get('kwargs')
242 if wire:
249 if wire:
243 try:
250 try:
244 wire['context'] = uuid.UUID(wire['context'])
251 wire['context'] = uuid.UUID(wire['context'])
245 except KeyError:
252 except KeyError:
246 pass
253 pass
247 args.insert(0, wire)
254 args.insert(0, wire)
248
255
249 log.debug('method called:%s with kwargs:%s', method, kwargs)
256 log.debug('method called:%s with kwargs:%s', method, kwargs)
250 try:
257 try:
251 resp = getattr(remote, method)(*args, **kwargs)
258 resp = getattr(remote, method)(*args, **kwargs)
252 except Exception as e:
259 except Exception as e:
253 tb_info = traceback.format_exc()
260 tb_info = traceback.format_exc()
254
261
255 type_ = e.__class__.__name__
262 type_ = e.__class__.__name__
256 if type_ not in self.ALLOWED_EXCEPTIONS:
263 if type_ not in self.ALLOWED_EXCEPTIONS:
257 type_ = None
264 type_ = None
258
265
259 resp = {
266 resp = {
260 'id': payload.get('id'),
267 'id': payload.get('id'),
261 'error': {
268 'error': {
262 'message': e.message,
269 'message': e.message,
263 'traceback': tb_info,
270 'traceback': tb_info,
264 'type': type_
271 'type': type_
265 }
272 }
266 }
273 }
267 try:
274 try:
268 resp['error']['_vcs_kind'] = e._vcs_kind
275 resp['error']['_vcs_kind'] = e._vcs_kind
269 except AttributeError:
276 except AttributeError:
270 pass
277 pass
271 else:
278 else:
272 resp = {
279 resp = {
273 'id': payload.get('id'),
280 'id': payload.get('id'),
274 'result': resp
281 'result': resp
275 }
282 }
276
283
277 return resp
284 return resp
278
285
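# vcs_view() expects a msgpack-encoded body of the shape sketched below and
# replies with {'id': ..., 'result': ...} or {'id': ..., 'error': {...}}.
# The wire keys are illustrative assumptions and 'identify' is one of the hg
# remote methods in this changeset; a client would POST
# msgpack.packb(EXAMPLE_VCS_PAYLOAD) to the '/hg' route.
EXAMPLE_VCS_PAYLOAD = {
    'id': 'request-1',
    'method': 'identify',
    'params': {
        'wire': {'path': '/srv/repos/my-repo', 'config': []},  # assumed keys
        'args': [],
        'kwargs': {},
    },
}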
279 def status_view(self, request):
286 def status_view(self, request):
280 import vcsserver
287 import vcsserver
281 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
288 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
282 'pid': os.getpid()}
289 'pid': os.getpid()}
283
290
284 def service_view(self, request):
291 def service_view(self, request):
285 import vcsserver
292 import vcsserver
286
293
287 payload = msgpack.unpackb(request.body, use_list=True)
294 payload = msgpack.unpackb(request.body, use_list=True)
288
295
289 try:
296 try:
290 path = self.global_config['__file__']
297 path = self.global_config['__file__']
291 config = configparser.ConfigParser()
298 config = configparser.ConfigParser()
292 config.read(path)
299 config.read(path)
293 parsed_ini = config
300 parsed_ini = config
294 if parsed_ini.has_section('server:main'):
301 if parsed_ini.has_section('server:main'):
295 parsed_ini = dict(parsed_ini.items('server:main'))
302 parsed_ini = dict(parsed_ini.items('server:main'))
296 except Exception:
303 except Exception:
297 log.exception('Failed to read .ini file for display')
304 log.exception('Failed to read .ini file for display')
298 parsed_ini = {}
305 parsed_ini = {}
299
306
300 resp = {
307 resp = {
301 'id': payload.get('id'),
308 'id': payload.get('id'),
302 'result': dict(
309 'result': dict(
303 version=vcsserver.__version__,
310 version=vcsserver.__version__,
304 config=parsed_ini,
311 config=parsed_ini,
305 payload=payload,
312 payload=payload,
306 )
313 )
307 }
314 }
308 return resp
315 return resp
309
316
310 def _msgpack_renderer_factory(self, info):
317 def _msgpack_renderer_factory(self, info):
311 def _render(value, system):
318 def _render(value, system):
312 value = msgpack.packb(value)
319 value = msgpack.packb(value)
313 request = system.get('request')
320 request = system.get('request')
314 if request is not None:
321 if request is not None:
315 response = request.response
322 response = request.response
316 ct = response.content_type
323 ct = response.content_type
317 if ct == response.default_content_type:
324 if ct == response.default_content_type:
318 response.content_type = 'application/x-msgpack'
325 response.content_type = 'application/x-msgpack'
319 return value
326 return value
320 return _render
327 return _render
321
328
322 def set_env_from_config(self, environ, config):
329 def set_env_from_config(self, environ, config):
323 dict_conf = {}
330 dict_conf = {}
324 try:
331 try:
325 for elem in config:
332 for elem in config:
326 if elem[0] == 'rhodecode':
333 if elem[0] == 'rhodecode':
327 dict_conf = json.loads(elem[2])
334 dict_conf = json.loads(elem[2])
328 break
335 break
329 except Exception:
336 except Exception:
330 log.exception('Failed to fetch SCM CONFIG')
337 log.exception('Failed to fetch SCM CONFIG')
331 return
338 return
332
339
333 username = dict_conf.get('username')
340 username = dict_conf.get('username')
334 if username:
341 if username:
335 environ['REMOTE_USER'] = username
342 environ['REMOTE_USER'] = username
336 # mercurial specific, some extension api rely on this
343 # mercurial specific, some extension api rely on this
337 environ['HGUSER'] = username
344 environ['HGUSER'] = username
338
345
339 ip = dict_conf.get('ip')
346 ip = dict_conf.get('ip')
340 if ip:
347 if ip:
341 environ['REMOTE_HOST'] = ip
348 environ['REMOTE_HOST'] = ip
342
349
343 if _is_request_chunked(environ):
350 if _is_request_chunked(environ):
344 # set the compatibility flag for webob
351 # set the compatibility flag for webob
345 environ['wsgi.input_terminated'] = True
352 environ['wsgi.input_terminated'] = True
346
353
347 def hg_proxy(self):
354 def hg_proxy(self):
348 @wsgiapp
355 @wsgiapp
349 def _hg_proxy(environ, start_response):
356 def _hg_proxy(environ, start_response):
350 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
357 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
351 return app(environ, start_response)
358 return app(environ, start_response)
352 return _hg_proxy
359 return _hg_proxy
353
360
354 def git_proxy(self):
361 def git_proxy(self):
355 @wsgiapp
362 @wsgiapp
356 def _git_proxy(environ, start_response):
363 def _git_proxy(environ, start_response):
357 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
364 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
358 return app(environ, start_response)
365 return app(environ, start_response)
359 return _git_proxy
366 return _git_proxy
360
367
361 def hg_stream(self):
368 def hg_stream(self):
362 if self._use_echo_app:
369 if self._use_echo_app:
363 @wsgiapp
370 @wsgiapp
364 def _hg_stream(environ, start_response):
371 def _hg_stream(environ, start_response):
365 app = EchoApp('fake_path', 'fake_name', None)
372 app = EchoApp('fake_path', 'fake_name', None)
366 return app(environ, start_response)
373 return app(environ, start_response)
367 return _hg_stream
374 return _hg_stream
368 else:
375 else:
369 @wsgiapp
376 @wsgiapp
370 def _hg_stream(environ, start_response):
377 def _hg_stream(environ, start_response):
371 log.debug('http-app: handling hg stream')
378 log.debug('http-app: handling hg stream')
372 repo_path = environ['HTTP_X_RC_REPO_PATH']
379 repo_path = environ['HTTP_X_RC_REPO_PATH']
373 repo_name = environ['HTTP_X_RC_REPO_NAME']
380 repo_name = environ['HTTP_X_RC_REPO_NAME']
374 packed_config = base64.b64decode(
381 packed_config = base64.b64decode(
375 environ['HTTP_X_RC_REPO_CONFIG'])
382 environ['HTTP_X_RC_REPO_CONFIG'])
376 config = msgpack.unpackb(packed_config)
383 config = msgpack.unpackb(packed_config)
377 app = scm_app.create_hg_wsgi_app(
384 app = scm_app.create_hg_wsgi_app(
378 repo_path, repo_name, config)
385 repo_path, repo_name, config)
379
386
380 # Consistent path information for hgweb
387 # Consistent path information for hgweb
381 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
388 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
382 environ['REPO_NAME'] = repo_name
389 environ['REPO_NAME'] = repo_name
383 self.set_env_from_config(environ, config)
390 self.set_env_from_config(environ, config)
384
391
385 log.debug('http-app: starting app handler '
392 log.debug('http-app: starting app handler '
386 'with %s and process request', app)
393 'with %s and process request', app)
387 return app(environ, ResponseFilter(start_response))
394 return app(environ, ResponseFilter(start_response))
388 return _hg_stream
395 return _hg_stream
389
396
390 def git_stream(self):
397 def git_stream(self):
391 if self._use_echo_app:
398 if self._use_echo_app:
392 @wsgiapp
399 @wsgiapp
393 def _git_stream(environ, start_response):
400 def _git_stream(environ, start_response):
394 app = EchoApp('fake_path', 'fake_name', None)
401 app = EchoApp('fake_path', 'fake_name', None)
395 return app(environ, start_response)
402 return app(environ, start_response)
396 return _git_stream
403 return _git_stream
397 else:
404 else:
398 @wsgiapp
405 @wsgiapp
399 def _git_stream(environ, start_response):
406 def _git_stream(environ, start_response):
400 log.debug('http-app: handling git stream')
407 log.debug('http-app: handling git stream')
401 repo_path = environ['HTTP_X_RC_REPO_PATH']
408 repo_path = environ['HTTP_X_RC_REPO_PATH']
402 repo_name = environ['HTTP_X_RC_REPO_NAME']
409 repo_name = environ['HTTP_X_RC_REPO_NAME']
403 packed_config = base64.b64decode(
410 packed_config = base64.b64decode(
404 environ['HTTP_X_RC_REPO_CONFIG'])
411 environ['HTTP_X_RC_REPO_CONFIG'])
405 config = msgpack.unpackb(packed_config)
412 config = msgpack.unpackb(packed_config)
406
413
407 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
414 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
408 self.set_env_from_config(environ, config)
415 self.set_env_from_config(environ, config)
409
416
410 content_type = environ.get('CONTENT_TYPE', '')
417 content_type = environ.get('CONTENT_TYPE', '')
411
418
412 path = environ['PATH_INFO']
419 path = environ['PATH_INFO']
413 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
420 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
414 log.debug(
421 log.debug(
415 'LFS: Detecting if request `%s` is LFS server path based '
422 'LFS: Detecting if request `%s` is LFS server path based '
416 'on content type:`%s`, is_lfs:%s',
423 'on content type:`%s`, is_lfs:%s',
417 path, content_type, is_lfs_request)
424 path, content_type, is_lfs_request)
418
425
419 if not is_lfs_request:
426 if not is_lfs_request:
420 # fallback detection by path
427 # fallback detection by path
421 if GIT_LFS_PROTO_PAT.match(path):
428 if GIT_LFS_PROTO_PAT.match(path):
422 is_lfs_request = True
429 is_lfs_request = True
423 log.debug(
430 log.debug(
424 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
431 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
425 path, is_lfs_request)
432 path, is_lfs_request)
426
433
427 if is_lfs_request:
434 if is_lfs_request:
428 app = scm_app.create_git_lfs_wsgi_app(
435 app = scm_app.create_git_lfs_wsgi_app(
429 repo_path, repo_name, config)
436 repo_path, repo_name, config)
430 else:
437 else:
431 app = scm_app.create_git_wsgi_app(
438 app = scm_app.create_git_wsgi_app(
432 repo_path, repo_name, config)
439 repo_path, repo_name, config)
433
440
434 log.debug('http-app: starting app handler '
441 log.debug('http-app: starting app handler '
435 'with %s and process request', app)
442 'with %s and process request', app)
436
443
437 return app(environ, start_response)
444 return app(environ, start_response)
438
445
439 return _git_stream
446 return _git_stream
440
447
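Both stream handlers above reconstruct the target repository purely from request headers: the fronting application sends the repository path, its name, a base64-encoded msgpack blob with the repo config, and the original PATH_INFO. A minimal, hedged sketch of how a caller would build those headers (path, name and config payload are hypothetical; only the header names follow the HTTP_X_RC_* environ keys read above):

    import base64
    import msgpack

    config = {'section': {'key': 'value'}}  # hypothetical config payload
    headers = {
        'X-RC-Repo-Path': '/srv/repos/example-repo',   # hypothetical location on disk
        'X-RC-Repo-Name': 'example-repo',
        # bytes; decode to str before putting it on a real HTTP request
        'X-RC-Repo-Config': base64.b64encode(msgpack.packb(config)),
        'X-RC-Path-Info': '/example-repo',
    }

On the server side these arrive as HTTP_X_RC_REPO_PATH, HTTP_X_RC_REPO_NAME, HTTP_X_RC_REPO_CONFIG and HTTP_X_RC_PATH_INFO, which is exactly what _hg_stream and _git_stream unpack.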
441 def is_vcs_view(self, context, request):
448 def is_vcs_view(self, context, request):
442 """
449 """
443 View predicate that returns true if given backend is supported by
450 View predicate that returns true if given backend is supported by
444 defined remotes.
451 defined remotes.
445 """
452 """
446 backend = request.matchdict.get('backend')
453 backend = request.matchdict.get('backend')
447 return backend in self._remotes
454 return backend in self._remotes
448
455
449 def handle_vcs_exception(self, exception, request):
456 def handle_vcs_exception(self, exception, request):
450 _vcs_kind = getattr(exception, '_vcs_kind', '')
457 _vcs_kind = getattr(exception, '_vcs_kind', '')
451 if _vcs_kind == 'repo_locked':
458 if _vcs_kind == 'repo_locked':
452 # Get custom repo-locked status code if present.
459 # Get custom repo-locked status code if present.
453 status_code = request.headers.get('X-RC-Locked-Status-Code')
460 status_code = request.headers.get('X-RC-Locked-Status-Code')
454 return HTTPRepoLocked(
461 return HTTPRepoLocked(
455 title=exception.message, status_code=status_code)
462 title=exception.message, status_code=status_code)
456
463
457 # Re-raise exception if we can not handle it.
464 # Re-raise exception if we can not handle it.
458 log.exception(
465 log.exception(
459 'error occurred handling this request for path: %s', request.path)
466 'error occurred handling this request for path: %s', request.path)
460 raise exception
467 raise exception
461
468
462
469
463 class ResponseFilter(object):
470 class ResponseFilter(object):
464
471
465 def __init__(self, start_response):
472 def __init__(self, start_response):
466 self._start_response = start_response
473 self._start_response = start_response
467
474
468 def __call__(self, status, response_headers, exc_info=None):
475 def __call__(self, status, response_headers, exc_info=None):
469 headers = tuple(
476 headers = tuple(
470 (h, v) for h, v in response_headers
477 (h, v) for h, v in response_headers
471 if not wsgiref.util.is_hop_by_hop(h))
478 if not wsgiref.util.is_hop_by_hop(h))
472 return self._start_response(status, headers, exc_info)
479 return self._start_response(status, headers, exc_info)
473
480
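ResponseFilter above wraps start_response and drops hop-by-hop headers (as reported by wsgiref.util.is_hop_by_hop) before they reach the gateway; it is applied to the hg stream handler earlier in this file. A small hedged sketch of the effect, using a stand-in start_response:

    def _print_start_response(status, headers, exc_info=None):
        # stand-in for the gateway's real start_response, for illustration only
        print((status, headers))

    filtered = ResponseFilter(_print_start_response)
    filtered('200 OK', [('Content-Type', 'text/plain'),
                        ('Connection', 'keep-alive')])
    # prints ('200 OK', (('Content-Type', 'text/plain'),)) -- the hop-by-hop
    # 'Connection' header never reaches the wrapped start_response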
474
481
475 def main(global_config, **settings):
482 def main(global_config, **settings):
476 if MercurialFactory:
483 if MercurialFactory:
477 hgpatches.patch_largefiles_capabilities()
484 hgpatches.patch_largefiles_capabilities()
478 hgpatches.patch_subrepo_type_mapping()
485 hgpatches.patch_subrepo_type_mapping()
479 app = HTTPApplication(settings=settings, global_config=global_config)
486 app = HTTPApplication(settings=settings, global_config=global_config)
480 return app.wsgi_app()
487 return app.wsgi_app()
@@ -1,19 +1,20 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 WIRE_ENCODING = 'UTF-8'
18 WIRE_ENCODING = 'UTF-8'
19 GIT_EXECUTABLE = 'git'
19 GIT_EXECUTABLE = 'git'
20 BINARY_DIR = ''
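BINARY_DIR is the setting added by this changeset; when it is non-empty, SvnRemote.install_hooks further down joins it with 'python' to pick the executable that is handed to install_svn_hooks. A hedged sketch of that resolution (the directory value is hypothetical):

    import os
    from vcsserver import settings

    # mirrors the executable resolution in SvnRemote.install_hooks below
    executable = None
    if settings.BINARY_DIR:  # e.g. '/opt/rhodecode/bin' (hypothetical)
        executable = os.path.join(settings.BINARY_DIR, 'python')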
@@ -1,677 +1,689 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 from urllib2 import URLError
21 from urllib2 import URLError
22 import logging
22 import logging
23 import posixpath as vcspath
23 import posixpath as vcspath
24 import StringIO
24 import StringIO
25 import urllib
25 import urllib
26 import traceback
26 import traceback
27
27
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.delta
30 import svn.delta
31 import svn.diff
31 import svn.diff
32 import svn.fs
32 import svn.fs
33 import svn.repos
33 import svn.repos
34
34
35 from vcsserver import svn_diff, exceptions, subprocessio
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 from vcsserver.base import RepoFactory, raise_from_original
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40
40
41 # Set of svn compatible version flags.
41 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
42 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
43 svn_compatible_versions = set([
44 'pre-1.4-compatible',
44 'pre-1.4-compatible',
45 'pre-1.5-compatible',
45 'pre-1.5-compatible',
46 'pre-1.6-compatible',
46 'pre-1.6-compatible',
47 'pre-1.8-compatible',
47 'pre-1.8-compatible',
48 'pre-1.9-compatible',
48 'pre-1.9-compatible',
49 ])
49 ])
50
50
51 svn_compatible_versions_map = {
51 svn_compatible_versions_map = {
52 'pre-1.4-compatible': '1.3',
52 'pre-1.4-compatible': '1.3',
53 'pre-1.5-compatible': '1.4',
53 'pre-1.5-compatible': '1.4',
54 'pre-1.6-compatible': '1.5',
54 'pre-1.6-compatible': '1.5',
55 'pre-1.8-compatible': '1.7',
55 'pre-1.8-compatible': '1.7',
56 'pre-1.9-compatible': '1.8',
56 'pre-1.9-compatible': '1.8',
57 }
57 }
58
58
59
59
60 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
61 """Decorator for converting svn exceptions to something neutral."""
61 """Decorator for converting svn exceptions to something neutral."""
62 def wrapper(*args, **kwargs):
62 def wrapper(*args, **kwargs):
63 try:
63 try:
64 return func(*args, **kwargs)
64 return func(*args, **kwargs)
65 except Exception as e:
65 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in svn remote call")
67 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException)
68 raise_from_original(exceptions.UnhandledException)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
74
74
75 def _create_repo(self, wire, create, compatible_version):
75 def _create_repo(self, wire, create, compatible_version):
76 path = svn.core.svn_path_canonicalize(wire['path'])
76 path = svn.core.svn_path_canonicalize(wire['path'])
77 if create:
77 if create:
78 fs_config = {'compatible-version': '1.9'}
78 fs_config = {'compatible-version': '1.9'}
79 if compatible_version:
79 if compatible_version:
80 if compatible_version not in svn_compatible_versions:
80 if compatible_version not in svn_compatible_versions:
81 raise Exception('Unknown SVN compatible version "{}"'
81 raise Exception('Unknown SVN compatible version "{}"'
82 .format(compatible_version))
82 .format(compatible_version))
83 fs_config['compatible-version'] = \
83 fs_config['compatible-version'] = \
84 svn_compatible_versions_map[compatible_version]
84 svn_compatible_versions_map[compatible_version]
85
85
86 log.debug('Create SVN repo with config "%s"', fs_config)
86 log.debug('Create SVN repo with config "%s"', fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
88 else:
88 else:
89 repo = svn.repos.open(path)
89 repo = svn.repos.open(path)
90
90
91 log.debug('Got SVN object: %s', repo)
91 log.debug('Got SVN object: %s', repo)
92 return repo
92 return repo
93
93
94 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
95 def create_new_repo():
96 return self._create_repo(wire, create, compatible_version)
96 return self._create_repo(wire, create, compatible_version)
97
97
98 return self._repo(wire, create_new_repo)
98 return self._repo(wire, create_new_repo)
99
99
100
100
101 NODE_TYPE_MAPPING = {
101 NODE_TYPE_MAPPING = {
102 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_dir: 'dir',
103 svn.core.svn_node_dir: 'dir',
104 }
104 }
105
105
106
106
107 class SvnRemote(object):
107 class SvnRemote(object):
108
108
109 def __init__(self, factory, hg_factory=None):
109 def __init__(self, factory, hg_factory=None):
110 self._factory = factory
110 self._factory = factory
111 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # TODO: Remove once we do not use internal Mercurial objects anymore
112 # for subversion
112 # for subversion
113 self._hg_factory = hg_factory
113 self._hg_factory = hg_factory
114
114
115 @reraise_safe_exceptions
115 @reraise_safe_exceptions
116 def discover_svn_version(self):
116 def discover_svn_version(self):
117 try:
117 try:
118 import svn.core
118 import svn.core
119 svn_ver = svn.core.SVN_VERSION
119 svn_ver = svn.core.SVN_VERSION
120 except ImportError:
120 except ImportError:
121 svn_ver = None
121 svn_ver = None
122 return svn_ver
122 return svn_ver
123
123
124 def check_url(self, url, config_items):
124 def check_url(self, url, config_items):
125 # this can throw exception if not installed, but we detect this
125 # this can throw exception if not installed, but we detect this
126 from hgsubversion import svnrepo
126 from hgsubversion import svnrepo
127
127
128 baseui = self._hg_factory._create_config(config_items)
128 baseui = self._hg_factory._create_config(config_items)
129 # the uuid function returns a valid UUID only for a proper repo;
129 # the uuid function returns a valid UUID only for a proper repo;
130 # otherwise it throws an exception
130 # otherwise it throws an exception
131 try:
131 try:
132 svnrepo.svnremoterepo(baseui, url).svn.uuid
132 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 except Exception:
133 except Exception:
134 tb = traceback.format_exc()
134 tb = traceback.format_exc()
135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
136 raise URLError(
136 raise URLError(
137 '"%s" is not a valid Subversion source url.' % (url, ))
137 '"%s" is not a valid Subversion source url.' % (url, ))
138 return True
138 return True
139
139
140 def is_path_valid_repository(self, wire, path):
140 def is_path_valid_repository(self, wire, path):
141
141
142 # NOTE(marcink): short circuit the check for SVN repo
142 # NOTE(marcink): short circuit the check for SVN repo
143 # calling repos.open might be expensive, but we have one cheap
143 # calling repos.open might be expensive, but we have one cheap
144 # precondition we can use: checking for the 'format' file
144 # precondition we can use: checking for the 'format' file
145
145
146 if not os.path.isfile(os.path.join(path, 'format')):
146 if not os.path.isfile(os.path.join(path, 'format')):
147 return False
147 return False
148
148
149 try:
149 try:
150 svn.repos.open(path)
150 svn.repos.open(path)
151 except svn.core.SubversionException:
151 except svn.core.SubversionException:
152 tb = traceback.format_exc()
152 tb = traceback.format_exc()
153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
154 return False
154 return False
155 return True
155 return True
156
156
157 @reraise_safe_exceptions
157 @reraise_safe_exceptions
158 def verify(self, wire):
158 def verify(self, wire):
159 repo_path = wire['path']
159 repo_path = wire['path']
160 if not self.is_path_valid_repository(wire, repo_path):
160 if not self.is_path_valid_repository(wire, repo_path):
161 raise Exception(
161 raise Exception(
162 "Path %s is not a valid Subversion repository." % repo_path)
162 "Path %s is not a valid Subversion repository." % repo_path)
163
163
164 cmd = ['svnadmin', 'info', repo_path]
164 cmd = ['svnadmin', 'info', repo_path]
165 stdout, stderr = subprocessio.run_command(cmd)
165 stdout, stderr = subprocessio.run_command(cmd)
166 return stdout
166 return stdout
167
167
168 def lookup(self, wire, revision):
168 def lookup(self, wire, revision):
169 if revision not in [-1, None, 'HEAD']:
169 if revision not in [-1, None, 'HEAD']:
170 raise NotImplementedError
170 raise NotImplementedError
171 repo = self._factory.repo(wire)
171 repo = self._factory.repo(wire)
172 fs_ptr = svn.repos.fs(repo)
172 fs_ptr = svn.repos.fs(repo)
173 head = svn.fs.youngest_rev(fs_ptr)
173 head = svn.fs.youngest_rev(fs_ptr)
174 return head
174 return head
175
175
176 def lookup_interval(self, wire, start_ts, end_ts):
176 def lookup_interval(self, wire, start_ts, end_ts):
177 repo = self._factory.repo(wire)
177 repo = self._factory.repo(wire)
178 fsobj = svn.repos.fs(repo)
178 fsobj = svn.repos.fs(repo)
179 start_rev = None
179 start_rev = None
180 end_rev = None
180 end_rev = None
181 if start_ts:
181 if start_ts:
182 start_ts_svn = apr_time_t(start_ts)
182 start_ts_svn = apr_time_t(start_ts)
183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
184 else:
184 else:
185 start_rev = 1
185 start_rev = 1
186 if end_ts:
186 if end_ts:
187 end_ts_svn = apr_time_t(end_ts)
187 end_ts_svn = apr_time_t(end_ts)
188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
189 else:
189 else:
190 end_rev = svn.fs.youngest_rev(fsobj)
190 end_rev = svn.fs.youngest_rev(fsobj)
191 return start_rev, end_rev
191 return start_rev, end_rev
192
192
193 def revision_properties(self, wire, revision):
193 def revision_properties(self, wire, revision):
194 repo = self._factory.repo(wire)
194 repo = self._factory.repo(wire)
195 fs_ptr = svn.repos.fs(repo)
195 fs_ptr = svn.repos.fs(repo)
196 return svn.fs.revision_proplist(fs_ptr, revision)
196 return svn.fs.revision_proplist(fs_ptr, revision)
197
197
198 def revision_changes(self, wire, revision):
198 def revision_changes(self, wire, revision):
199
199
200 repo = self._factory.repo(wire)
200 repo = self._factory.repo(wire)
201 fsobj = svn.repos.fs(repo)
201 fsobj = svn.repos.fs(repo)
202 rev_root = svn.fs.revision_root(fsobj, revision)
202 rev_root = svn.fs.revision_root(fsobj, revision)
203
203
204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
206 base_dir = ""
206 base_dir = ""
207 send_deltas = False
207 send_deltas = False
208 svn.repos.replay2(
208 svn.repos.replay2(
209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
210 editor_ptr, editor_baton, None)
210 editor_ptr, editor_baton, None)
211
211
212 added = []
212 added = []
213 changed = []
213 changed = []
214 removed = []
214 removed = []
215
215
216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
217 for path, change in editor.changes.iteritems():
217 for path, change in editor.changes.iteritems():
218 # TODO: Decide what to do with directory nodes. Subversion can add
218 # TODO: Decide what to do with directory nodes. Subversion can add
219 # empty directories.
219 # empty directories.
220
220
221 if change.item_kind == svn.core.svn_node_dir:
221 if change.item_kind == svn.core.svn_node_dir:
222 continue
222 continue
223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
224 added.append(path)
224 added.append(path)
225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
226 svn.repos.CHANGE_ACTION_REPLACE]:
226 svn.repos.CHANGE_ACTION_REPLACE]:
227 changed.append(path)
227 changed.append(path)
228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
229 removed.append(path)
229 removed.append(path)
230 else:
230 else:
231 raise NotImplementedError(
231 raise NotImplementedError(
232 "Action %s not supported on path %s" % (
232 "Action %s not supported on path %s" % (
233 change.action, path))
233 change.action, path))
234
234
235 changes = {
235 changes = {
236 'added': added,
236 'added': added,
237 'changed': changed,
237 'changed': changed,
238 'removed': removed,
238 'removed': removed,
239 }
239 }
240 return changes
240 return changes
241
241
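revision_changes collapses the ChangeCollector replay into three plain lists keyed by change type, skipping directory nodes. For a revision that adds one file and modifies another, the returned dict would look like this (paths are hypothetical):

    {
        'added': ['trunk/new_module.py'],
        'changed': ['trunk/setup.py'],
        'removed': [],
    }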
242 def node_history(self, wire, path, revision, limit):
242 def node_history(self, wire, path, revision, limit):
243 cross_copies = False
243 cross_copies = False
244 repo = self._factory.repo(wire)
244 repo = self._factory.repo(wire)
245 fsobj = svn.repos.fs(repo)
245 fsobj = svn.repos.fs(repo)
246 rev_root = svn.fs.revision_root(fsobj, revision)
246 rev_root = svn.fs.revision_root(fsobj, revision)
247
247
248 history_revisions = []
248 history_revisions = []
249 history = svn.fs.node_history(rev_root, path)
249 history = svn.fs.node_history(rev_root, path)
250 history = svn.fs.history_prev(history, cross_copies)
250 history = svn.fs.history_prev(history, cross_copies)
251 while history:
251 while history:
252 __, node_revision = svn.fs.history_location(history)
252 __, node_revision = svn.fs.history_location(history)
253 history_revisions.append(node_revision)
253 history_revisions.append(node_revision)
254 if limit and len(history_revisions) >= limit:
254 if limit and len(history_revisions) >= limit:
255 break
255 break
256 history = svn.fs.history_prev(history, cross_copies)
256 history = svn.fs.history_prev(history, cross_copies)
257 return history_revisions
257 return history_revisions
258
258
259 def node_properties(self, wire, path, revision):
259 def node_properties(self, wire, path, revision):
260 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
261 fsobj = svn.repos.fs(repo)
261 fsobj = svn.repos.fs(repo)
262 rev_root = svn.fs.revision_root(fsobj, revision)
262 rev_root = svn.fs.revision_root(fsobj, revision)
263 return svn.fs.node_proplist(rev_root, path)
263 return svn.fs.node_proplist(rev_root, path)
264
264
265 def file_annotate(self, wire, path, revision):
265 def file_annotate(self, wire, path, revision):
266 abs_path = 'file://' + urllib.pathname2url(
266 abs_path = 'file://' + urllib.pathname2url(
267 vcspath.join(wire['path'], path))
267 vcspath.join(wire['path'], path))
268 file_uri = svn.core.svn_path_canonicalize(abs_path)
268 file_uri = svn.core.svn_path_canonicalize(abs_path)
269
269
270 start_rev = svn_opt_revision_value_t(0)
270 start_rev = svn_opt_revision_value_t(0)
271 peg_rev = svn_opt_revision_value_t(revision)
271 peg_rev = svn_opt_revision_value_t(revision)
272 end_rev = peg_rev
272 end_rev = peg_rev
273
273
274 annotations = []
274 annotations = []
275
275
276 def receiver(line_no, revision, author, date, line, pool):
276 def receiver(line_no, revision, author, date, line, pool):
277 annotations.append((line_no, revision, line))
277 annotations.append((line_no, revision, line))
278
278
279 # TODO: Cannot use blame5, missing typemap function in the swig code
279 # TODO: Cannot use blame5, missing typemap function in the swig code
280 try:
280 try:
281 svn.client.blame2(
281 svn.client.blame2(
282 file_uri, peg_rev, start_rev, end_rev,
282 file_uri, peg_rev, start_rev, end_rev,
283 receiver, svn.client.create_context())
283 receiver, svn.client.create_context())
284 except svn.core.SubversionException as exc:
284 except svn.core.SubversionException as exc:
285 log.exception("Error during blame operation.")
285 log.exception("Error during blame operation.")
286 raise Exception(
286 raise Exception(
287 "Blame not supported or file does not exist at path %s. "
287 "Blame not supported or file does not exist at path %s. "
288 "Error %s." % (path, exc))
288 "Error %s." % (path, exc))
289
289
290 return annotations
290 return annotations
291
291
292 def get_node_type(self, wire, path, rev=None):
292 def get_node_type(self, wire, path, rev=None):
293 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
294 fs_ptr = svn.repos.fs(repo)
294 fs_ptr = svn.repos.fs(repo)
295 if rev is None:
295 if rev is None:
296 rev = svn.fs.youngest_rev(fs_ptr)
296 rev = svn.fs.youngest_rev(fs_ptr)
297 root = svn.fs.revision_root(fs_ptr, rev)
297 root = svn.fs.revision_root(fs_ptr, rev)
298 node = svn.fs.check_path(root, path)
298 node = svn.fs.check_path(root, path)
299 return NODE_TYPE_MAPPING.get(node, None)
299 return NODE_TYPE_MAPPING.get(node, None)
300
300
301 def get_nodes(self, wire, path, revision=None):
301 def get_nodes(self, wire, path, revision=None):
302 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
303 fsobj = svn.repos.fs(repo)
303 fsobj = svn.repos.fs(repo)
304 if revision is None:
304 if revision is None:
305 revision = svn.fs.youngest_rev(fsobj)
305 revision = svn.fs.youngest_rev(fsobj)
306 root = svn.fs.revision_root(fsobj, revision)
306 root = svn.fs.revision_root(fsobj, revision)
307 entries = svn.fs.dir_entries(root, path)
307 entries = svn.fs.dir_entries(root, path)
308 result = []
308 result = []
309 for entry_path, entry_info in entries.iteritems():
309 for entry_path, entry_info in entries.iteritems():
310 result.append(
310 result.append(
311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
312 return result
312 return result
313
313
314 def get_file_content(self, wire, path, rev=None):
314 def get_file_content(self, wire, path, rev=None):
315 repo = self._factory.repo(wire)
315 repo = self._factory.repo(wire)
316 fsobj = svn.repos.fs(repo)
316 fsobj = svn.repos.fs(repo)
317 if rev is None:
317 if rev is None:
318 rev = svn.fs.youngest_revision(fsobj)
318 rev = svn.fs.youngest_revision(fsobj)
319 root = svn.fs.revision_root(fsobj, rev)
319 root = svn.fs.revision_root(fsobj, rev)
320 content = svn.core.Stream(svn.fs.file_contents(root, path))
320 content = svn.core.Stream(svn.fs.file_contents(root, path))
321 return content.read()
321 return content.read()
322
322
323 def get_file_size(self, wire, path, revision=None):
323 def get_file_size(self, wire, path, revision=None):
324 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
325 fsobj = svn.repos.fs(repo)
325 fsobj = svn.repos.fs(repo)
326 if revision is None:
326 if revision is None:
327 revision = svn.fs.youngest_revision(fsobj)
327 revision = svn.fs.youngest_revision(fsobj)
328 root = svn.fs.revision_root(fsobj, revision)
328 root = svn.fs.revision_root(fsobj, revision)
329 size = svn.fs.file_length(root, path)
329 size = svn.fs.file_length(root, path)
330 return size
330 return size
331
331
332 def create_repository(self, wire, compatible_version=None):
332 def create_repository(self, wire, compatible_version=None):
333 log.info('Creating Subversion repository in path "%s"', wire['path'])
333 log.info('Creating Subversion repository in path "%s"', wire['path'])
334 self._factory.repo(wire, create=True,
334 self._factory.repo(wire, create=True,
335 compatible_version=compatible_version)
335 compatible_version=compatible_version)
336
336
337 def import_remote_repository(self, wire, src_url):
337 def import_remote_repository(self, wire, src_url):
338 repo_path = wire['path']
338 repo_path = wire['path']
339 if not self.is_path_valid_repository(wire, repo_path):
339 if not self.is_path_valid_repository(wire, repo_path):
340 raise Exception(
340 raise Exception(
341 "Path %s is not a valid Subversion repository." % repo_path)
341 "Path %s is not a valid Subversion repository." % repo_path)
342
342
343 # TODO: johbo: URL checks ?
343 # TODO: johbo: URL checks ?
344 import subprocess
344 import subprocess
345 rdump = subprocess.Popen(
345 rdump = subprocess.Popen(
346 ['svnrdump', 'dump', '--non-interactive', src_url],
346 ['svnrdump', 'dump', '--non-interactive', src_url],
347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
348 load = subprocess.Popen(
348 load = subprocess.Popen(
349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
350
350
351 # TODO: johbo: This can be a very long operation, might be better
351 # TODO: johbo: This can be a very long operation, might be better
352 # to track some kind of status and provide an api to check if the
352 # to track some kind of status and provide an api to check if the
353 # import is done.
353 # import is done.
354 rdump.wait()
354 rdump.wait()
355 load.wait()
355 load.wait()
356
356
357 if rdump.returncode != 0:
357 if rdump.returncode != 0:
358 errors = rdump.stderr.read()
358 errors = rdump.stderr.read()
359 log.error('svnrdump dump failed: statuscode %s: message: %s',
359 log.error('svnrdump dump failed: statuscode %s: message: %s',
360 rdump.returncode, errors)
360 rdump.returncode, errors)
361 reason = 'UNKNOWN'
361 reason = 'UNKNOWN'
362 if 'svnrdump: E230001:' in errors:
362 if 'svnrdump: E230001:' in errors:
363 reason = 'INVALID_CERTIFICATE'
363 reason = 'INVALID_CERTIFICATE'
364 raise Exception(
364 raise Exception(
365 'Failed to dump the remote repository from %s.' % src_url,
365 'Failed to dump the remote repository from %s.' % src_url,
366 reason)
366 reason)
367 if load.returncode != 0:
367 if load.returncode != 0:
368 raise Exception(
368 raise Exception(
369 'Failed to load the dump of remote repository from %s.' %
369 'Failed to load the dump of remote repository from %s.' %
370 (src_url, ))
370 (src_url, ))
371
371
372 def commit(self, wire, message, author, timestamp, updated, removed):
372 def commit(self, wire, message, author, timestamp, updated, removed):
373 assert isinstance(message, str)
373 assert isinstance(message, str)
374 assert isinstance(author, str)
374 assert isinstance(author, str)
375
375
376 repo = self._factory.repo(wire)
376 repo = self._factory.repo(wire)
377 fsobj = svn.repos.fs(repo)
377 fsobj = svn.repos.fs(repo)
378
378
379 rev = svn.fs.youngest_rev(fsobj)
379 rev = svn.fs.youngest_rev(fsobj)
380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
381 txn_root = svn.fs.txn_root(txn)
381 txn_root = svn.fs.txn_root(txn)
382
382
383 for node in updated:
383 for node in updated:
384 TxnNodeProcessor(node, txn_root).update()
384 TxnNodeProcessor(node, txn_root).update()
385 for node in removed:
385 for node in removed:
386 TxnNodeProcessor(node, txn_root).remove()
386 TxnNodeProcessor(node, txn_root).remove()
387
387
388 commit_id = svn.repos.fs_commit_txn(repo, txn)
388 commit_id = svn.repos.fs_commit_txn(repo, txn)
389
389
390 if timestamp:
390 if timestamp:
391 apr_time = apr_time_t(timestamp)
391 apr_time = apr_time_t(timestamp)
392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
394
394
395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
396 return commit_id
396 return commit_id
397
397
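The updated and removed arguments of commit() are lists of plain node dicts; TxnNodeProcessor further down reads 'path' and 'content' (str) plus an optional 'properties' mapping from them. A hedged sketch of such a payload (paths, content and property values are hypothetical):

    updated = [{
        'path': 'trunk/README.txt',
        'content': 'hello subversion\n',
        'properties': {'svn:eol-style': 'native'},
    }]
    removed = [{'path': 'trunk/obsolete.txt'}]
    # assuming `svn_remote` is a constructed SvnRemote and `wire` holds the repo path:
    # commit_id = svn_remote.commit(wire, 'Add README', 'alice', None, updated, removed)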
398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
399 ignore_whitespace=False, context=3):
399 ignore_whitespace=False, context=3):
400
400
401 wire.update(cache=False)
401 wire.update(cache=False)
402 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
403 diff_creator = SvnDiffer(
403 diff_creator = SvnDiffer(
404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
405 try:
405 try:
406 return diff_creator.generate_diff()
406 return diff_creator.generate_diff()
407 except svn.core.SubversionException as e:
407 except svn.core.SubversionException as e:
408 log.exception(
408 log.exception(
409 "Error during diff operation operation. "
409 "Error during diff operation operation. "
410 "Path might not exist %s, %s" % (path1, path2))
410 "Path might not exist %s, %s" % (path1, path2))
411 return ""
411 return ""
412
412
413 @reraise_safe_exceptions
413 @reraise_safe_exceptions
414 def is_large_file(self, wire, path):
414 def is_large_file(self, wire, path):
415 return False
415 return False
416
416
417 @reraise_safe_exceptions
418 def install_hooks(self, wire, force=False):
419 from vcsserver.hook_utils import install_svn_hooks
420 repo_path = wire['path']
421 binary_dir = settings.BINARY_DIR
422 executable = None
423 if binary_dir:
424 executable = os.path.join(binary_dir, 'python')
425 return install_svn_hooks(
426 repo_path, executable=executable, force_create=force)
427
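install_hooks is the new Subversion remote method in this changeset: it resolves an executable from settings.BINARY_DIR and delegates to vcsserver.hook_utils.install_svn_hooks for the repository in wire['path']. A hedged call sketch, assuming svn_remote is an already-constructed SvnRemote (the path is hypothetical):

    wire = {'path': '/srv/repos/example-svn-repo'}  # hypothetical repository path
    svn_remote.install_hooks(wire, force=True)      # force is passed through as force_create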
417
428
418 class SvnDiffer(object):
429 class SvnDiffer(object):
419 """
430 """
420 Utility to create diffs based on difflib and the Subversion api
431 Utility to create diffs based on difflib and the Subversion api
421 """
432 """
422
433
423 binary_content = False
434 binary_content = False
424
435
425 def __init__(
436 def __init__(
426 self, repo, src_rev, src_path, tgt_rev, tgt_path,
437 self, repo, src_rev, src_path, tgt_rev, tgt_path,
427 ignore_whitespace, context):
438 ignore_whitespace, context):
428 self.repo = repo
439 self.repo = repo
429 self.ignore_whitespace = ignore_whitespace
440 self.ignore_whitespace = ignore_whitespace
430 self.context = context
441 self.context = context
431
442
432 fsobj = svn.repos.fs(repo)
443 fsobj = svn.repos.fs(repo)
433
444
434 self.tgt_rev = tgt_rev
445 self.tgt_rev = tgt_rev
435 self.tgt_path = tgt_path or ''
446 self.tgt_path = tgt_path or ''
436 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
447 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
437 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
448 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
438
449
439 self.src_rev = src_rev
450 self.src_rev = src_rev
440 self.src_path = src_path or self.tgt_path
451 self.src_path = src_path or self.tgt_path
441 self.src_root = svn.fs.revision_root(fsobj, src_rev)
452 self.src_root = svn.fs.revision_root(fsobj, src_rev)
442 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
453 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
443
454
444 self._validate()
455 self._validate()
445
456
446 def _validate(self):
457 def _validate(self):
447 if (self.tgt_kind != svn.core.svn_node_none and
458 if (self.tgt_kind != svn.core.svn_node_none and
448 self.src_kind != svn.core.svn_node_none and
459 self.src_kind != svn.core.svn_node_none and
449 self.src_kind != self.tgt_kind):
460 self.src_kind != self.tgt_kind):
450 # TODO: johbo: proper error handling
461 # TODO: johbo: proper error handling
451 raise Exception(
462 raise Exception(
452 "Source and target are not compatible for diff generation. "
463 "Source and target are not compatible for diff generation. "
453 "Source type: %s, target type: %s" %
464 "Source type: %s, target type: %s" %
454 (self.src_kind, self.tgt_kind))
465 (self.src_kind, self.tgt_kind))
455
466
456 def generate_diff(self):
467 def generate_diff(self):
457 buf = StringIO.StringIO()
468 buf = StringIO.StringIO()
458 if self.tgt_kind == svn.core.svn_node_dir:
469 if self.tgt_kind == svn.core.svn_node_dir:
459 self._generate_dir_diff(buf)
470 self._generate_dir_diff(buf)
460 else:
471 else:
461 self._generate_file_diff(buf)
472 self._generate_file_diff(buf)
462 return buf.getvalue()
473 return buf.getvalue()
463
474
464 def _generate_dir_diff(self, buf):
475 def _generate_dir_diff(self, buf):
465 editor = DiffChangeEditor()
476 editor = DiffChangeEditor()
466 editor_ptr, editor_baton = svn.delta.make_editor(editor)
477 editor_ptr, editor_baton = svn.delta.make_editor(editor)
467 svn.repos.dir_delta2(
478 svn.repos.dir_delta2(
468 self.src_root,
479 self.src_root,
469 self.src_path,
480 self.src_path,
470 '', # src_entry
481 '', # src_entry
471 self.tgt_root,
482 self.tgt_root,
472 self.tgt_path,
483 self.tgt_path,
473 editor_ptr, editor_baton,
484 editor_ptr, editor_baton,
474 authorization_callback_allow_all,
485 authorization_callback_allow_all,
475 False, # text_deltas
486 False, # text_deltas
476 svn.core.svn_depth_infinity, # depth
487 svn.core.svn_depth_infinity, # depth
477 False, # entry_props
488 False, # entry_props
478 False, # ignore_ancestry
489 False, # ignore_ancestry
479 )
490 )
480
491
481 for path, __, change in sorted(editor.changes):
492 for path, __, change in sorted(editor.changes):
482 self._generate_node_diff(
493 self._generate_node_diff(
483 buf, change, path, self.tgt_path, path, self.src_path)
494 buf, change, path, self.tgt_path, path, self.src_path)
484
495
485 def _generate_file_diff(self, buf):
496 def _generate_file_diff(self, buf):
486 change = None
497 change = None
487 if self.src_kind == svn.core.svn_node_none:
498 if self.src_kind == svn.core.svn_node_none:
488 change = "add"
499 change = "add"
489 elif self.tgt_kind == svn.core.svn_node_none:
500 elif self.tgt_kind == svn.core.svn_node_none:
490 change = "delete"
501 change = "delete"
491 tgt_base, tgt_path = vcspath.split(self.tgt_path)
502 tgt_base, tgt_path = vcspath.split(self.tgt_path)
492 src_base, src_path = vcspath.split(self.src_path)
503 src_base, src_path = vcspath.split(self.src_path)
493 self._generate_node_diff(
504 self._generate_node_diff(
494 buf, change, tgt_path, tgt_base, src_path, src_base)
505 buf, change, tgt_path, tgt_base, src_path, src_base)
495
506
496 def _generate_node_diff(
507 def _generate_node_diff(
497 self, buf, change, tgt_path, tgt_base, src_path, src_base):
508 self, buf, change, tgt_path, tgt_base, src_path, src_base):
498
509
499 if self.src_rev == self.tgt_rev and tgt_base == src_base:
510 if self.src_rev == self.tgt_rev and tgt_base == src_base:
500 # keep behaviour consistent with git/hg: return an empty diff if
511 # keep behaviour consistent with git/hg: return an empty diff if
501 # we compare the same revisions
512 # we compare the same revisions
502 return
513 return
503
514
504 tgt_full_path = vcspath.join(tgt_base, tgt_path)
515 tgt_full_path = vcspath.join(tgt_base, tgt_path)
505 src_full_path = vcspath.join(src_base, src_path)
516 src_full_path = vcspath.join(src_base, src_path)
506
517
507 self.binary_content = False
518 self.binary_content = False
508 mime_type = self._get_mime_type(tgt_full_path)
519 mime_type = self._get_mime_type(tgt_full_path)
509
520
510 if mime_type and not mime_type.startswith('text'):
521 if mime_type and not mime_type.startswith('text'):
511 self.binary_content = True
522 self.binary_content = True
512 buf.write("=" * 67 + '\n')
523 buf.write("=" * 67 + '\n')
513 buf.write("Cannot display: file marked as a binary type.\n")
524 buf.write("Cannot display: file marked as a binary type.\n")
514 buf.write("svn:mime-type = %s\n" % mime_type)
525 buf.write("svn:mime-type = %s\n" % mime_type)
515 buf.write("Index: %s\n" % (tgt_path, ))
526 buf.write("Index: %s\n" % (tgt_path, ))
516 buf.write("=" * 67 + '\n')
527 buf.write("=" * 67 + '\n')
517 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
528 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
518 'tgt_path': tgt_path})
529 'tgt_path': tgt_path})
519
530
520 if change == 'add':
531 if change == 'add':
521 # TODO: johbo: SVN is missing a zero here compared to git
532 # TODO: johbo: SVN is missing a zero here compared to git
522 buf.write("new file mode 10644\n")
533 buf.write("new file mode 10644\n")
523
534
524 #TODO(marcink): intro to binary detection of svn patches
535 #TODO(marcink): intro to binary detection of svn patches
525 # if self.binary_content:
536 # if self.binary_content:
526 # buf.write('GIT binary patch\n')
537 # buf.write('GIT binary patch\n')
527
538
528 buf.write("--- /dev/null\t(revision 0)\n")
539 buf.write("--- /dev/null\t(revision 0)\n")
529 src_lines = []
540 src_lines = []
530 else:
541 else:
531 if change == 'delete':
542 if change == 'delete':
532 buf.write("deleted file mode 10644\n")
543 buf.write("deleted file mode 10644\n")
533
544
534 #TODO(marcink): intro to binary detection of svn patches
545 #TODO(marcink): intro to binary detection of svn patches
535 # if self.binary_content:
546 # if self.binary_content:
536 # buf.write('GIT binary patch\n')
547 # buf.write('GIT binary patch\n')
537
548
538 buf.write("--- a/%s\t(revision %s)\n" % (
549 buf.write("--- a/%s\t(revision %s)\n" % (
539 src_path, self.src_rev))
550 src_path, self.src_rev))
540 src_lines = self._svn_readlines(self.src_root, src_full_path)
551 src_lines = self._svn_readlines(self.src_root, src_full_path)
541
552
542 if change == 'delete':
553 if change == 'delete':
543 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
554 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
544 tgt_lines = []
555 tgt_lines = []
545 else:
556 else:
546 buf.write("+++ b/%s\t(revision %s)\n" % (
557 buf.write("+++ b/%s\t(revision %s)\n" % (
547 tgt_path, self.tgt_rev))
558 tgt_path, self.tgt_rev))
548 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
559 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
549
560
550 if not self.binary_content:
561 if not self.binary_content:
551 udiff = svn_diff.unified_diff(
562 udiff = svn_diff.unified_diff(
552 src_lines, tgt_lines, context=self.context,
563 src_lines, tgt_lines, context=self.context,
553 ignore_blank_lines=self.ignore_whitespace,
564 ignore_blank_lines=self.ignore_whitespace,
554 ignore_case=False,
565 ignore_case=False,
555 ignore_space_changes=self.ignore_whitespace)
566 ignore_space_changes=self.ignore_whitespace)
556 buf.writelines(udiff)
567 buf.writelines(udiff)
557
568
558 def _get_mime_type(self, path):
569 def _get_mime_type(self, path):
559 try:
570 try:
560 mime_type = svn.fs.node_prop(
571 mime_type = svn.fs.node_prop(
561 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
572 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
562 except svn.core.SubversionException:
573 except svn.core.SubversionException:
563 mime_type = svn.fs.node_prop(
574 mime_type = svn.fs.node_prop(
564 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
575 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
565 return mime_type
576 return mime_type
566
577
567 def _svn_readlines(self, fs_root, node_path):
578 def _svn_readlines(self, fs_root, node_path):
568 if self.binary_content:
579 if self.binary_content:
569 return []
580 return []
570 node_kind = svn.fs.check_path(fs_root, node_path)
581 node_kind = svn.fs.check_path(fs_root, node_path)
571 if node_kind not in (
582 if node_kind not in (
572 svn.core.svn_node_file, svn.core.svn_node_symlink):
583 svn.core.svn_node_file, svn.core.svn_node_symlink):
573 return []
584 return []
574 content = svn.core.Stream(
585 content = svn.core.Stream(
575 svn.fs.file_contents(fs_root, node_path)).read()
586 svn.fs.file_contents(fs_root, node_path)).read()
576 return content.splitlines(True)
587 return content.splitlines(True)
577
588
578
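SvnDiffer is driven by SvnRemote.diff above, but it can also be exercised directly against an opened repository object. A hedged sketch (repository path, revisions and file paths are hypothetical):

    import svn.core
    import svn.repos

    repo = svn.repos.open(
        svn.core.svn_path_canonicalize('/srv/repos/example-svn-repo'))
    differ = SvnDiffer(repo, 10, 'trunk/file.txt', 11, 'trunk/file.txt',
                       ignore_whitespace=False, context=3)
    print(differ.generate_diff())  # git-style unified diff text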
589
590
579 class DiffChangeEditor(svn.delta.Editor):
591 class DiffChangeEditor(svn.delta.Editor):
580 """
592 """
581 Records changes between two given revisions
593 Records changes between two given revisions
582 """
594 """
583
595
584 def __init__(self):
596 def __init__(self):
585 self.changes = []
597 self.changes = []
586
598
587 def delete_entry(self, path, revision, parent_baton, pool=None):
599 def delete_entry(self, path, revision, parent_baton, pool=None):
588 self.changes.append((path, None, 'delete'))
600 self.changes.append((path, None, 'delete'))
589
601
590 def add_file(
602 def add_file(
591 self, path, parent_baton, copyfrom_path, copyfrom_revision,
603 self, path, parent_baton, copyfrom_path, copyfrom_revision,
592 file_pool=None):
604 file_pool=None):
593 self.changes.append((path, 'file', 'add'))
605 self.changes.append((path, 'file', 'add'))
594
606
595 def open_file(self, path, parent_baton, base_revision, file_pool=None):
607 def open_file(self, path, parent_baton, base_revision, file_pool=None):
596 self.changes.append((path, 'file', 'change'))
608 self.changes.append((path, 'file', 'change'))
597
609
598
610
599 def authorization_callback_allow_all(root, path, pool):
611 def authorization_callback_allow_all(root, path, pool):
600 return True
612 return True
601
613
602
614
603 class TxnNodeProcessor(object):
615 class TxnNodeProcessor(object):
604 """
616 """
605 Utility to process the change of one node within a transaction root.
617 Utility to process the change of one node within a transaction root.
606
618
607 It encapsulates the knowledge of how to add, update or remove
619 It encapsulates the knowledge of how to add, update or remove
608 a node for a given transaction root. The purpose is to support the method
620 a node for a given transaction root. The purpose is to support the method
609 `SvnRemote.commit`.
621 `SvnRemote.commit`.
610 """
622 """
611
623
612 def __init__(self, node, txn_root):
624 def __init__(self, node, txn_root):
613 assert isinstance(node['path'], str)
625 assert isinstance(node['path'], str)
614
626
615 self.node = node
627 self.node = node
616 self.txn_root = txn_root
628 self.txn_root = txn_root
617
629
618 def update(self):
630 def update(self):
619 self._ensure_parent_dirs()
631 self._ensure_parent_dirs()
620 self._add_file_if_node_does_not_exist()
632 self._add_file_if_node_does_not_exist()
621 self._update_file_content()
633 self._update_file_content()
622 self._update_file_properties()
634 self._update_file_properties()
623
635
624 def remove(self):
636 def remove(self):
625 svn.fs.delete(self.txn_root, self.node['path'])
637 svn.fs.delete(self.txn_root, self.node['path'])
626 # TODO: Clean up directory if empty
638 # TODO: Clean up directory if empty
627
639
628 def _ensure_parent_dirs(self):
640 def _ensure_parent_dirs(self):
629 curdir = vcspath.dirname(self.node['path'])
641 curdir = vcspath.dirname(self.node['path'])
630 dirs_to_create = []
642 dirs_to_create = []
631 while not self._svn_path_exists(curdir):
643 while not self._svn_path_exists(curdir):
632 dirs_to_create.append(curdir)
644 dirs_to_create.append(curdir)
633 curdir = vcspath.dirname(curdir)
645 curdir = vcspath.dirname(curdir)
634
646
635 for curdir in reversed(dirs_to_create):
647 for curdir in reversed(dirs_to_create):
636 log.debug('Creating missing directory "%s"', curdir)
648 log.debug('Creating missing directory "%s"', curdir)
637 svn.fs.make_dir(self.txn_root, curdir)
649 svn.fs.make_dir(self.txn_root, curdir)
638
650
639 def _svn_path_exists(self, path):
651 def _svn_path_exists(self, path):
640 path_status = svn.fs.check_path(self.txn_root, path)
652 path_status = svn.fs.check_path(self.txn_root, path)
641 return path_status != svn.core.svn_node_none
653 return path_status != svn.core.svn_node_none
642
654
643 def _add_file_if_node_does_not_exist(self):
655 def _add_file_if_node_does_not_exist(self):
644 kind = svn.fs.check_path(self.txn_root, self.node['path'])
656 kind = svn.fs.check_path(self.txn_root, self.node['path'])
645 if kind == svn.core.svn_node_none:
657 if kind == svn.core.svn_node_none:
646 svn.fs.make_file(self.txn_root, self.node['path'])
658 svn.fs.make_file(self.txn_root, self.node['path'])
647
659
648 def _update_file_content(self):
660 def _update_file_content(self):
649 assert isinstance(self.node['content'], str)
661 assert isinstance(self.node['content'], str)
650 handler, baton = svn.fs.apply_textdelta(
662 handler, baton = svn.fs.apply_textdelta(
651 self.txn_root, self.node['path'], None, None)
663 self.txn_root, self.node['path'], None, None)
652 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
664 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
653
665
654 def _update_file_properties(self):
666 def _update_file_properties(self):
655 properties = self.node.get('properties', {})
667 properties = self.node.get('properties', {})
656 for key, value in properties.iteritems():
668 for key, value in properties.iteritems():
657 svn.fs.change_node_prop(
669 svn.fs.change_node_prop(
658 self.txn_root, self.node['path'], key, value)
670 self.txn_root, self.node['path'], key, value)
659
671
660
672
661 def apr_time_t(timestamp):
673 def apr_time_t(timestamp):
662 """
674 """
663 Convert a Python timestamp into APR timestamp type apr_time_t
675 Convert a Python timestamp into APR timestamp type apr_time_t
664 """
676 """
665 return timestamp * 1E6
677 return timestamp * 1E6
666
678
667
679
668 def svn_opt_revision_value_t(num):
680 def svn_opt_revision_value_t(num):
669 """
681 """
670 Put `num` into a `svn_opt_revision_value_t` structure.
682 Put `num` into a `svn_opt_revision_value_t` structure.
671 """
683 """
672 value = svn.core.svn_opt_revision_value_t()
684 value = svn.core.svn_opt_revision_value_t()
673 value.number = num
685 value.number = num
674 revision = svn.core.svn_opt_revision_t()
686 revision = svn.core.svn_opt_revision_t()
675 revision.kind = svn.core.svn_opt_revision_number
687 revision.kind = svn.core.svn_opt_revision_number
676 revision.value = value
688 revision.value = value
677 return revision
689 return revision
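Two small conversion helpers close the module: apr_time_t turns a POSIX timestamp in seconds into APR microseconds (used when commit() stamps svn:date), and svn_opt_revision_value_t wraps a revision number for the client API (used by file_annotate). A quick hedged illustration with made-up values:

    ts = 1520000000                     # hypothetical POSIX timestamp, in seconds
    apr_time_t(ts)                      # -> 1.52e+15, the same instant in microseconds

    rev = svn_opt_revision_value_t(42)  # hypothetical revision number
    rev.kind == svn.core.svn_opt_revision_number  # True
    rev.value.number                    # 42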
@@ -1,57 +1,58 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19
19
20 import pytest
20 import pytest
21
21
22
22
23 def pytest_addoption(parser):
23 def pytest_addoption(parser):
24 parser.addoption(
24 parser.addoption(
25 '--repeat', type=int, default=100,
25 '--repeat', type=int, default=100,
26 help="Number of repetitions in performance tests.")
26 help="Number of repetitions in performance tests.")
27
27
28
28
29 @pytest.fixture(scope='session')
29 @pytest.fixture(scope='session')
30 def repeat(request):
30 def repeat(request):
31 """
31 """
32 The number of repetitions is based on this fixture.
32 The number of repetitions is based on this fixture.
33
33
34 Slower calls may divide it by 10 or 100. It is chosen so that the
34 Slower calls may divide it by 10 or 100. It is chosen so that the
35 tests are not too slow in our default test suite.
35 tests are not too slow in our default test suite.
36 """
36 """
37 return request.config.getoption('--repeat')
37 return request.config.getoption('--repeat')
38
38
39
39
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print 'Using vcsserver port %s' % (port, )
44 return port
44 return port
45
45
46
46
47 def get_available_port():
47 def get_available_port():
48 family = socket.AF_INET
48 family = socket.AF_INET
49 socktype = socket.SOCK_STREAM
49 socktype = socket.SOCK_STREAM
50 host = '127.0.0.1'
50 host = '127.0.0.1'
51
51
52 mysocket = socket.socket(family, socktype)
52 mysocket = socket.socket(family, socktype)
53 mysocket.bind((host, 0))
53 mysocket.bind((host, 0))
54 port = mysocket.getsockname()[1]
54 port = mysocket.getsockname()[1]
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
58
@@ -1,71 +1,86 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21
22 import configobj
22 import configobj
23
23
24
24
25 class ContextINI(object):
25 class ContextINI(object):
26 """
26 """
27 Allows creating a new test.ini file as a copy of an existing one with edited
27 Allows creating a new test.ini file as a copy of an existing one with edited
28 data. If the existing file is not present, a new one is created. Example usage::
28 data. If the existing file is not present, a new one is created. Example usage::
29
29
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 print 'vcsserver --config=%s' % new_test_ini_path
31 print 'vcsserver --config=%s' % new_test_ini_path
32 """
32 """
33
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 destroy=True):
36 self.ini_file_path = ini_file_path
37 self.ini_params = ini_params
38 self.new_path = None
39 self.new_path_prefix = new_file_prefix or 'test'
40 self.destroy = destroy
41
42 def __enter__(self):
43 _, pref = tempfile.mkstemp()
44 loc = tempfile.gettempdir()
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 pref, self.new_path_prefix, self.ini_file_path))
47
48 # copy the ini file and modify it according to the params if we re-use a file
49 if os.path.isfile(self.ini_file_path):
50 shutil.copy(self.ini_file_path, self.new_path)
51 else:
52 # create a new empty file for ConfigObj to write to
53 with open(self.new_path, 'wb'):
54 pass
55
56 config = configobj.ConfigObj(
57 self.new_path, file_error=True, write_empty_values=True)
58
59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
61 key, val = ini_params.items()[0]
62 if section not in config:
63 config[section] = {}
64 config[section][key] = val
65
66 config.write()
67 return self.new_path
68
69 def __exit__(self, exc_type, exc_val, exc_tb):
70 if self.destroy:
71 os.remove(self.new_path)
72
73
74 def no_newline_id_generator(test_name):
75 """
76 Generates a test name without spaces or newline characters. Used for
77 nicer output of test progress.
78 """
79 org_name = test_name
80 test_name = test_name\
81 .replace('\n', '_N') \
82 .replace('\r', '_N') \
83 .replace('\t', '_T') \
84 .replace(' ', '_S')
85
86 return test_name or 'test-with-empty-name'
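As a usage sketch, the two helpers above are typically wired into pytest as shown below; the parametrized values, section and key names, and the import path are illustrative assumptions, not part of this changeset:

    import os
    import pytest

    # assumed import location; in the real test suite these live in a conftest/helper module
    from vcsserver.tests.conftest import ContextINI, no_newline_id_generator

    @pytest.mark.parametrize('log_level', [
        'debug',
        'info\nwith newline',   # would normally produce an ugly test id
    ], ids=no_newline_id_generator)
    def test_override_is_written(log_level):
        # ContextINI yields the path of a temporary copy of test.ini with the
        # overrides applied, and removes it on exit because destroy=True.
        with ContextINI('test.ini', [{'handler_console': {'level': log_level}}]) as new_ini_path:
            print('vcsserver --config=%s' % new_ini_path)
            assert os.path.isfile(new_ini_path)

Passing no_newline_id_generator as ids keeps the generated test ids on a single line, which is the whole point of the helper.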
@@ -1,75 +1,82 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
18
19 log = logging.getLogger(__name__)
20
21
22 def safe_int(val, default=None):
23 """
24 Returns int() of val; if val is not convertible to int, returns default
25 instead.
26
27 :param val:
28 :param default:
29 """
30
31 try:
32 val = int(val)
33 except (ValueError, TypeError):
34 val = default
35
36 return val
37
38
39 def safe_str(unicode_, to_encoding=['utf8']):
40 """
41 Safe str function. Does a few tricks to turn unicode_ into a string.
42
43 In case of UnicodeEncodeError, we try to return it with the encoding detected
44 by the chardet library; if that fails, fall back to a string with errors replaced.
45
46 :param unicode_: unicode to encode
47 :rtype: str
48 :returns: str object
49 """
50
51 # if it's not a basestring, cast to str
52 if not isinstance(unicode_, basestring):
53 return str(unicode_)
54
55 if isinstance(unicode_, str):
56 return unicode_
57
58 if not isinstance(to_encoding, (list, tuple)):
59 to_encoding = [to_encoding]
60
61 for enc in to_encoding:
62 try:
63 return unicode_.encode(enc)
64 except UnicodeEncodeError:
65 pass
66
67 try:
68 import chardet
69 encoding = chardet.detect(unicode_)['encoding']
70 if encoding is None:
71 raise UnicodeEncodeError('utf8', unicode_, 0, 1, 'encoding detection failed')
72
73 return unicode_.encode(encoding)
74 except (ImportError, UnicodeEncodeError):
75 return unicode_.encode(to_encoding[0], 'replace')
76
77
78 class AttributeDict(dict):
79 def __getattr__(self, attr):
80 return self.get(attr, None)
81 __setattr__ = dict.__setitem__
82 __delattr__ = dict.__delitem__
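A short usage sketch of the three helpers added above; the import path and sample values are illustrative assumptions, and the snippet targets Python 2, which this module assumes given its use of basestring and unicode:

    from vcsserver.utils import safe_int, safe_str, AttributeDict  # assumed import path

    # safe_int: tolerant conversion with a fallback value
    assert safe_int('42') == 42
    assert safe_int('not-a-number', default=0) == 0

    # safe_str: encodes unicode, falling back to chardet / 'replace' on failure
    assert safe_str(u'caf\xe9') == 'caf\xc3\xa9'   # utf8-encoded bytes

    # AttributeDict: dict whose keys are also readable/writable as attributes
    config = AttributeDict({'host': '127.0.0.1'})
    config.port = 9900
    assert config.host == '127.0.0.1'
    assert config.missing is None   # unknown attributes return None instead of raising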