##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r411:ce60257b merge stable
parent child Browse files
Show More
@@ -0,0 +1,20 b''
1 diff -rup Beaker-1.9.1-orig/beaker/container.py Beaker-1.9.1/beaker/container.py
2 --- Beaker-1.9.1-orig/beaker/container.py 2018-04-10 10:23:04.000000000 +0200
3 +++ Beaker-1.9.1/beaker/container.py 2018-04-10 10:23:34.000000000 +0200
4 @@ -353,13 +353,13 @@ class Value(object):
5 debug("get_value returning old value while new one is created")
6 return value
7 else:
8 - debug("lock_creatfunc (didnt wait)")
9 + debug("lock_creatfunc `%s` (didnt wait)", self.createfunc.__name__)
10 has_createlock = True
11
12 if not has_createlock:
13 - debug("lock_createfunc (waiting)")
14 + debug("lock_createfunc `%s` (waiting)", self.createfunc.__name__)
15 creation_lock.acquire()
16 - debug("lock_createfunc (waited)")
17 + debug("lock_createfunc `%s` (waited)", self.createfunc.__name__)
18
19 try:
20 # see if someone created the value already
@@ -0,0 +1,154 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20 import re
21 import os
22 import sys
23 import datetime
24 import logging
25 import pkg_resources
26
27 import vcsserver
28
29 log = logging.getLogger(__name__)
30
31
def install_git_hooks(repo_path, bare, executable=None, force_create=False):
    """
    Creates RhodeCode pre-receive and post-receive hooks inside a git
    repository, rendered from the bundled hook templates.

    :param repo_path: path to repository
    :param bare: whether the repository is bare; non-bare repositories keep
        their hooks under ``.git/hooks`` instead of ``hooks``
    :param executable: binary executable to put in the hooks
    :param force_create: Create even if same name hook exists
    """
    executable = executable or sys.executable
    hooks_path = os.path.join(repo_path, 'hooks')
    if not bare:
        hooks_path = os.path.join(repo_path, '.git', 'hooks')
    if not os.path.isdir(hooks_path):
        # NOTE: 0o777 (== legacy 0777) is valid syntax on both Python 2.6+
        # and Python 3; the effective mode is still reduced by the umask.
        os.makedirs(hooks_path, mode=0o777)

    tmpl_post = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))

    path = ''  # not used for now
    timestamp = datetime.datetime.utcnow().isoformat()

    for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
        log.debug('Installing git hook in repo %s', repo_path)
        _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
        # only overwrite hooks we created ourselves, unless forced
        _rhodecode_hook = check_rhodecode_hook(_hook_file)

        if _rhodecode_hook or force_create:
            log.debug('writing git %s hook file at %s !', h_type, _hook_file)
            try:
                with open(_hook_file, 'wb') as f:
                    # fill in the template placeholders with runtime values
                    template = template.replace(
                        '_TMPL_', vcsserver.__version__)
                    template = template.replace('_DATE_', timestamp)
                    template = template.replace('_ENV_', executable)
                    template = template.replace('_PATH_', path)
                    f.write(template)
                os.chmod(_hook_file, 0o755)
            except IOError:
                # best-effort: log and continue with the remaining hooks
                log.exception('error writing hook file %s', _hook_file)
        else:
            log.debug('skipping writing hook file')

    return True
79
80
def install_svn_hooks(repo_path, executable=None, force_create=False):
    """
    Creates RhodeCode pre-commit and post-commit hooks inside a svn
    repository, rendered from the bundled hook templates.

    :param repo_path: path to repository
    :param executable: binary executable to put in the hooks
    :param force_create: Create even if same name hook exists
    """
    executable = executable or sys.executable
    hooks_path = os.path.join(repo_path, 'hooks')
    if not os.path.isdir(hooks_path):
        # NOTE: 0o777 (== legacy 0777) is valid syntax on both Python 2.6+
        # and Python 3; the effective mode is still reduced by the umask.
        os.makedirs(hooks_path, mode=0o777)

    tmpl_post = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))

    path = ''  # not used for now
    timestamp = datetime.datetime.utcnow().isoformat()

    for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
        log.debug('Installing svn hook in repo %s', repo_path)
        _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
        # only overwrite hooks we created ourselves, unless forced
        _rhodecode_hook = check_rhodecode_hook(_hook_file)

        if _rhodecode_hook or force_create:
            log.debug('writing svn %s hook file at %s !', h_type, _hook_file)

            try:
                with open(_hook_file, 'wb') as f:
                    # fill in the template placeholders with runtime values
                    template = template.replace(
                        '_TMPL_', vcsserver.__version__)
                    template = template.replace('_DATE_', timestamp)
                    template = template.replace('_ENV_', executable)
                    template = template.replace('_PATH_', path)

                    f.write(template)
                os.chmod(_hook_file, 0o755)
            except IOError:
                # best-effort: log and continue with the remaining hooks
                log.exception('error writing hook file %s', _hook_file)
        else:
            log.debug('skipping writing hook file')

    return True
128
129
def check_rhodecode_hook(hook_path):
    """
    Check if the hook at ``hook_path`` was created by RhodeCode.

    Returns True when the file does not exist (safe to install) or when it
    contains an ``RC_HOOK_VER`` marker; False for any foreign hook file.
    """
    if not os.path.exists(hook_path):
        # nothing installed yet, treat as ours so installation can proceed
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    hook_content = read_hook_content(hook_path)
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if not match:
        return False

    try:
        log.debug('got version %s from hooks.', match.group(1))
        return True
    except Exception:
        log.exception("Exception while reading the hook version.")
    return False
149
150
def read_hook_content(hook_path):
    """Return the raw contents of the hook file at ``hook_path``."""
    with open(hook_path, 'rb') as hook_file:
        return hook_file.read()
@@ -0,0 +1,51 b''
#!_ENV_
# RhodeCode git post-receive hook template. Placeholders (_ENV_, _PATH_,
# _DATE_, _TMPL_) are substituted at install time by hook_utils.
import os
import sys
# _PATH_ is filled in at install time; when non-empty it fully replaces
# sys.path so the hook runs against the vcsserver environment's packages.
path_adjust = [_PATH_]

if path_adjust:
    sys.path = path_adjust

try:
    from vcsserver import hooks
except ImportError:
    if os.environ.get('RC_DEBUG_GIT_HOOK'):
        import traceback
        print traceback.format_exc()
    hooks = None


# TIMESTAMP: _DATE_
# RC_HOOK_VER is the marker check_rhodecode_hook() greps for to recognize
# a RhodeCode-managed hook.
RC_HOOK_VER = '_TMPL_'


def main():
    if hooks is None:
        # exit with success if we cannot import vcsserver.hooks !!
        # this allows simply push to this repo even without rhodecode
        sys.exit(0)

    if os.environ.get('RC_SKIP_HOOKS'):
        sys.exit(0)

    repo_path = os.getcwd()
    push_data = sys.stdin.readlines()
    os.environ['RC_HOOK_VER'] = RC_HOOK_VER
    # os.environ is modified here by a subprocess call that
    # runs git and later git executes this hook.
    # Environ gets some additional info from rhodecode system
    # like IP or username from basic-auth
    try:
        result = hooks.git_post_receive(repo_path, push_data, os.environ)
        sys.exit(result)
    except Exception as error:
        # TODO: johbo: Improve handling of this special case
        if not getattr(error, '_vcs_kind', None) == 'repo_locked':
            raise
        print 'ERROR:', error
        sys.exit(1)
    sys.exit(0)


if __name__ == '__main__':
    main()
@@ -0,0 +1,51 b''
#!_ENV_
# RhodeCode git pre-receive hook template. Placeholders (_ENV_, _PATH_,
# _DATE_, _TMPL_) are substituted at install time by hook_utils.
import os
import sys
# _PATH_ is filled in at install time; when non-empty it fully replaces
# sys.path so the hook runs against the vcsserver environment's packages.
path_adjust = [_PATH_]

if path_adjust:
    sys.path = path_adjust

try:
    from vcsserver import hooks
except ImportError:
    if os.environ.get('RC_DEBUG_GIT_HOOK'):
        import traceback
        print traceback.format_exc()
    hooks = None


# TIMESTAMP: _DATE_
# RC_HOOK_VER is the marker check_rhodecode_hook() greps for to recognize
# a RhodeCode-managed hook.
RC_HOOK_VER = '_TMPL_'


def main():
    if hooks is None:
        # exit with success if we cannot import vcsserver.hooks !!
        # this allows simply push to this repo even without rhodecode
        sys.exit(0)

    if os.environ.get('RC_SKIP_HOOKS'):
        sys.exit(0)

    repo_path = os.getcwd()
    push_data = sys.stdin.readlines()
    os.environ['RC_HOOK_VER'] = RC_HOOK_VER
    # os.environ is modified here by a subprocess call that
    # runs git and later git executes this hook.
    # Environ gets some additional info from rhodecode system
    # like IP or username from basic-auth
    try:
        result = hooks.git_pre_receive(repo_path, push_data, os.environ)
        sys.exit(result)
    except Exception as error:
        # TODO: johbo: Improve handling of this special case
        if not getattr(error, '_vcs_kind', None) == 'repo_locked':
            raise
        print 'ERROR:', error
        sys.exit(1)
    sys.exit(0)


if __name__ == '__main__':
    main()
@@ -0,0 +1,50 b''
#!_ENV_
# RhodeCode svn post-commit hook template. Placeholders (_ENV_, _PATH_,
# _DATE_, _TMPL_) are substituted at install time by hook_utils.

import os
import sys
# _PATH_ is filled in at install time; when non-empty it fully replaces
# sys.path so the hook runs against the vcsserver environment's packages.
path_adjust = [_PATH_]

if path_adjust:
    sys.path = path_adjust

try:
    from vcsserver import hooks
except ImportError:
    if os.environ.get('RC_DEBUG_SVN_HOOK'):
        import traceback
        print traceback.format_exc()
    hooks = None


# TIMESTAMP: _DATE_
# RC_HOOK_VER is the marker check_rhodecode_hook() greps for to recognize
# a RhodeCode-managed hook.
RC_HOOK_VER = '_TMPL_'


def main():
    if hooks is None:
        # exit with success if we cannot import vcsserver.hooks !!
        # this allows simply push to this repo even without rhodecode
        sys.exit(0)

    if os.environ.get('RC_SKIP_HOOKS'):
        sys.exit(0)
    repo_path = os.getcwd()
    # svn passes hook arguments on the command line, not via stdin
    push_data = sys.argv[1:]

    os.environ['RC_HOOK_VER'] = RC_HOOK_VER

    try:
        result = hooks.svn_post_commit(repo_path, push_data, os.environ)
        sys.exit(result)
    except Exception as error:
        # TODO: johbo: Improve handling of this special case
        if not getattr(error, '_vcs_kind', None) == 'repo_locked':
            raise
        print 'ERROR:', error
        sys.exit(1)
    sys.exit(0)



if __name__ == '__main__':
    main()
@@ -0,0 +1,52 b''
#!_ENV_
# RhodeCode svn pre-commit hook template. Placeholders (_ENV_, _PATH_,
# _DATE_, _TMPL_) are substituted at install time by hook_utils.

import os
import sys
# _PATH_ is filled in at install time; when non-empty it fully replaces
# sys.path so the hook runs against the vcsserver environment's packages.
path_adjust = [_PATH_]

if path_adjust:
    sys.path = path_adjust

try:
    from vcsserver import hooks
except ImportError:
    if os.environ.get('RC_DEBUG_SVN_HOOK'):
        import traceback
        print traceback.format_exc()
    hooks = None


# TIMESTAMP: _DATE_
# RC_HOOK_VER is the marker check_rhodecode_hook() greps for to recognize
# a RhodeCode-managed hook.
RC_HOOK_VER = '_TMPL_'


def main():
    # reject commits entirely on read-only SSH connections
    if os.environ.get('SSH_READ_ONLY') == '1':
        sys.stderr.write('Only read-only access is allowed')
        sys.exit(1)

    if hooks is None:
        # exit with success if we cannot import vcsserver.hooks !!
        # this allows simply push to this repo even without rhodecode
        sys.exit(0)
    if os.environ.get('RC_SKIP_HOOKS'):
        sys.exit(0)
    repo_path = os.getcwd()
    # svn passes hook arguments on the command line, not via stdin
    push_data = sys.argv[1:]

    os.environ['RC_HOOK_VER'] = RC_HOOK_VER

    try:
        result = hooks.svn_pre_commit(repo_path, push_data, os.environ)
        sys.exit(result)
    except Exception as error:
        # TODO: johbo: Improve handling of this special case
        if not getattr(error, '_vcs_kind', None) == 'repo_locked':
            raise
        print 'ERROR:', error
        sys.exit(1)
    sys.exit(0)


if __name__ == '__main__':
    main()
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,206 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import sys
20 import stat
21 import pytest
22 import vcsserver
23 import tempfile
24 from vcsserver import hook_utils
25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.utils import AttributeDict
27
28
class TestCheckRhodecodeHook(object):
    """Tests for hook_utils.check_rhodecode_hook signature detection."""

    def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
        hook_path = os.path.join(str(tmpdir), 'fake_hook_file.py')
        with open(hook_path, 'wb') as hook_file:
            hook_file.write('dummy test')
        assert hook_utils.check_rhodecode_hook(hook_path) is False

    def test_returns_true_when_no_hook_file_found(self, tmpdir):
        missing_path = os.path.join(
            str(tmpdir), 'fake_hook_file_not_existing.py')
        assert hook_utils.check_rhodecode_hook(missing_path)

    @pytest.mark.parametrize("file_content, expected_result", [
        ("RC_HOOK_VER = '3.3.3'\n", True),
        ("RC_HOOK = '3.3.3'\n", False),
    ], ids=no_newline_id_generator)
    def test_signatures(self, file_content, expected_result, tmpdir):
        hook_path = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
        with open(hook_path, 'wb') as hook_file:
            hook_file.write(file_content)

        assert hook_utils.check_rhodecode_hook(hook_path) is expected_result
55
56
class BaseInstallHooks(object):
    """Shared fixtures and assertions for git/svn hook installation tests."""

    # hook file names the concrete test class expects to be installed
    HOOK_FILES = ()

    def _check_hook_file_mode(self, file_path):
        assert os.path.exists(file_path), 'path %s missing' % file_path
        file_mode = stat.S_IMODE(os.stat(file_path).st_mode)
        # installed hooks must be executable (rwxr-xr-x)
        assert file_mode == int('755', 8)

    def _check_hook_file_content(self, file_path, executable):
        executable = executable or sys.executable
        with open(file_path, 'rt') as hook_file:
            content = hook_file.read()

        assert content.strip().startswith('#!{}'.format(executable))
        assert "\nRC_HOOK_VER = '{}'\n".format(
            vcsserver.__version__) in content

    def _create_fake_hook(self, file_path, content):
        with open(file_path, 'w') as hook_file:
            hook_file.write(content)

    def create_dummy_repo(self, repo_type):
        base_dir = tempfile.mkdtemp()
        repo = AttributeDict()
        if repo_type == 'git':
            repo.path = os.path.join(
                base_dir, 'test_git_hooks_installation_repo')
            # makedirs creates repo.path as a parent of hooks/
            os.makedirs(os.path.join(repo.path, 'hooks'))
            repo.bare = True

        elif repo_type == 'svn':
            repo.path = os.path.join(
                base_dir, 'test_svn_hooks_installation_repo')
            os.makedirs(os.path.join(repo.path, 'hooks'))

        return repo

    def check_hooks(self, repo_path, repo_bare=True):
        if repo_bare:
            hooks_dir = os.path.join(repo_path, 'hooks')
        else:
            hooks_dir = os.path.join(repo_path, '.git', 'hooks')
        for file_name in self.HOOK_FILES:
            file_path = os.path.join(hooks_dir, file_name)
            self._check_hook_file_mode(file_path)
            self._check_hook_file_content(file_path, sys.executable)
107
108
class TestInstallGitHooks(BaseInstallHooks):
    """Tests for hook_utils.install_git_hooks."""

    HOOK_FILES = ('pre-receive', 'post-receive')

    def _hook_paths(self, repo):
        # full paths of the hook files inside the repo's hooks directory
        hooks_path = os.path.join(repo.path, 'hooks')
        return [os.path.join(hooks_path, name) for name in self.HOOK_FILES]

    def test_hooks_are_installed(self):
        repo = self.create_dummy_repo('git')
        assert hook_utils.install_git_hooks(repo.path, repo.bare)
        self.check_hooks(repo.path, repo.bare)

    def test_hooks_are_replaced(self):
        repo = self.create_dummy_repo('git')
        for hook_path in self._hook_paths(repo):
            self._create_fake_hook(
                hook_path, content="RC_HOOK_VER = 'abcde'\n")

        assert hook_utils.install_git_hooks(repo.path, repo.bare)
        self.check_hooks(repo.path, repo.bare)

    def test_non_rc_hooks_are_not_replaced(self):
        repo = self.create_dummy_repo('git')
        non_rc_content = 'echo "non rc hook"\n'
        for hook_path in self._hook_paths(repo):
            self._create_fake_hook(hook_path, content=non_rc_content)

        assert hook_utils.install_git_hooks(repo.path, repo.bare)

        for hook_path in self._hook_paths(repo):
            with open(hook_path, 'rt') as hook_file:
                assert hook_file.read() == non_rc_content

    def test_non_rc_hooks_are_replaced_with_force_flag(self):
        repo = self.create_dummy_repo('git')
        non_rc_content = 'echo "non rc hook"\n'
        for hook_path in self._hook_paths(repo):
            self._create_fake_hook(hook_path, content=non_rc_content)

        assert hook_utils.install_git_hooks(
            repo.path, repo.bare, force_create=True)
        self.check_hooks(repo.path, repo.bare)
157
158
class TestInstallSvnHooks(BaseInstallHooks):
    """Tests for hook_utils.install_svn_hooks."""

    HOOK_FILES = ('pre-commit', 'post-commit')

    def _hook_paths(self, repo):
        # full paths of the hook files inside the repo's hooks directory
        hooks_path = os.path.join(repo.path, 'hooks')
        return [os.path.join(hooks_path, name) for name in self.HOOK_FILES]

    def test_hooks_are_installed(self):
        repo = self.create_dummy_repo('svn')
        assert hook_utils.install_svn_hooks(repo.path)
        self.check_hooks(repo.path)

    def test_hooks_are_replaced(self):
        repo = self.create_dummy_repo('svn')
        for hook_path in self._hook_paths(repo):
            self._create_fake_hook(
                hook_path, content="RC_HOOK_VER = 'abcde'\n")

        assert hook_utils.install_svn_hooks(repo.path)
        self.check_hooks(repo.path)

    def test_non_rc_hooks_are_not_replaced(self):
        repo = self.create_dummy_repo('svn')
        non_rc_content = 'echo "non rc hook"\n'
        for hook_path in self._hook_paths(repo):
            self._create_fake_hook(hook_path, content=non_rc_content)

        assert hook_utils.install_svn_hooks(repo.path)

        for hook_path in self._hook_paths(repo):
            with open(hook_path, 'rt') as hook_file:
                assert hook_file.read() == non_rc_content

    def test_non_rc_hooks_are_replaced_with_force_flag(self):
        repo = self.create_dummy_repo('svn')
        non_rc_content = 'echo "non rc hook"\n'
        for hook_path in self._hook_paths(repo):
            self._create_fake_hook(hook_path, content=non_rc_content)

        assert hook_utils.install_svn_hooks(repo.path, force_create=True)
        self.check_hooks(repo.path)
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.11.6
2 current_version = 4.12.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.11.6
12 state = in_progress
13 version = 4.12.0
16 14
@@ -1,13 +1,16 b''
1 1 # top level files
2 2 include *.rst
3 3 include *.txt
4 4
5 5 # package extras
6 6 include vcsserver/VERSION
7 7
8 8 # all config files
9 9 recursive-include configs *
10 10
11 # hook templates
12 recursive-include vcsserver/hook_utils/hook_templates *
13
11 14 # skip any tests files
12 15 recursive-exclude vcsserver/tests *
13 16
@@ -1,79 +1,83 b''
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 3 # #
4 4 ################################################################################
5 5
6 6 [app:main]
7 7 use = egg:rhodecode-vcsserver
8 8
9 9 pyramid.default_locale_name = en
10 10 pyramid.includes =
11 11
12 12 # default locale used by VCS systems
13 13 locale = en_US.UTF-8
14 14
15 15 # cache regions, please don't change
16 16 beaker.cache.regions = repo_object
17 17 beaker.cache.repo_object.type = memorylru
18 18 beaker.cache.repo_object.max_items = 100
19 19 # cache auto-expires after N seconds
20 20 beaker.cache.repo_object.expire = 300
21 21 beaker.cache.repo_object.enabled = true
22 22
23 # path to binaries for vcsserver, it should be set by the installer
24 # at installation time, e.g /home/user/vcsserver-1/profile/bin
25 core.binary_dir = ""
26
23 27 [server:main]
24 28 ## COMMON ##
25 29 host = 0.0.0.0
26 30 port = 9900
27 31
28 32 use = egg:waitress#main
29 33
30 34
31 35 ################################
32 36 ### LOGGING CONFIGURATION ####
33 37 ################################
34 38 [loggers]
35 39 keys = root, vcsserver, beaker
36 40
37 41 [handlers]
38 42 keys = console
39 43
40 44 [formatters]
41 45 keys = generic
42 46
43 47 #############
44 48 ## LOGGERS ##
45 49 #############
46 50 [logger_root]
47 51 level = NOTSET
48 52 handlers = console
49 53
50 54 [logger_vcsserver]
51 55 level = DEBUG
52 56 handlers =
53 57 qualname = vcsserver
54 58 propagate = 1
55 59
56 60 [logger_beaker]
57 61 level = DEBUG
58 62 handlers =
59 63 qualname = beaker
60 64 propagate = 1
61 65
62 66
63 67 ##############
64 68 ## HANDLERS ##
65 69 ##############
66 70
67 71 [handler_console]
68 72 class = StreamHandler
69 73 args = (sys.stderr,)
70 74 level = DEBUG
71 75 formatter = generic
72 76
73 77 ################
74 78 ## FORMATTERS ##
75 79 ################
76 80
77 81 [formatter_generic]
78 82 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
79 83 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,102 +1,106 b''
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 3 # #
4 4 ################################################################################
5 5
6 6
7 7 [server:main]
8 8 ## COMMON ##
9 9 host = 127.0.0.1
10 10 port = 9900
11 11
12 12
13 13 ##########################
14 14 ## GUNICORN WSGI SERVER ##
15 15 ##########################
16 16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 17 use = egg:gunicorn#main
18 18 ## Sets the number of process workers. Recommended
19 19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 20 workers = 2
21 21 ## process name
22 22 proc_name = rhodecode_vcsserver
23 23 ## type of worker class, currently `sync` is the only option allowed.
24 24 worker_class = sync
25 25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 26 #worker_connections = 10
27 27 ## max number of requests that worker will handle before being gracefully
28 28 ## restarted, could prevent memory leaks
29 29 max_requests = 1000
30 30 max_requests_jitter = 30
31 31 ## amount of time a worker can spend with handling a request before it
32 32 ## gets killed and restarted. Set to 6hrs
33 33 timeout = 21600
34 34
35 35
36 36 [app:main]
37 37 use = egg:rhodecode-vcsserver
38 38
39 39 pyramid.default_locale_name = en
40 40 pyramid.includes =
41 41
42 ## default locale used by VCS systems
42 # default locale used by VCS systems
43 43 locale = en_US.UTF-8
44 44
45 45 # cache regions, please don't change
46 46 beaker.cache.regions = repo_object
47 47 beaker.cache.repo_object.type = memorylru
48 48 beaker.cache.repo_object.max_items = 100
49 49 # cache auto-expires after N seconds
50 50 beaker.cache.repo_object.expire = 300
51 51 beaker.cache.repo_object.enabled = true
52 52
53 # path to binaries for vcsserver, it should be set by the installer
54 # at installation time, e.g /home/user/vcsserver-1/profile/bin
55 core.binary_dir = ""
56
53 57
54 58 ################################
55 59 ### LOGGING CONFIGURATION ####
56 60 ################################
57 61 [loggers]
58 62 keys = root, vcsserver, beaker
59 63
60 64 [handlers]
61 65 keys = console
62 66
63 67 [formatters]
64 68 keys = generic
65 69
66 70 #############
67 71 ## LOGGERS ##
68 72 #############
69 73 [logger_root]
70 74 level = NOTSET
71 75 handlers = console
72 76
73 77 [logger_vcsserver]
74 78 level = DEBUG
75 79 handlers =
76 80 qualname = vcsserver
77 81 propagate = 1
78 82
79 83 [logger_beaker]
80 84 level = DEBUG
81 85 handlers =
82 86 qualname = beaker
83 87 propagate = 1
84 88
85 89
86 90 ##############
87 91 ## HANDLERS ##
88 92 ##############
89 93
90 94 [handler_console]
91 95 class = StreamHandler
92 96 args = (sys.stderr,)
93 97 level = DEBUG
94 98 formatter = generic
95 99
96 100 ################
97 101 ## FORMATTERS ##
98 102 ################
99 103
100 104 [formatter_generic]
101 105 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
102 106 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
@@ -1,54 +1,60 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs, basePythonPackages }:
8 8
9 9 let
10 10 sed = "sed -i";
11 11 in
12 12
13 13 self: super: {
14 14
15 Beaker = super.Beaker.override (attrs: {
16 patches = [
17 ./patch-beaker-lock-func-debug.diff
18 ];
19 });
20
15 21 subvertpy = super.subvertpy.override (attrs: {
16 22 # TODO: johbo: Remove the "or" once we drop 16.03 support
17 23 SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
18 24 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
19 25 pkgs.aprutil
20 26 pkgs.subversion
21 27 ];
22 28 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
23 29 ${sed} -e "s/'gcc'/'clang'/" setup.py
24 30 '';
25 31 });
26 32
27 33 hgsubversion = super.hgsubversion.override (attrs: {
28 34 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
29 35 pkgs.sqlite
30 36 basePythonPackages.sqlite3
31 37 ];
32 38 });
33 39
34 40 mercurial = super.mercurial.override (attrs: {
35 41 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
36 42 self.python.modules.curses
37 43 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
38 44 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
39 45 });
40 46
41 47 pyramid = super.pyramid.override (attrs: {
42 48 postFixup = ''
43 49 wrapPythonPrograms
44 50 # TODO: johbo: "wrapPython" adds this magic line which
45 51 # confuses pserve.
46 52 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
47 53 '';
48 54 });
49 55
50 56 # Avoid that setuptools is replaced, this leads to trouble
51 57 # with buildPythonPackage.
52 58 setuptools = basePythonPackages.setuptools;
53 59
54 60 }
@@ -1,877 +1,877 b''
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.9.0";
6 name = "Beaker-1.9.1";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 9 propagatedBuildInputs = with self; [funcsigs];
10 10 src = fetchurl {
11 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
12 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
11 url = "https://pypi.python.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
12 md5 = "46fda0a164e2b0d24ccbda51a2310301";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.10";
19 name = "Jinja2-2.9.6";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [MarkupSafe];
23 23 src = fetchurl {
24 url = "https://pypi.python.org/packages/56/e6/332789f295cf22308386cf5bbd1f4e00ed11484299c5d7383378cf48ba47/Jinja2-2.10.tar.gz";
25 md5 = "61ef1117f945486472850819b8d1eb3d";
24 url = "https://pypi.python.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
25 md5 = "6411537324b4dba0956aaa8109f3c77b";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 Mako = super.buildPythonPackage {
32 32 name = "Mako-1.0.7";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [MarkupSafe];
36 36 src = fetchurl {
37 37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 38 md5 = "5836cc997b1b773ef389bf6629c30e65";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 MarkupSafe = super.buildPythonPackage {
45 45 name = "MarkupSafe-1.0";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 50 url = "https://pypi.python.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
51 51 md5 = "2fcedc9284d50e577b5192e8e3578355";
52 52 };
53 53 meta = {
54 54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 55 };
56 56 };
57 57 PasteDeploy = super.buildPythonPackage {
58 58 name = "PasteDeploy-1.5.2";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 64 md5 = "352b7205c78c8de4987578d19431af3b";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.mit ];
68 68 };
69 69 };
70 70 WebOb = super.buildPythonPackage {
71 71 name = "WebOb-1.7.4";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [];
75 75 src = fetchurl {
76 76 url = "https://pypi.python.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
77 77 md5 = "397e46892d7f199b1a07eb20a2d3d9bd";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.mit ];
81 81 };
82 82 };
83 83 WebTest = super.buildPythonPackage {
84 84 name = "WebTest-2.0.29";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 88 src = fetchurl {
89 89 url = "https://pypi.python.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
90 90 md5 = "30b4cf0d340b9a5335fac4389e6f84fc";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 backports.shutil-get-terminal-size = super.buildPythonPackage {
97 97 name = "backports.shutil-get-terminal-size-1.0.0";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 103 md5 = "03267762480bd86b50580dc19dff3c66";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.mit ];
107 107 };
108 108 };
109 109 beautifulsoup4 = super.buildPythonPackage {
110 110 name = "beautifulsoup4-4.6.0";
111 111 buildInputs = with self; [];
112 112 doCheck = false;
113 113 propagatedBuildInputs = with self; [];
114 114 src = fetchurl {
115 115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
116 116 md5 = "c17714d0f91a23b708a592cb3c697728";
117 117 };
118 118 meta = {
119 119 license = [ pkgs.lib.licenses.mit ];
120 120 };
121 121 };
122 122 configobj = super.buildPythonPackage {
123 123 name = "configobj-5.0.6";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [six];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 130 };
131 131 meta = {
132 132 license = [ pkgs.lib.licenses.bsdOriginal ];
133 133 };
134 134 };
135 135 cov-core = super.buildPythonPackage {
136 136 name = "cov-core-1.15.0";
137 137 buildInputs = with self; [];
138 138 doCheck = false;
139 139 propagatedBuildInputs = with self; [coverage];
140 140 src = fetchurl {
141 141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 coverage = super.buildPythonPackage {
149 149 name = "coverage-3.7.1";
150 150 buildInputs = with self; [];
151 151 doCheck = false;
152 152 propagatedBuildInputs = with self; [];
153 153 src = fetchurl {
154 154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
156 156 };
157 157 meta = {
158 158 license = [ pkgs.lib.licenses.bsdOriginal ];
159 159 };
160 160 };
161 161 decorator = super.buildPythonPackage {
162 162 name = "decorator-4.1.2";
163 163 buildInputs = with self; [];
164 164 doCheck = false;
165 165 propagatedBuildInputs = with self; [];
166 166 src = fetchurl {
167 167 url = "https://pypi.python.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
168 168 md5 = "a0f7f4fe00ae2dde93494d90c192cf8c";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 172 };
173 173 };
174 174 dulwich = super.buildPythonPackage {
175 175 name = "dulwich-0.13.0";
176 176 buildInputs = with self; [];
177 177 doCheck = false;
178 178 propagatedBuildInputs = with self; [];
179 179 src = fetchurl {
180 180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
181 181 md5 = "6dede0626657c2bd08f48ca1221eea91";
182 182 };
183 183 meta = {
184 184 license = [ pkgs.lib.licenses.gpl2Plus ];
185 185 };
186 186 };
187 187 enum34 = super.buildPythonPackage {
188 188 name = "enum34-1.1.6";
189 189 buildInputs = with self; [];
190 190 doCheck = false;
191 191 propagatedBuildInputs = with self; [];
192 192 src = fetchurl {
193 193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 194 md5 = "5f13a0841a61f7fc295c514490d120d0";
195 195 };
196 196 meta = {
197 197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 198 };
199 199 };
200 200 funcsigs = super.buildPythonPackage {
201 201 name = "funcsigs-1.0.2";
202 202 buildInputs = with self; [];
203 203 doCheck = false;
204 204 propagatedBuildInputs = with self; [];
205 205 src = fetchurl {
206 206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
208 208 };
209 209 meta = {
210 210 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 211 };
212 212 };
213 213 gevent = super.buildPythonPackage {
214 214 name = "gevent-1.2.2";
215 215 buildInputs = with self; [];
216 216 doCheck = false;
217 217 propagatedBuildInputs = with self; [greenlet];
218 218 src = fetchurl {
219 219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
220 220 md5 = "7f0baf355384fe5ff2ecf66853422554";
221 221 };
222 222 meta = {
223 223 license = [ pkgs.lib.licenses.mit ];
224 224 };
225 225 };
226 226 gprof2dot = super.buildPythonPackage {
227 227 name = "gprof2dot-2017.9.19";
228 228 buildInputs = with self; [];
229 229 doCheck = false;
230 230 propagatedBuildInputs = with self; [];
231 231 src = fetchurl {
232 232 url = "https://pypi.python.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
233 233 md5 = "cda2d552bb0d0b9f16e6824a9aabd225";
234 234 };
235 235 meta = {
236 236 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
237 237 };
238 238 };
239 239 greenlet = super.buildPythonPackage {
240 name = "greenlet-0.4.12";
240 name = "greenlet-0.4.13";
241 241 buildInputs = with self; [];
242 242 doCheck = false;
243 243 propagatedBuildInputs = with self; [];
244 244 src = fetchurl {
245 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
246 md5 = "e8637647d58a26c4a1f51ca393e53c00";
245 url = "https://pypi.python.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
246 md5 = "6e0b9dd5385f81d478451ec8ed1d62b3";
247 247 };
248 248 meta = {
249 249 license = [ pkgs.lib.licenses.mit ];
250 250 };
251 251 };
252 252 gunicorn = super.buildPythonPackage {
253 253 name = "gunicorn-19.7.1";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
259 259 md5 = "174d3c3cd670a5be0404d84c484e590c";
260 260 };
261 261 meta = {
262 262 license = [ pkgs.lib.licenses.mit ];
263 263 };
264 264 };
265 265 hg-evolve = super.buildPythonPackage {
266 266 name = "hg-evolve-7.0.1";
267 267 buildInputs = with self; [];
268 268 doCheck = false;
269 269 propagatedBuildInputs = with self; [];
270 270 src = fetchurl {
271 271 url = "https://pypi.python.org/packages/92/5c/4c216be1a08f326a12076b645f4892a2b0865810db1f4a0c9648f1f4c113/hg-evolve-7.0.1.tar.gz";
272 272 md5 = "2dfa926846ea873a8406bababb06b277";
273 273 };
274 274 meta = {
275 275 license = [ { fullName = "GPLv2+"; } ];
276 276 };
277 277 };
278 278 hgsubversion = super.buildPythonPackage {
279 279 name = "hgsubversion-1.9";
280 280 buildInputs = with self; [];
281 281 doCheck = false;
282 282 propagatedBuildInputs = with self; [mercurial subvertpy];
283 283 src = fetchurl {
284 284 url = "https://pypi.python.org/packages/db/26/7293a6c6b85e2a74ab452e9ba7f00b04ff0e440e6cd4f84131ac5d5e6b22/hgsubversion-1.9.tar.gz";
285 285 md5 = "0c6f93ef12cc2e7fe67286f16bcc7211";
286 286 };
287 287 meta = {
288 288 license = [ pkgs.lib.licenses.gpl1 ];
289 289 };
290 290 };
291 291 hupper = super.buildPythonPackage {
292 292 name = "hupper-1.0";
293 293 buildInputs = with self; [];
294 294 doCheck = false;
295 295 propagatedBuildInputs = with self; [];
296 296 src = fetchurl {
297 297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
298 298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 infrae.cache = super.buildPythonPackage {
305 305 name = "infrae.cache-1.0.1";
306 306 buildInputs = with self; [];
307 307 doCheck = false;
308 308 propagatedBuildInputs = with self; [Beaker repoze.lru];
309 309 src = fetchurl {
310 310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
311 311 md5 = "b09076a766747e6ed2a755cc62088e32";
312 312 };
313 313 meta = {
314 314 license = [ pkgs.lib.licenses.zpt21 ];
315 315 };
316 316 };
317 317 ipdb = super.buildPythonPackage {
318 318 name = "ipdb-0.10.3";
319 319 buildInputs = with self; [];
320 320 doCheck = false;
321 321 propagatedBuildInputs = with self; [setuptools ipython];
322 322 src = fetchurl {
323 323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
324 324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
325 325 };
326 326 meta = {
327 327 license = [ pkgs.lib.licenses.bsdOriginal ];
328 328 };
329 329 };
330 330 ipython = super.buildPythonPackage {
331 331 name = "ipython-5.1.0";
332 332 buildInputs = with self; [];
333 333 doCheck = false;
334 334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
335 335 src = fetchurl {
336 336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
337 337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
338 338 };
339 339 meta = {
340 340 license = [ pkgs.lib.licenses.bsdOriginal ];
341 341 };
342 342 };
343 343 ipython-genutils = super.buildPythonPackage {
344 344 name = "ipython-genutils-0.2.0";
345 345 buildInputs = with self; [];
346 346 doCheck = false;
347 347 propagatedBuildInputs = with self; [];
348 348 src = fetchurl {
349 349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
350 350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
351 351 };
352 352 meta = {
353 353 license = [ pkgs.lib.licenses.bsdOriginal ];
354 354 };
355 355 };
356 356 mercurial = super.buildPythonPackage {
357 357 name = "mercurial-4.4.2";
358 358 buildInputs = with self; [];
359 359 doCheck = false;
360 360 propagatedBuildInputs = with self; [];
361 361 src = fetchurl {
362 362 url = "https://pypi.python.org/packages/d0/83/92a5fa662ba277128db305e39e7ea5a638f2f1cbbc6dc5fbf4c14aefae22/mercurial-4.4.2.tar.gz";
363 363 md5 = "95769125cf7e9dbc341a983253acefcd";
364 364 };
365 365 meta = {
366 366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
367 367 };
368 368 };
369 369 mock = super.buildPythonPackage {
370 370 name = "mock-1.0.1";
371 371 buildInputs = with self; [];
372 372 doCheck = false;
373 373 propagatedBuildInputs = with self; [];
374 374 src = fetchurl {
375 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
376 md5 = "869f08d003c289a97c1a6610faf5e913";
375 url = "https://pypi.python.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
376 md5 = "c3971991738caa55ec7c356bbc154ee2";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 380 };
381 381 };
382 382 msgpack-python = super.buildPythonPackage {
383 383 name = "msgpack-python-0.4.8";
384 384 buildInputs = with self; [];
385 385 doCheck = false;
386 386 propagatedBuildInputs = with self; [];
387 387 src = fetchurl {
388 388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
389 389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
390 390 };
391 391 meta = {
392 392 license = [ pkgs.lib.licenses.asl20 ];
393 393 };
394 394 };
395 395 pathlib2 = super.buildPythonPackage {
396 396 name = "pathlib2-2.3.0";
397 397 buildInputs = with self; [];
398 398 doCheck = false;
399 399 propagatedBuildInputs = with self; [six scandir];
400 400 src = fetchurl {
401 401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
402 402 md5 = "89c90409d11fd5947966b6a30a47d18c";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.mit ];
406 406 };
407 407 };
408 408 pexpect = super.buildPythonPackage {
409 name = "pexpect-4.3.0";
409 name = "pexpect-4.4.0";
410 410 buildInputs = with self; [];
411 411 doCheck = false;
412 412 propagatedBuildInputs = with self; [ptyprocess];
413 413 src = fetchurl {
414 url = "https://pypi.python.org/packages/f8/44/5466c30e49762bb92e442bbdf4472d6904608d211258eb3198a11f0309a4/pexpect-4.3.0.tar.gz";
415 md5 = "047a486dcd26134b74f2e67046bb61a0";
414 url = "https://pypi.python.org/packages/fa/c3/60c0cbf96f242d0b47a82e9ca634dcd6dcb043832cf05e17540812e1c707/pexpect-4.4.0.tar.gz";
415 md5 = "e9b07f0765df8245ac72201d757baaef";
416 416 };
417 417 meta = {
418 418 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
419 419 };
420 420 };
421 421 pickleshare = super.buildPythonPackage {
422 422 name = "pickleshare-0.7.4";
423 423 buildInputs = with self; [];
424 424 doCheck = false;
425 425 propagatedBuildInputs = with self; [pathlib2];
426 426 src = fetchurl {
427 427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
428 428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
429 429 };
430 430 meta = {
431 431 license = [ pkgs.lib.licenses.mit ];
432 432 };
433 433 };
434 434 plaster = super.buildPythonPackage {
435 435 name = "plaster-1.0";
436 436 buildInputs = with self; [];
437 437 doCheck = false;
438 438 propagatedBuildInputs = with self; [setuptools];
439 439 src = fetchurl {
440 440 url = "https://pypi.python.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
441 441 md5 = "80e6beb4760c16fea31754babcc0576e";
442 442 };
443 443 meta = {
444 444 license = [ pkgs.lib.licenses.mit ];
445 445 };
446 446 };
447 447 plaster-pastedeploy = super.buildPythonPackage {
448 448 name = "plaster-pastedeploy-0.4.2";
449 449 buildInputs = with self; [];
450 450 doCheck = false;
451 451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 452 src = fetchurl {
453 453 url = "https://pypi.python.org/packages/2c/62/0daf9c0be958e785023e583e51baac15863699e956bfb3d448898d80edd8/plaster_pastedeploy-0.4.2.tar.gz";
454 454 md5 = "58fd7852002909378e818c9d5b71e90a";
455 455 };
456 456 meta = {
457 457 license = [ pkgs.lib.licenses.mit ];
458 458 };
459 459 };
460 460 prompt-toolkit = super.buildPythonPackage {
461 461 name = "prompt-toolkit-1.0.15";
462 462 buildInputs = with self; [];
463 463 doCheck = false;
464 464 propagatedBuildInputs = with self; [six wcwidth];
465 465 src = fetchurl {
466 466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 467 md5 = "8fe70295006dbc8afedd43e5eba99032";
468 468 };
469 469 meta = {
470 470 license = [ pkgs.lib.licenses.bsdOriginal ];
471 471 };
472 472 };
473 473 ptyprocess = super.buildPythonPackage {
474 474 name = "ptyprocess-0.5.2";
475 475 buildInputs = with self; [];
476 476 doCheck = false;
477 477 propagatedBuildInputs = with self; [];
478 478 src = fetchurl {
479 479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
480 480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
481 481 };
482 482 meta = {
483 483 license = [ ];
484 484 };
485 485 };
486 486 py = super.buildPythonPackage {
487 487 name = "py-1.5.2";
488 488 buildInputs = with self; [];
489 489 doCheck = false;
490 490 propagatedBuildInputs = with self; [];
491 491 src = fetchurl {
492 492 url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz";
493 493 md5 = "279ca69c632069e1b71e11b14641ca28";
494 494 };
495 495 meta = {
496 496 license = [ pkgs.lib.licenses.mit ];
497 497 };
498 498 };
499 499 pygments = super.buildPythonPackage {
500 500 name = "pygments-2.2.0";
501 501 buildInputs = with self; [];
502 502 doCheck = false;
503 503 propagatedBuildInputs = with self; [];
504 504 src = fetchurl {
505 505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
506 506 md5 = "13037baca42f16917cbd5ad2fab50844";
507 507 };
508 508 meta = {
509 509 license = [ pkgs.lib.licenses.bsdOriginal ];
510 510 };
511 511 };
512 512 pyramid = super.buildPythonPackage {
513 513 name = "pyramid-1.9.1";
514 514 buildInputs = with self; [];
515 515 doCheck = false;
516 516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
517 517 src = fetchurl {
518 518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
519 519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
520 520 };
521 521 meta = {
522 522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 523 };
524 524 };
525 525 pyramid-jinja2 = super.buildPythonPackage {
526 526 name = "pyramid-jinja2-2.7";
527 527 buildInputs = with self; [];
528 528 doCheck = false;
529 529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
530 530 src = fetchurl {
531 531 url = "https://pypi.python.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
532 532 md5 = "c2f8b2cd7b73a6f1d9a311fcfaf4fb92";
533 533 };
534 534 meta = {
535 535 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 536 };
537 537 };
538 538 pyramid-mako = super.buildPythonPackage {
539 539 name = "pyramid-mako-1.0.2";
540 540 buildInputs = with self; [];
541 541 doCheck = false;
542 542 propagatedBuildInputs = with self; [pyramid Mako];
543 543 src = fetchurl {
544 544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
545 545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
546 546 };
547 547 meta = {
548 548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
549 549 };
550 550 };
551 551 pytest = super.buildPythonPackage {
552 552 name = "pytest-3.2.5";
553 553 buildInputs = with self; [];
554 554 doCheck = false;
555 555 propagatedBuildInputs = with self; [py setuptools];
556 556 src = fetchurl {
557 557 url = "https://pypi.python.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz";
558 558 md5 = "6dbe9bb093883f75394a689a1426ac6f";
559 559 };
560 560 meta = {
561 561 license = [ pkgs.lib.licenses.mit ];
562 562 };
563 563 };
564 564 pytest-catchlog = super.buildPythonPackage {
565 565 name = "pytest-catchlog-1.2.2";
566 566 buildInputs = with self; [];
567 567 doCheck = false;
568 568 propagatedBuildInputs = with self; [py pytest];
569 569 src = fetchurl {
570 570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
571 571 md5 = "09d890c54c7456c818102b7ff8c182c8";
572 572 };
573 573 meta = {
574 574 license = [ pkgs.lib.licenses.mit ];
575 575 };
576 576 };
577 577 pytest-cov = super.buildPythonPackage {
578 578 name = "pytest-cov-2.5.1";
579 579 buildInputs = with self; [];
580 580 doCheck = false;
581 581 propagatedBuildInputs = with self; [pytest coverage];
582 582 src = fetchurl {
583 583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
585 585 };
586 586 meta = {
587 587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 588 };
589 589 };
590 590 pytest-profiling = super.buildPythonPackage {
591 591 name = "pytest-profiling-1.2.11";
592 592 buildInputs = with self; [];
593 593 doCheck = false;
594 594 propagatedBuildInputs = with self; [six pytest gprof2dot];
595 595 src = fetchurl {
596 596 url = "https://pypi.python.org/packages/c0/4a/b4aa786e93c07a86f1f87c581a36bf355a9e06a9da7e00dbd05047626bd2/pytest-profiling-1.2.11.tar.gz";
597 597 md5 = "9ef6b60248731be5d44477980408e8f7";
598 598 };
599 599 meta = {
600 600 license = [ pkgs.lib.licenses.mit ];
601 601 };
602 602 };
603 603 pytest-runner = super.buildPythonPackage {
604 604 name = "pytest-runner-3.0";
605 605 buildInputs = with self; [];
606 606 doCheck = false;
607 607 propagatedBuildInputs = with self; [];
608 608 src = fetchurl {
609 609 url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz";
610 610 md5 = "8f8363a52bbabc4cedd5e239beb2ba11";
611 611 };
612 612 meta = {
613 613 license = [ pkgs.lib.licenses.mit ];
614 614 };
615 615 };
616 616 pytest-sugar = super.buildPythonPackage {
617 617 name = "pytest-sugar-0.9.0";
618 618 buildInputs = with self; [];
619 619 doCheck = false;
620 620 propagatedBuildInputs = with self; [pytest termcolor];
621 621 src = fetchurl {
622 622 url = "https://pypi.python.org/packages/49/d8/c5ff6cca3ce2ebd8b73eec89779bf6b4a7737456a70e8ea4d44c1ff90f71/pytest-sugar-0.9.0.tar.gz";
623 623 md5 = "89fbff17277fa6a95a560a04b68cb9f9";
624 624 };
625 625 meta = {
626 626 license = [ pkgs.lib.licenses.bsdOriginal ];
627 627 };
628 628 };
629 629 pytest-timeout = super.buildPythonPackage {
630 630 name = "pytest-timeout-1.2.0";
631 631 buildInputs = with self; [];
632 632 doCheck = false;
633 633 propagatedBuildInputs = with self; [pytest];
634 634 src = fetchurl {
635 635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
636 636 md5 = "83607d91aa163562c7ee835da57d061d";
637 637 };
638 638 meta = {
639 639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
640 640 };
641 641 };
642 642 repoze.lru = super.buildPythonPackage {
643 643 name = "repoze.lru-0.7";
644 644 buildInputs = with self; [];
645 645 doCheck = false;
646 646 propagatedBuildInputs = with self; [];
647 647 src = fetchurl {
648 648 url = "https://pypi.python.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
649 649 md5 = "c08cc030387e0b1fc53c5c7d964b35e2";
650 650 };
651 651 meta = {
652 652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
653 653 };
654 654 };
655 655 rhodecode-vcsserver = super.buildPythonPackage {
656 name = "rhodecode-vcsserver-4.11.6";
656 name = "rhodecode-vcsserver-4.12.0";
657 657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
658 658 doCheck = true;
659 659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
660 660 src = ./.;
661 661 meta = {
662 662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
663 663 };
664 664 };
665 665 scandir = super.buildPythonPackage {
666 name = "scandir-1.6";
666 name = "scandir-1.7";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 url = "https://pypi.python.org/packages/77/3f/916f524f50ee65e3f465a280d2851bd63685250fddb3020c212b3977664d/scandir-1.6.tar.gz";
672 md5 = "0180ddb97c96cbb2d4f25d2ae11c64ac";
671 url = "https://pypi.python.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
672 md5 = "037e5f24d1a0e78b17faca72dea9555f";
673 673 };
674 674 meta = {
675 675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 676 };
677 677 };
678 678 setuptools = super.buildPythonPackage {
679 679 name = "setuptools-30.1.0";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
685 685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.mit ];
689 689 };
690 690 };
691 691 simplegeneric = super.buildPythonPackage {
692 692 name = "simplegeneric-0.8.1";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
698 698 md5 = "f9c1fab00fd981be588fc32759f474e3";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.zpt21 ];
702 702 };
703 703 };
704 704 simplejson = super.buildPythonPackage {
705 705 name = "simplejson-3.11.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
712 712 };
713 713 meta = {
714 714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
715 715 };
716 716 };
717 717 six = super.buildPythonPackage {
718 718 name = "six-1.11.0";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 723 url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
724 724 md5 = "d12789f9baf7e9fb2524c0c64f1773f8";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 subprocess32 = super.buildPythonPackage {
731 731 name = "subprocess32-3.2.7";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
737 737 md5 = "824c801e479d3e916879aae3e9c15e16";
738 738 };
739 739 meta = {
740 740 license = [ pkgs.lib.licenses.psfl ];
741 741 };
742 742 };
743 743 subvertpy = super.buildPythonPackage {
744 744 name = "subvertpy-0.10.1";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 749 url = "https://pypi.python.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
750 750 md5 = "a70e03579902d480f5e9f8c570f6536b";
751 751 };
752 752 meta = {
753 753 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
754 754 };
755 755 };
756 756 termcolor = super.buildPythonPackage {
757 757 name = "termcolor-1.1.0";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
763 763 md5 = "043e89644f8909d462fbbfa511c768df";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.mit ];
767 767 };
768 768 };
769 769 traitlets = super.buildPythonPackage {
770 770 name = "traitlets-4.3.2";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
776 776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 780 };
781 781 };
782 782 translationstring = super.buildPythonPackage {
783 783 name = "translationstring-1.3";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
789 789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
790 790 };
791 791 meta = {
792 792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
793 793 };
794 794 };
795 795 venusian = super.buildPythonPackage {
796 796 name = "venusian-1.1.0";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
803 803 };
804 804 meta = {
805 805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
806 806 };
807 807 };
808 808 waitress = super.buildPythonPackage {
809 809 name = "waitress-1.1.0";
810 810 buildInputs = with self; [];
811 811 doCheck = false;
812 812 propagatedBuildInputs = with self; [];
813 813 src = fetchurl {
814 814 url = "https://pypi.python.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
815 815 md5 = "0f1eb7fdfdbf2e6d18decbda1733045c";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.zpt21 ];
819 819 };
820 820 };
821 821 wcwidth = super.buildPythonPackage {
822 822 name = "wcwidth-0.1.7";
823 823 buildInputs = with self; [];
824 824 doCheck = false;
825 825 propagatedBuildInputs = with self; [];
826 826 src = fetchurl {
827 827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
828 828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.mit ];
832 832 };
833 833 };
834 834 wheel = super.buildPythonPackage {
835 835 name = "wheel-0.29.0";
836 836 buildInputs = with self; [];
837 837 doCheck = false;
838 838 propagatedBuildInputs = with self; [];
839 839 src = fetchurl {
840 840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
841 841 md5 = "555a67e4507cedee23a0deb9651e452f";
842 842 };
843 843 meta = {
844 844 license = [ pkgs.lib.licenses.mit ];
845 845 };
846 846 };
847 847 zope.deprecation = super.buildPythonPackage {
848 name = "zope.deprecation-4.1.2";
848 name = "zope.deprecation-4.3.0";
849 849 buildInputs = with self; [];
850 850 doCheck = false;
851 851 propagatedBuildInputs = with self; [setuptools];
852 852 src = fetchurl {
853 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
854 md5 = "e9a663ded58f4f9f7881beb56cae2782";
853 url = "https://pypi.python.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
854 md5 = "2166b2cb7e0e96a21104e6f8f9b696bb";
855 855 };
856 856 meta = {
857 857 license = [ pkgs.lib.licenses.zpt21 ];
858 858 };
859 859 };
860 860 zope.interface = super.buildPythonPackage {
861 name = "zope.interface-4.1.3";
861 name = "zope.interface-4.4.3";
862 862 buildInputs = with self; [];
863 863 doCheck = false;
864 864 propagatedBuildInputs = with self; [setuptools];
865 865 src = fetchurl {
866 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
867 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
866 url = "https://pypi.python.org/packages/bd/d2/25349ed41f9dcff7b3baf87bd88a4c82396cf6e02f1f42bb68657a3132af/zope.interface-4.4.3.tar.gz";
867 md5 = "8700a4f527c1203b34b10c2b4e7a6912";
868 868 };
869 869 meta = {
870 870 license = [ pkgs.lib.licenses.zpt21 ];
871 871 };
872 872 };
873 873
874 874 ### Test requirements
875 875
876 876
877 877 }
@@ -1,40 +1,41 b''
1 1 ## core
2 2 setuptools==30.1.0
3 3
4 Beaker==1.9.0
4 Beaker==1.9.1
5 5 configobj==5.0.6
6 6 decorator==4.1.2
7 7 dulwich==0.13.0
8 8 hgsubversion==1.9.0
9 9 hg-evolve==7.0.1
10 10 infrae.cache==1.0.1
11 11 mercurial==4.4.2
12 12 msgpack-python==0.4.8
13 13 pyramid-jinja2==2.7
14 Jinja2==2.9.6
14 15 pyramid==1.9.1
15 16 pyramid-mako==1.0.2
16 17 repoze.lru==0.7
17 18 simplejson==3.11.1
18 19 subprocess32==3.2.7
19 20
20 21 subvertpy==0.10.1
21 22
22 23 six==1.11.0
23 24 translationstring==1.3
24 25 WebOb==1.7.4
25 26 wheel==0.29.0
26 zope.deprecation==4.1.2
27 zope.interface==4.1.3
27 zope.deprecation==4.3.0
28 zope.interface==4.4.3
28 29
29 30 ## http servers
30 31 gevent==1.2.2
31 greenlet==0.4.12
32 greenlet==0.4.13
32 33 gunicorn==19.7.1
33 34 waitress==1.1.0
34 35
35 36 ## debug
36 37 ipdb==0.10.3
37 38 ipython==5.1.0
38 39
39 40 ## test related requirements
40 41 -r requirements_test.txt
@@ -1,1 +1,1 b''
1 4.11.6 No newline at end of file
1 4.12.0 No newline at end of file
@@ -1,658 +1,669 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 59 raise exceptions.LookupException(e.message)
60 60 except (HangupException, UnexpectedCommandError) as e:
61 61 raise exceptions.VcsException(e.message)
62 62 except Exception as e:
63 63 # NOTE(marcink): becuase of how dulwich handles some exceptions
64 64 # (KeyError on empty repos), we cannot track this and catch all
65 65 # exceptions, it's an exceptions from other handlers
66 66 #if not hasattr(e, '_vcs_kind'):
67 67 #log.exception("Unhandled exception in git remote call")
68 68 #raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class Repo(DulwichRepo):
74 74 """
75 75 A wrapper for dulwich Repo class.
76 76
77 77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 78 "Too many open files" error. We need to close all opened file descriptors
79 79 once the repo object is destroyed.
80 80
81 81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 82 to 0.12.0 +
83 83 """
84 84 def __del__(self):
85 85 if hasattr(self, 'object_store'):
86 86 self.close()
87 87
88 88
89 89 class GitFactory(RepoFactory):
90 90
91 91 def _create_repo(self, wire, create):
92 92 repo_path = str_to_dulwich(wire['path'])
93 93 return Repo(repo_path)
94 94
95 95
96 96 class GitRemote(object):
97 97
98 98 def __init__(self, factory):
99 99 self._factory = factory
100 100
101 101 self._bulk_methods = {
102 102 "author": self.commit_attribute,
103 103 "date": self.get_object_attrs,
104 104 "message": self.commit_attribute,
105 105 "parents": self.commit_attribute,
106 106 "_commit": self.revision,
107 107 }
108 108
109 109 def _wire_to_config(self, wire):
110 110 if 'config' in wire:
111 111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 112 return {}
113 113
114 114 def _assign_ref(self, wire, ref, commit_id):
115 115 repo = self._factory.repo(wire)
116 116 repo[ref] = commit_id
117 117
118 118 @reraise_safe_exceptions
119 119 def add_object(self, wire, content):
120 120 repo = self._factory.repo(wire)
121 121 blob = objects.Blob()
122 122 blob.set_raw_string(content)
123 123 repo.object_store.add_object(blob)
124 124 return blob.id
125 125
126 126 @reraise_safe_exceptions
127 127 def assert_correct_path(self, wire):
128 128 path = wire.get('path')
129 129 try:
130 130 self._factory.repo(wire)
131 131 except NotGitRepository as e:
132 132 tb = traceback.format_exc()
133 133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 134 return False
135 135
136 136 return True
137 137
138 138 @reraise_safe_exceptions
139 139 def bare(self, wire):
140 140 repo = self._factory.repo(wire)
141 141 return repo.bare
142 142
143 143 @reraise_safe_exceptions
144 144 def blob_as_pretty_string(self, wire, sha):
145 145 repo = self._factory.repo(wire)
146 146 return repo[sha].as_pretty_string()
147 147
148 148 @reraise_safe_exceptions
149 149 def blob_raw_length(self, wire, sha):
150 150 repo = self._factory.repo(wire)
151 151 blob = repo[sha]
152 152 return blob.raw_length()
153 153
154 154 def _parse_lfs_pointer(self, raw_content):
155 155
156 156 spec_string = 'version https://git-lfs.github.com/spec'
157 157 if raw_content and raw_content.startswith(spec_string):
158 158 pattern = re.compile(r"""
159 159 (?:\n)?
160 160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 162 ^size[ ](?P<oid_size>[0-9]+)\n
163 163 (?:\n)?
164 164 """, re.VERBOSE | re.MULTILINE)
165 165 match = pattern.match(raw_content)
166 166 if match:
167 167 return match.groupdict()
168 168
169 169 return {}
170 170
171 171 @reraise_safe_exceptions
172 172 def is_large_file(self, wire, sha):
173 173 repo = self._factory.repo(wire)
174 174 blob = repo[sha]
175 175 return self._parse_lfs_pointer(blob.as_raw_string())
176 176
177 177 @reraise_safe_exceptions
178 178 def in_largefiles_store(self, wire, oid):
179 179 repo = self._factory.repo(wire)
180 180 conf = self._wire_to_config(wire)
181 181
182 182 store_location = conf.get('vcs_git_lfs_store_location')
183 183 if store_location:
184 184 repo_name = repo.path
185 185 store = LFSOidStore(
186 186 oid=oid, repo=repo_name, store_location=store_location)
187 187 return store.has_oid()
188 188
189 189 return False
190 190
191 191 @reraise_safe_exceptions
192 192 def store_path(self, wire, oid):
193 193 repo = self._factory.repo(wire)
194 194 conf = self._wire_to_config(wire)
195 195
196 196 store_location = conf.get('vcs_git_lfs_store_location')
197 197 if store_location:
198 198 repo_name = repo.path
199 199 store = LFSOidStore(
200 200 oid=oid, repo=repo_name, store_location=store_location)
201 201 return store.oid_path
202 202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 203
204 204 @reraise_safe_exceptions
205 205 def bulk_request(self, wire, rev, pre_load):
206 206 result = {}
207 207 for attr in pre_load:
208 208 try:
209 209 method = self._bulk_methods[attr]
210 210 args = [wire, rev]
211 211 if attr == "date":
212 212 args.extend(["commit_time", "commit_timezone"])
213 213 elif attr in ["author", "message", "parents"]:
214 214 args.append(attr)
215 215 result[attr] = method(*args)
216 216 except KeyError:
217 217 raise exceptions.VcsException(
218 218 "Unknown bulk attribute: %s" % attr)
219 219 return result
220 220
221 221 def _build_opener(self, url):
222 222 handlers = []
223 223 url_obj = url_parser(url)
224 224 _, authinfo = url_obj.authinfo()
225 225
226 226 if authinfo:
227 227 # create a password manager
228 228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 229 passmgr.add_password(*authinfo)
230 230
231 231 handlers.extend((httpbasicauthhandler(passmgr),
232 232 httpdigestauthhandler(passmgr)))
233 233
234 234 return urllib2.build_opener(*handlers)
235 235
236 236 @reraise_safe_exceptions
237 237 def check_url(self, url, config):
238 238 url_obj = url_parser(url)
239 239 test_uri, _ = url_obj.authinfo()
240 240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 241 url_obj.query = obfuscate_qs(url_obj.query)
242 242 cleaned_uri = str(url_obj)
243 243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244 244
245 245 if not test_uri.endswith('info/refs'):
246 246 test_uri = test_uri.rstrip('/') + '/info/refs'
247 247
248 248 o = self._build_opener(url)
249 249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250 250
251 251 q = {"service": 'git-upload-pack'}
252 252 qs = '?%s' % urllib.urlencode(q)
253 253 cu = "%s%s" % (test_uri, qs)
254 254 req = urllib2.Request(cu, None, {})
255 255
256 256 try:
257 257 log.debug("Trying to open URL %s", cleaned_uri)
258 258 resp = o.open(req)
259 259 if resp.code != 200:
260 260 raise exceptions.URLError('Return Code is not 200')
261 261 except Exception as e:
262 262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 263 # means it cannot be cloned
264 264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 265
266 266 # now detect if it's proper git repo
267 267 gitdata = resp.read()
268 268 if 'service=git-upload-pack' in gitdata:
269 269 pass
270 270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 271 # old style git can return some other format !
272 272 pass
273 273 else:
274 274 raise exceptions.URLError(
275 275 "url [%s] does not look like an git" % (cleaned_uri,))
276 276
277 277 return True
278 278
279 279 @reraise_safe_exceptions
280 280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 281 remote_refs = self.fetch(wire, url, apply_refs=False)
282 282 repo = self._factory.repo(wire)
283 283 if isinstance(valid_refs, list):
284 284 valid_refs = tuple(valid_refs)
285 285
286 286 for k in remote_refs:
287 287 # only parse heads/tags and skip so called deferred tags
288 288 if k.startswith(valid_refs) and not k.endswith(deferred):
289 289 repo[k] = remote_refs[k]
290 290
291 291 if update_after_clone:
292 292 # we want to checkout HEAD
293 293 repo["HEAD"] = remote_refs["HEAD"]
294 294 index.build_index_from_tree(repo.path, repo.index_path(),
295 295 repo.object_store, repo["HEAD"].tree)
296 296
297 297 # TODO: this is quite complex, check if that can be simplified
298 298 @reraise_safe_exceptions
299 299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 300 repo = self._factory.repo(wire)
301 301 object_store = repo.object_store
302 302
303 303 # Create tree and populates it with blobs
304 304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 305
306 306 for node in updated:
307 307 # Compute subdirs if needed
308 308 dirpath, nodename = vcspath.split(node['path'])
309 309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 310 parent = commit_tree
311 311 ancestors = [('', parent)]
312 312
313 313 # Tries to dig for the deepest existing tree
314 314 while dirnames:
315 315 curdir = dirnames.pop(0)
316 316 try:
317 317 dir_id = parent[curdir][1]
318 318 except KeyError:
319 319 # put curdir back into dirnames and stops
320 320 dirnames.insert(0, curdir)
321 321 break
322 322 else:
323 323 # If found, updates parent
324 324 parent = repo[dir_id]
325 325 ancestors.append((curdir, parent))
326 326 # Now parent is deepest existing tree and we need to create
327 327 # subtrees for dirnames (in reverse order)
328 328 # [this only applies for nodes from added]
329 329 new_trees = []
330 330
331 331 blob = objects.Blob.from_string(node['content'])
332 332
333 333 if dirnames:
334 334 # If there are trees which should be created we need to build
335 335 # them now (in reverse order)
336 336 reversed_dirnames = list(reversed(dirnames))
337 337 curtree = objects.Tree()
338 338 curtree[node['node_path']] = node['mode'], blob.id
339 339 new_trees.append(curtree)
340 340 for dirname in reversed_dirnames[:-1]:
341 341 newtree = objects.Tree()
342 342 newtree[dirname] = (DIR_STAT, curtree.id)
343 343 new_trees.append(newtree)
344 344 curtree = newtree
345 345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 346 else:
347 347 parent.add(
348 348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 349
350 350 new_trees.append(parent)
351 351 # Update ancestors
352 352 reversed_ancestors = reversed(
353 353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 354 for parent, tree, path in reversed_ancestors:
355 355 parent[path] = (DIR_STAT, tree.id)
356 356 object_store.add_object(tree)
357 357
358 358 object_store.add_object(blob)
359 359 for tree in new_trees:
360 360 object_store.add_object(tree)
361 361
362 362 for node_path in removed:
363 363 paths = node_path.split('/')
364 364 tree = commit_tree
365 365 trees = [tree]
366 366 # Traverse deep into the forest...
367 367 for path in paths:
368 368 try:
369 369 obj = repo[tree[path][1]]
370 370 if isinstance(obj, objects.Tree):
371 371 trees.append(obj)
372 372 tree = obj
373 373 except KeyError:
374 374 break
375 375 # Cut down the blob and all rotten trees on the way back...
376 376 for path, tree in reversed(zip(paths, trees)):
377 377 del tree[path]
378 378 if tree:
379 379 # This tree still has elements - don't remove it or any
380 380 # of it's parents
381 381 break
382 382
383 383 object_store.add_object(commit_tree)
384 384
385 385 # Create commit
386 386 commit = objects.Commit()
387 387 commit.tree = commit_tree.id
388 388 for k, v in commit_data.iteritems():
389 389 setattr(commit, k, v)
390 390 object_store.add_object(commit)
391 391
392 392 ref = 'refs/heads/%s' % branch
393 393 repo.refs[ref] = commit.id
394 394
395 395 return commit.id
396 396
397 397 @reraise_safe_exceptions
398 398 def fetch(self, wire, url, apply_refs=True, refs=None):
399 399 if url != 'default' and '://' not in url:
400 400 client = LocalGitClient(url)
401 401 else:
402 402 url_obj = url_parser(url)
403 403 o = self._build_opener(url)
404 404 url, _ = url_obj.authinfo()
405 405 client = HttpGitClient(base_url=url, opener=o)
406 406 repo = self._factory.repo(wire)
407 407
408 408 determine_wants = repo.object_store.determine_wants_all
409 409 if refs:
410 410 def determine_wants_requested(references):
411 411 return [references[r] for r in references if r in refs]
412 412 determine_wants = determine_wants_requested
413 413
414 414 try:
415 415 remote_refs = client.fetch(
416 416 path=url, target=repo, determine_wants=determine_wants)
417 417 except NotGitRepository as e:
418 418 log.warning(
419 419 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 420 # Exception can contain unicode which we convert
421 421 raise exceptions.AbortException(repr(e))
422 422
423 423 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 424 # refs filtered by `determine_wants` function. We need to filter result
425 425 # as well
426 426 if refs:
427 427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 428
429 429 if apply_refs:
430 430 # TODO: johbo: Needs proper test coverage with a git repository
431 431 # that contains a tag object, so that we would end up with
432 432 # a peeled ref at this point.
433 433 PEELED_REF_MARKER = '^{}'
434 434 for k in remote_refs:
435 435 if k.endswith(PEELED_REF_MARKER):
436 436 log.info("Skipping peeled reference %s", k)
437 437 continue
438 438 repo[k] = remote_refs[k]
439 439
440 440 if refs:
441 441 # mikhail: explicitly set the head to the last ref.
442 442 repo['HEAD'] = remote_refs[refs[-1]]
443 443
444 444 # TODO: mikhail: should we return remote_refs here to be
445 445 # consistent?
446 446 else:
447 447 return remote_refs
448 448
449 449 @reraise_safe_exceptions
450 450 def sync_push(self, wire, url, refs=None):
451 451 if self.check_url(url, wire):
452 452 repo = self._factory.repo(wire)
453 453 self.run_git_command(
454 wire, ['push', url, '--mirror'], fail_on_stderr=False)
455
454 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 _copts=['-c', 'core.askpass=""'],
456 extra_env={'GIT_TERMINAL_PROMPT': '0'})
456 457
457 458 @reraise_safe_exceptions
458 459 def get_remote_refs(self, wire, url):
459 460 repo = Repo(url)
460 461 return repo.get_refs()
461 462
462 463 @reraise_safe_exceptions
463 464 def get_description(self, wire):
464 465 repo = self._factory.repo(wire)
465 466 return repo.get_description()
466 467
467 468 @reraise_safe_exceptions
468 469 def get_file_history(self, wire, file_path, commit_id, limit):
469 470 repo = self._factory.repo(wire)
470 471 include = [commit_id]
471 472 paths = [file_path]
472 473
473 474 walker = repo.get_walker(include, paths=paths, max_entries=limit)
474 475 return [x.commit.id for x in walker]
475 476
476 477 @reraise_safe_exceptions
477 478 def get_missing_revs(self, wire, rev1, rev2, path2):
478 479 repo = self._factory.repo(wire)
479 480 LocalGitClient(thin_packs=False).fetch(path2, repo)
480 481
481 482 wire_remote = wire.copy()
482 483 wire_remote['path'] = path2
483 484 repo_remote = self._factory.repo(wire_remote)
484 485 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
485 486
486 487 revs = [
487 488 x.commit.id
488 489 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
489 490 return revs
490 491
491 492 @reraise_safe_exceptions
492 493 def get_object(self, wire, sha):
493 494 repo = self._factory.repo(wire)
494 495 obj = repo.get_object(sha)
495 496 commit_id = obj.id
496 497
497 498 if isinstance(obj, Tag):
498 499 commit_id = obj.object[1]
499 500
500 501 return {
501 502 'id': obj.id,
502 503 'type': obj.type_name,
503 504 'commit_id': commit_id
504 505 }
505 506
506 507 @reraise_safe_exceptions
507 508 def get_object_attrs(self, wire, sha, *attrs):
508 509 repo = self._factory.repo(wire)
509 510 obj = repo.get_object(sha)
510 511 return list(getattr(obj, a) for a in attrs)
511 512
512 513 @reraise_safe_exceptions
513 514 def get_refs(self, wire):
514 515 repo = self._factory.repo(wire)
515 516 result = {}
516 517 for ref, sha in repo.refs.as_dict().items():
517 518 peeled_sha = repo.get_peeled(ref)
518 519 result[ref] = peeled_sha
519 520 return result
520 521
521 522 @reraise_safe_exceptions
522 523 def get_refs_path(self, wire):
523 524 repo = self._factory.repo(wire)
524 525 return repo.refs.path
525 526
526 527 @reraise_safe_exceptions
527 528 def head(self, wire):
528 529 repo = self._factory.repo(wire)
529 530 return repo.head()
530 531
531 532 @reraise_safe_exceptions
532 533 def init(self, wire):
533 534 repo_path = str_to_dulwich(wire['path'])
534 535 self.repo = Repo.init(repo_path)
535 536
536 537 @reraise_safe_exceptions
537 538 def init_bare(self, wire):
538 539 repo_path = str_to_dulwich(wire['path'])
539 540 self.repo = Repo.init_bare(repo_path)
540 541
541 542 @reraise_safe_exceptions
542 543 def revision(self, wire, rev):
543 544 repo = self._factory.repo(wire)
544 545 obj = repo[rev]
545 546 obj_data = {
546 547 'id': obj.id,
547 548 }
548 549 try:
549 550 obj_data['tree'] = obj.tree
550 551 except AttributeError:
551 552 pass
552 553 return obj_data
553 554
554 555 @reraise_safe_exceptions
555 556 def commit_attribute(self, wire, rev, attr):
556 557 repo = self._factory.repo(wire)
557 558 obj = repo[rev]
558 559 return getattr(obj, attr)
559 560
560 561 @reraise_safe_exceptions
561 562 def set_refs(self, wire, key, value):
562 563 repo = self._factory.repo(wire)
563 564 repo.refs[key] = value
564 565
565 566 @reraise_safe_exceptions
566 567 def remove_ref(self, wire, key):
567 568 repo = self._factory.repo(wire)
568 569 del repo.refs[key]
569 570
570 571 @reraise_safe_exceptions
571 572 def tree_changes(self, wire, source_id, target_id):
572 573 repo = self._factory.repo(wire)
573 574 source = repo[source_id].tree if source_id else None
574 575 target = repo[target_id].tree
575 576 result = repo.object_store.tree_changes(source, target)
576 577 return list(result)
577 578
578 579 @reraise_safe_exceptions
579 580 def tree_items(self, wire, tree_id):
580 581 repo = self._factory.repo(wire)
581 582 tree = repo[tree_id]
582 583
583 584 result = []
584 585 for item in tree.iteritems():
585 586 item_sha = item.sha
586 587 item_mode = item.mode
587 588
588 589 if FILE_MODE(item_mode) == GIT_LINK:
589 590 item_type = "link"
590 591 else:
591 592 item_type = repo[item_sha].type_name
592 593
593 594 result.append((item.path, item_mode, item_sha, item_type))
594 595 return result
595 596
596 597 @reraise_safe_exceptions
597 598 def update_server_info(self, wire):
598 599 repo = self._factory.repo(wire)
599 600 update_server_info(repo)
600 601
601 602 @reraise_safe_exceptions
602 603 def discover_git_version(self):
603 604 stdout, _ = self.run_git_command(
604 605 {}, ['--version'], _bare=True, _safe=True)
605 606 prefix = 'git version'
606 607 if stdout.startswith(prefix):
607 608 stdout = stdout[len(prefix):]
608 609 return stdout.strip()
609 610
610 611 @reraise_safe_exceptions
611 612 def run_git_command(self, wire, cmd, **opts):
612 613 path = wire.get('path', None)
613 614
614 615 if path and os.path.isdir(path):
615 616 opts['cwd'] = path
616 617
617 618 if '_bare' in opts:
618 619 _copts = []
619 620 del opts['_bare']
620 621 else:
621 622 _copts = ['-c', 'core.quotepath=false', ]
622 623 safe_call = False
623 624 if '_safe' in opts:
624 625 # no exc on failure
625 626 del opts['_safe']
626 627 safe_call = True
627 628
629 if '_copts' in opts:
630 _copts.extend(opts['_copts'] or [])
631 del opts['_copts']
632
628 633 gitenv = os.environ.copy()
629 634 gitenv.update(opts.pop('extra_env', {}))
630 635 # need to clean fix GIT_DIR !
631 636 if 'GIT_DIR' in gitenv:
632 637 del gitenv['GIT_DIR']
633 638 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
634 639
635 640 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
636 641
637 642 try:
638 643 _opts = {'env': gitenv, 'shell': False}
639 644 _opts.update(opts)
640 645 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
641 646
642 647 return ''.join(p), ''.join(p.error)
643 648 except (EnvironmentError, OSError) as err:
644 649 cmd = ' '.join(cmd) # human friendly CMD
645 650 tb_err = ("Couldn't run git command (%s).\n"
646 651 "Original error was:%s\n" % (cmd, err))
647 652 log.exception(tb_err)
648 653 if safe_call:
649 654 return '', err
650 655 else:
651 656 raise exceptions.VcsException(tb_err)
652 657
658 @reraise_safe_exceptions
659 def install_hooks(self, wire, force=False):
660 from vcsserver.hook_utils import install_git_hooks
661 repo = self._factory.repo(wire)
662 return install_git_hooks(repo.path, repo.bare, force_create=force)
663
653 664
654 665 def str_to_dulwich(value):
655 666 """
656 667 Dulwich 0.10.1a requires `unicode` objects to be passed in.
657 668 """
658 669 return value.decode(settings.WIRE_ENCODING)
@@ -1,758 +1,776 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from repositories hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 77 except (Abort, InterventionRequired):
78 78 raise_from_original(exceptions.AbortException)
79 79 except RepoLookupError:
80 80 raise_from_original(exceptions.LookupException)
81 81 except RequirementError:
82 82 raise_from_original(exceptions.RequirementException)
83 83 except RepoError:
84 84 raise_from_original(exceptions.VcsException)
85 85 except LookupError:
86 86 raise_from_original(exceptions.LookupException)
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 90 raise_from_original(exceptions.UnhandledException)
91 91 raise
92 92 return wrapper
93 93
94 94
95 95 class MercurialFactory(RepoFactory):
96 96
97 97 def _create_config(self, config, hooks=True):
98 98 if not hooks:
99 99 hooks_to_clean = frozenset((
100 100 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 102 new_config = []
103 103 for section, option, value in config:
104 104 if section == 'hooks' and option in hooks_to_clean:
105 105 continue
106 106 new_config.append((section, option, value))
107 107 config = new_config
108 108
109 109 baseui = make_ui_from_config(config)
110 110 return baseui
111 111
112 112 def _create_repo(self, wire, create):
113 113 baseui = self._create_config(wire["config"])
114 114 return localrepository(baseui, wire["path"], create)
115 115
116 116
117 117 class HgRemote(object):
118 118
119 119 def __init__(self, factory):
120 120 self._factory = factory
121 121
122 122 self._bulk_methods = {
123 123 "affected_files": self.ctx_files,
124 124 "author": self.ctx_user,
125 125 "branch": self.ctx_branch,
126 126 "children": self.ctx_children,
127 127 "date": self.ctx_date,
128 128 "message": self.ctx_description,
129 129 "parents": self.ctx_parents,
130 130 "status": self.ctx_status,
131 131 "obsolete": self.ctx_obsolete,
132 132 "phase": self.ctx_phase,
133 133 "hidden": self.ctx_hidden,
134 134 "_file_paths": self.ctx_list,
135 135 }
136 136
137 137 @reraise_safe_exceptions
138 138 def discover_hg_version(self):
139 139 from mercurial import util
140 140 return util.version()
141 141
142 142 @reraise_safe_exceptions
143 143 def archive_repo(self, archive_path, mtime, file_info, kind):
144 144 if kind == "tgz":
145 145 archiver = archival.tarit(archive_path, mtime, "gz")
146 146 elif kind == "tbz2":
147 147 archiver = archival.tarit(archive_path, mtime, "bz2")
148 148 elif kind == 'zip':
149 149 archiver = archival.zipit(archive_path, mtime)
150 150 else:
151 151 raise exceptions.ArchiveException(
152 152 'Remote does not support: "%s".' % kind)
153 153
154 154 for f_path, f_mode, f_is_link, f_content in file_info:
155 155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 156 archiver.done()
157 157
158 158 @reraise_safe_exceptions
159 159 def bookmarks(self, wire):
160 160 repo = self._factory.repo(wire)
161 161 return dict(repo._bookmarks)
162 162
163 163 @reraise_safe_exceptions
164 164 def branches(self, wire, normal, closed):
165 165 repo = self._factory.repo(wire)
166 166 iter_branches = repo.branchmap().iterbranches()
167 167 bt = {}
168 168 for branch_name, _heads, tip, is_closed in iter_branches:
169 169 if normal and not is_closed:
170 170 bt[branch_name] = tip
171 171 if closed and is_closed:
172 172 bt[branch_name] = tip
173 173
174 174 return bt
175 175
176 176 @reraise_safe_exceptions
177 177 def bulk_request(self, wire, rev, pre_load):
178 178 result = {}
179 179 for attr in pre_load:
180 180 try:
181 181 method = self._bulk_methods[attr]
182 182 result[attr] = method(wire, rev)
183 183 except KeyError:
184 184 raise exceptions.VcsException(
185 185 'Unknown bulk attribute: "%s"' % attr)
186 186 return result
187 187
188 188 @reraise_safe_exceptions
189 189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 191 clone(baseui, source, dest, noupdate=not update_after_clone)
192 192
193 193 @reraise_safe_exceptions
194 194 def commitctx(
195 195 self, wire, message, parents, commit_time, commit_timezone,
196 196 user, files, extra, removed, updated):
197 197
198 198 def _filectxfn(_repo, memctx, path):
199 199 """
200 200 Marks given path as added/changed/removed in a given _repo. This is
201 201 for internal mercurial commit function.
202 202 """
203 203
204 204 # check if this path is removed
205 205 if path in removed:
206 206 # returning None is a way to mark node for removal
207 207 return None
208 208
209 209 # check if this path is added
210 210 for node in updated:
211 211 if node['path'] == path:
212 212 return memfilectx(
213 213 _repo,
214 214 path=node['path'],
215 215 data=node['content'],
216 216 islink=False,
217 217 isexec=bool(node['mode'] & stat.S_IXUSR),
218 218 copied=False,
219 219 memctx=memctx)
220 220
221 221 raise exceptions.AbortException(
222 222 "Given path haven't been marked as added, "
223 223 "changed or removed (%s)" % path)
224 224
225 225 repo = self._factory.repo(wire)
226 226
227 227 commit_ctx = memctx(
228 228 repo=repo,
229 229 parents=parents,
230 230 text=message,
231 231 files=files,
232 232 filectxfn=_filectxfn,
233 233 user=user,
234 234 date=(commit_time, commit_timezone),
235 235 extra=extra)
236 236
237 237 n = repo.commitctx(commit_ctx)
238 238 new_id = hex(n)
239 239
240 240 return new_id
241 241
242 242 @reraise_safe_exceptions
243 243 def ctx_branch(self, wire, revision):
244 244 repo = self._factory.repo(wire)
245 245 ctx = repo[revision]
246 246 return ctx.branch()
247 247
248 248 @reraise_safe_exceptions
249 249 def ctx_children(self, wire, revision):
250 250 repo = self._factory.repo(wire)
251 251 ctx = repo[revision]
252 252 return [child.rev() for child in ctx.children()]
253 253
254 254 @reraise_safe_exceptions
255 255 def ctx_date(self, wire, revision):
256 256 repo = self._factory.repo(wire)
257 257 ctx = repo[revision]
258 258 return ctx.date()
259 259
260 260 @reraise_safe_exceptions
261 261 def ctx_description(self, wire, revision):
262 262 repo = self._factory.repo(wire)
263 263 ctx = repo[revision]
264 264 return ctx.description()
265 265
266 266 @reraise_safe_exceptions
267 267 def ctx_diff(
268 268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 269 repo = self._factory.repo(wire)
270 270 ctx = repo[revision]
271 271 result = ctx.diff(
272 272 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 273 return list(result)
274 274
275 275 @reraise_safe_exceptions
276 276 def ctx_files(self, wire, revision):
277 277 repo = self._factory.repo(wire)
278 278 ctx = repo[revision]
279 279 return ctx.files()
280 280
281 281 @reraise_safe_exceptions
282 282 def ctx_list(self, path, revision):
283 283 repo = self._factory.repo(path)
284 284 ctx = repo[revision]
285 285 return list(ctx)
286 286
287 287 @reraise_safe_exceptions
288 288 def ctx_parents(self, wire, revision):
289 289 repo = self._factory.repo(wire)
290 290 ctx = repo[revision]
291 291 return [parent.rev() for parent in ctx.parents()]
292 292
293 293 @reraise_safe_exceptions
294 294 def ctx_phase(self, wire, revision):
295 295 repo = self._factory.repo(wire)
296 296 ctx = repo[revision]
297 297 # public=0, draft=1, secret=3
298 298 return ctx.phase()
299 299
300 300 @reraise_safe_exceptions
301 301 def ctx_obsolete(self, wire, revision):
302 302 repo = self._factory.repo(wire)
303 303 ctx = repo[revision]
304 304 return ctx.obsolete()
305 305
306 306 @reraise_safe_exceptions
307 307 def ctx_hidden(self, wire, revision):
308 308 repo = self._factory.repo(wire)
309 309 ctx = repo[revision]
310 310 return ctx.hidden()
311 311
312 312 @reraise_safe_exceptions
313 313 def ctx_substate(self, wire, revision):
314 314 repo = self._factory.repo(wire)
315 315 ctx = repo[revision]
316 316 return ctx.substate
317 317
318 318 @reraise_safe_exceptions
319 319 def ctx_status(self, wire, revision):
320 320 repo = self._factory.repo(wire)
321 321 ctx = repo[revision]
322 322 status = repo[ctx.p1().node()].status(other=ctx.node())
323 323 # object of status (odd, custom named tuple in mercurial) is not
324 324 # correctly serializable, we make it a list, as the underlying
325 325 # API expects this to be a list
326 326 return list(status)
327 327
328 328 @reraise_safe_exceptions
329 329 def ctx_user(self, wire, revision):
330 330 repo = self._factory.repo(wire)
331 331 ctx = repo[revision]
332 332 return ctx.user()
333 333
334 334 @reraise_safe_exceptions
335 335 def check_url(self, url, config):
336 336 _proto = None
337 337 if '+' in url[:url.find('://')]:
338 338 _proto = url[0:url.find('+')]
339 339 url = url[url.find('+') + 1:]
340 340 handlers = []
341 341 url_obj = url_parser(url)
342 342 test_uri, authinfo = url_obj.authinfo()
343 343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 344 url_obj.query = obfuscate_qs(url_obj.query)
345 345
346 346 cleaned_uri = str(url_obj)
347 347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348 348
349 349 if authinfo:
350 350 # create a password manager
351 351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 352 passmgr.add_password(*authinfo)
353 353
354 354 handlers.extend((httpbasicauthhandler(passmgr),
355 355 httpdigestauthhandler(passmgr)))
356 356
357 357 o = urllib2.build_opener(*handlers)
358 358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 359 ('Accept', 'application/mercurial-0.1')]
360 360
361 361 q = {"cmd": 'between'}
362 362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 363 qs = '?%s' % urllib.urlencode(q)
364 364 cu = "%s%s" % (test_uri, qs)
365 365 req = urllib2.Request(cu, None, {})
366 366
367 367 try:
368 368 log.debug("Trying to open URL %s", cleaned_uri)
369 369 resp = o.open(req)
370 370 if resp.code != 200:
371 371 raise exceptions.URLError('Return Code is not 200')
372 372 except Exception as e:
373 373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 374 # means it cannot be cloned
375 375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376 376
377 377 # now check if it's a proper hg repo, but don't do it for svn
378 378 try:
379 379 if _proto == 'svn':
380 380 pass
381 381 else:
382 382 # check for pure hg repos
383 383 log.debug(
384 384 "Verifying if URL is a Mercurial repository: %s",
385 385 cleaned_uri)
386 386 httppeer(make_ui_from_config(config), url).lookup('tip')
387 387 except Exception as e:
388 388 log.warning("URL is not a valid Mercurial repository: %s",
389 389 cleaned_uri)
390 390 raise exceptions.URLError(
391 391 "url [%s] does not look like an hg repo org_exc: %s"
392 392 % (cleaned_uri, e))
393 393
394 394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
395 395 return True
396 396
397 397 @reraise_safe_exceptions
398 398 def diff(
399 399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
400 400 context):
401 401 repo = self._factory.repo(wire)
402 402
403 403 if file_filter:
404 404 match_filter = match(file_filter[0], '', [file_filter[1]])
405 405 else:
406 406 match_filter = file_filter
407 407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
408 408
409 409 try:
410 410 return "".join(patch.diff(
411 411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 412 except RepoLookupError:
413 413 raise exceptions.LookupException()
414 414
415 415 @reraise_safe_exceptions
416 416 def file_history(self, wire, revision, path, limit):
417 417 repo = self._factory.repo(wire)
418 418
419 419 ctx = repo[revision]
420 420 fctx = ctx.filectx(path)
421 421
422 422 def history_iter():
423 423 limit_rev = fctx.rev()
424 424 for obj in reversed(list(fctx.filelog())):
425 425 obj = fctx.filectx(obj)
426 426 if limit_rev >= obj.rev():
427 427 yield obj
428 428
429 429 history = []
430 430 for cnt, obj in enumerate(history_iter()):
431 431 if limit and cnt >= limit:
432 432 break
433 433 history.append(hex(obj.node()))
434 434
435 435 return [x for x in history]
436 436
437 437 @reraise_safe_exceptions
438 438 def file_history_untill(self, wire, revision, path, limit):
439 439 repo = self._factory.repo(wire)
440 440 ctx = repo[revision]
441 441 fctx = ctx.filectx(path)
442 442
443 443 file_log = list(fctx.filelog())
444 444 if limit:
445 445 # Limit to the last n items
446 446 file_log = file_log[-limit:]
447 447
448 448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
449 449
450 450 @reraise_safe_exceptions
451 451 def fctx_annotate(self, wire, revision, path):
452 452 repo = self._factory.repo(wire)
453 453 ctx = repo[revision]
454 454 fctx = ctx.filectx(path)
455 455
456 456 result = []
457 457 for i, (a_line, content) in enumerate(fctx.annotate()):
458 458 ln_no = i + 1
459 459 sha = hex(a_line.fctx.node())
460 460 result.append((ln_no, sha, content))
461 461 return result
462 462
463 463 @reraise_safe_exceptions
464 464 def fctx_data(self, wire, revision, path):
465 465 repo = self._factory.repo(wire)
466 466 ctx = repo[revision]
467 467 fctx = ctx.filectx(path)
468 468 return fctx.data()
469 469
470 470 @reraise_safe_exceptions
471 471 def fctx_flags(self, wire, revision, path):
472 472 repo = self._factory.repo(wire)
473 473 ctx = repo[revision]
474 474 fctx = ctx.filectx(path)
475 475 return fctx.flags()
476 476
477 477 @reraise_safe_exceptions
478 478 def fctx_size(self, wire, revision, path):
479 479 repo = self._factory.repo(wire)
480 480 ctx = repo[revision]
481 481 fctx = ctx.filectx(path)
482 482 return fctx.size()
483 483
484 484 @reraise_safe_exceptions
485 485 def get_all_commit_ids(self, wire, name):
486 486 repo = self._factory.repo(wire)
487 487 revs = repo.filtered(name).changelog.index
488 488 return map(lambda x: hex(x[7]), revs)[:-1]
489 489
490 490 @reraise_safe_exceptions
491 491 def get_config_value(self, wire, section, name, untrusted=False):
492 492 repo = self._factory.repo(wire)
493 493 return repo.ui.config(section, name, untrusted=untrusted)
494 494
495 495 @reraise_safe_exceptions
496 496 def get_config_bool(self, wire, section, name, untrusted=False):
497 497 repo = self._factory.repo(wire)
498 498 return repo.ui.configbool(section, name, untrusted=untrusted)
499 499
500 500 @reraise_safe_exceptions
501 501 def get_config_list(self, wire, section, name, untrusted=False):
502 502 repo = self._factory.repo(wire)
503 503 return repo.ui.configlist(section, name, untrusted=untrusted)
504 504
505 505 @reraise_safe_exceptions
506 506 def is_large_file(self, wire, path):
507 507 return largefiles.lfutil.isstandin(path)
508 508
509 509 @reraise_safe_exceptions
510 510 def in_largefiles_store(self, wire, sha):
511 511 repo = self._factory.repo(wire)
512 512 return largefiles.lfutil.instore(repo, sha)
513 513
514 514 @reraise_safe_exceptions
515 515 def in_user_cache(self, wire, sha):
516 516 repo = self._factory.repo(wire)
517 517 return largefiles.lfutil.inusercache(repo.ui, sha)
518 518
519 519 @reraise_safe_exceptions
520 520 def store_path(self, wire, sha):
521 521 repo = self._factory.repo(wire)
522 522 return largefiles.lfutil.storepath(repo, sha)
523 523
524 524 @reraise_safe_exceptions
525 525 def link(self, wire, sha, path):
526 526 repo = self._factory.repo(wire)
527 527 largefiles.lfutil.link(
528 528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529 529
530 530 @reraise_safe_exceptions
531 531 def localrepository(self, wire, create=False):
532 532 self._factory.repo(wire, create=create)
533 533
534 534 @reraise_safe_exceptions
535 535 def lookup(self, wire, revision, both):
536 536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 537 if isinstance(revision, float):
538 538 revision = long(revision)
539 539 repo = self._factory.repo(wire)
540 540 try:
541 541 ctx = repo[revision]
542 542 except RepoLookupError:
543 543 raise exceptions.LookupException(revision)
544 544 except LookupError as e:
545 545 raise exceptions.LookupException(e.name)
546 546
547 547 if not both:
548 548 return ctx.hex()
549 549
550 550 ctx = repo[ctx.hex()]
551 551 return ctx.hex(), ctx.rev()
552 552
553 553 @reraise_safe_exceptions
554 554 def pull(self, wire, url, commit_ids=None):
555 555 repo = self._factory.repo(wire)
556 # Disable any prompts for this repo
557 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
558
556 559 remote = peer(repo, {}, url)
560 # Disable any prompts for this remote
561 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
562
557 563 if commit_ids:
558 564 commit_ids = [bin(commit_id) for commit_id in commit_ids]
559 565
560 566 return exchange.pull(
561 567 repo, remote, heads=commit_ids, force=None).cgresult
562 568
563 569 @reraise_safe_exceptions
564 570 def sync_push(self, wire, url):
565 571 if self.check_url(url, wire['config']):
566 572 repo = self._factory.repo(wire)
573
574 # Disable any prompts for this repo
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576
567 577 bookmarks = dict(repo._bookmarks).keys()
568 578 remote = peer(repo, {}, url)
579 # Disable any prompts for this remote
580 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
581
569 582 return exchange.push(
570 583 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
571 584
572 585 @reraise_safe_exceptions
573 586 def revision(self, wire, rev):
574 587 repo = self._factory.repo(wire)
575 588 ctx = repo[rev]
576 589 return ctx.rev()
577 590
578 591 @reraise_safe_exceptions
579 592 def rev_range(self, wire, filter):
580 593 repo = self._factory.repo(wire)
581 594 revisions = [rev for rev in revrange(repo, filter)]
582 595 return revisions
583 596
584 597 @reraise_safe_exceptions
585 598 def rev_range_hash(self, wire, node):
586 599 repo = self._factory.repo(wire)
587 600
588 601 def get_revs(repo, rev_opt):
589 602 if rev_opt:
590 603 revs = revrange(repo, rev_opt)
591 604 if len(revs) == 0:
592 605 return (nullrev, nullrev)
593 606 return max(revs), min(revs)
594 607 else:
595 608 return len(repo) - 1, 0
596 609
597 610 stop, start = get_revs(repo, [node + ':'])
598 611 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
599 612 return revs
600 613
601 614 @reraise_safe_exceptions
602 615 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
603 616 other_path = kwargs.pop('other_path', None)
604 617
605 618 # case when we want to compare two independent repositories
606 619 if other_path and other_path != wire["path"]:
607 620 baseui = self._factory._create_config(wire["config"])
608 621 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
609 622 else:
610 623 repo = self._factory.repo(wire)
611 624 return list(repo.revs(rev_spec, *args))
612 625
613 626 @reraise_safe_exceptions
614 627 def strip(self, wire, revision, update, backup):
615 628 repo = self._factory.repo(wire)
616 629 ctx = repo[revision]
617 630 hgext_strip(
618 631 repo.baseui, repo, ctx.node(), update=update, backup=backup)
619 632
620 633 @reraise_safe_exceptions
621 634 def verify(self, wire,):
622 635 repo = self._factory.repo(wire)
623 636 baseui = self._factory._create_config(wire['config'])
624 637 baseui.setconfig('ui', 'quiet', 'false')
625 638 output = io.BytesIO()
626 639
627 640 def write(data, **unused_kwargs):
628 641 output.write(data)
629 642 baseui.write = write
630 643
631 644 repo.ui = baseui
632 645 verify.verify(repo)
633 646 return output.getvalue()
634 647
635 648 @reraise_safe_exceptions
636 649 def tag(self, wire, name, revision, message, local, user,
637 650 tag_time, tag_timezone):
638 651 repo = self._factory.repo(wire)
639 652 ctx = repo[revision]
640 653 node = ctx.node()
641 654
642 655 date = (tag_time, tag_timezone)
643 656 try:
644 657 hg_tag.tag(repo, name, node, message, local, user, date)
645 658 except Abort as e:
646 659 log.exception("Tag operation aborted")
647 660 # Exception can contain unicode which we convert
648 661 raise exceptions.AbortException(repr(e))
649 662
650 663 @reraise_safe_exceptions
651 664 def tags(self, wire):
652 665 repo = self._factory.repo(wire)
653 666 return repo.tags()
654 667
655 668 @reraise_safe_exceptions
656 669 def update(self, wire, node=None, clean=False):
657 670 repo = self._factory.repo(wire)
658 671 baseui = self._factory._create_config(wire['config'])
659 672 commands.update(baseui, repo, node=node, clean=clean)
660 673
661 674 @reraise_safe_exceptions
662 675 def identify(self, wire):
663 676 repo = self._factory.repo(wire)
664 677 baseui = self._factory._create_config(wire['config'])
665 678 output = io.BytesIO()
666 679 baseui.write = output.write
667 680 # This is required to get a full node id
668 681 baseui.debugflag = True
669 682 commands.identify(baseui, repo, id=True)
670 683
671 684 return output.getvalue()
672 685
673 686 @reraise_safe_exceptions
674 687 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
675 688 hooks=True):
676 689 repo = self._factory.repo(wire)
677 690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
678 691
679 692 # Mercurial internally has a lot of logic that checks ONLY if
680 693 # option is defined, we just pass those if they are defined then
681 694 opts = {}
682 695 if bookmark:
683 696 opts['bookmark'] = bookmark
684 697 if branch:
685 698 opts['branch'] = branch
686 699 if revision:
687 700 opts['rev'] = revision
688 701
689 702 commands.pull(baseui, repo, source, **opts)
690 703
691 704 @reraise_safe_exceptions
692 705 def heads(self, wire, branch=None):
693 706 repo = self._factory.repo(wire)
694 707 baseui = self._factory._create_config(wire['config'])
695 708 output = io.BytesIO()
696 709
697 710 def write(data, **unused_kwargs):
698 711 output.write(data)
699 712
700 713 baseui.write = write
701 714 if branch:
702 715 args = [branch]
703 716 else:
704 717 args = []
705 718 commands.heads(baseui, repo, template='{node} ', *args)
706 719
707 720 return output.getvalue()
708 721
709 722 @reraise_safe_exceptions
710 723 def ancestor(self, wire, revision1, revision2):
711 724 repo = self._factory.repo(wire)
712 725 changelog = repo.changelog
713 726 lookup = repo.lookup
714 727 a = changelog.ancestor(lookup(revision1), lookup(revision2))
715 728 return hex(a)
716 729
717 730 @reraise_safe_exceptions
718 731 def push(self, wire, revisions, dest_path, hooks=True,
719 732 push_branches=False):
720 733 repo = self._factory.repo(wire)
721 734 baseui = self._factory._create_config(wire['config'], hooks=hooks)
722 735 commands.push(baseui, repo, dest=dest_path, rev=revisions,
723 736 new_branch=push_branches)
724 737
725 738 @reraise_safe_exceptions
726 739 def merge(self, wire, revision):
727 740 repo = self._factory.repo(wire)
728 741 baseui = self._factory._create_config(wire['config'])
729 742 repo.ui.setconfig('ui', 'merge', 'internal:dump')
730 743
731 744 # In case of sub repositories are used mercurial prompts the user in
732 745 # case of merge conflicts or different sub repository sources. By
733 746 # setting the interactive flag to `False` mercurial doesn't prompt the
734 747 # used but instead uses a default value.
735 748 # user but instead uses a default value.
736 749
737 750 commands.merge(baseui, repo, rev=revision)
738 751
739 752 @reraise_safe_exceptions
740 753 def commit(self, wire, message, username, close_branch=False):
741 754 repo = self._factory.repo(wire)
742 755 baseui = self._factory._create_config(wire['config'])
743 756 repo.ui.setconfig('ui', 'username', username)
744 757 commands.commit(baseui, repo, message=message, close_branch=close_branch)
745 758
746 759 @reraise_safe_exceptions
747 760 def rebase(self, wire, source=None, dest=None, abort=False):
748 761 repo = self._factory.repo(wire)
749 762 baseui = self._factory._create_config(wire['config'])
750 763 repo.ui.setconfig('ui', 'merge', 'internal:dump')
751 764 rebase.rebase(
752 765 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
753 766
754 767 @reraise_safe_exceptions
755 768 def bookmark(self, wire, bookmark, revision=None):
756 769 repo = self._factory.repo(wire)
757 770 baseui = self._factory._create_config(wire['config'])
758 771 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
772
773 @reraise_safe_exceptions
774 def install_hooks(self, wire, force=False):
775 # we don't need any special hooks for Mercurial
776 pass
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 39 lfproto.capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto.capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 else:
56 56 logger.debug('Extension largefiles disabled')
57 57 calc_capabilities = lfproto.capabilitiesorig
58 58 return calc_capabilities(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
63 63 def patch_subrepo_type_mapping():
64 64 from collections import defaultdict
65 65 from hgcompat import subrepo
66 66 from exceptions import SubrepoMergeException
67 67
68 68 class NoOpSubrepo(subrepo.abstractsubrepo):
69 69
70 70 def __init__(self, ctx, path, *args, **kwargs):
71 71 """Initialize abstractsubrepo part
72 72
73 73 ``ctx`` is the context referring this subrepository in the
74 74 parent repository.
75 75
76 76 ``path`` is the path to this subrepository as seen from
77 77 innermost repository.
78 78 """
79 79 self.ui = ctx.repo().ui
80 80 self._ctx = ctx
81 81 self._path = path
82 82
83 83 def storeclean(self, path):
84 84 """
85 85 returns true if the repository has not changed since it was last
86 86 cloned from or pushed to a given repository.
87 87 """
88 88 return True
89 89
90 def dirty(self, ignoreupdate=False):
90 def dirty(self, ignoreupdate=False, missing=False):
91 91 """returns true if the dirstate of the subrepo is dirty or does not
92 92 match current stored state. If ignoreupdate is true, only check
93 93 whether the subrepo has uncommitted changes in its dirstate.
94 94 """
95 95 return False
96 96
97 97 def basestate(self):
98 98 """current working directory base state, disregarding .hgsubstate
99 99 state and working directory modifications"""
100 100 substate = subrepo.state(self._ctx, self.ui)
101 101 file_system_path, rev, repotype = substate.get(self._path)
102 102 return rev
103 103
104 104 def remove(self):
105 105 """remove the subrepo
106 106
107 107 (should verify the dirstate is not dirty first)
108 108 """
109 109 pass
110 110
111 111 def get(self, state, overwrite=False):
112 112 """run whatever commands are needed to put the subrepo into
113 113 this state
114 114 """
115 115 pass
116 116
117 117 def merge(self, state):
118 118 """merge currently-saved state with the new state."""
119 119 raise SubrepoMergeException()
120 120
121 121 def push(self, opts):
122 122 """perform whatever action is analogous to 'hg push'
123 123
124 124 This may be a no-op on some systems.
125 125 """
126 126 pass
127 127
128 128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 129 # whenever a subrepo class is looked up.
130 130 subrepo.types = {
131 131 'hg': NoOpSubrepo,
132 132 'git': NoOpSubrepo,
133 133 'svn': NoOpSubrepo
134 134 }
@@ -1,467 +1,541 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 import json
24 23 import logging
25 24 import collections
26 25 import importlib
26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 connection.request('POST', '/', body)
49 try:
50 connection.request('POST', '/', body)
51 except Exception:
52 log.error('Connection failed on %s', connection)
53 raise
50 54 response = connection.getresponse()
51 55 return json.loads(response.read())
52 56
53 57 def _serialize(self, hook_name, extras):
54 58 data = {
55 59 'method': hook_name,
56 60 'extras': extras
57 61 }
58 62 return json.dumps(data)
59 63
60 64
61 65 class HooksDummyClient(object):
62 66 def __init__(self, hooks_module):
63 67 self._hooks_module = importlib.import_module(hooks_module)
64 68
65 69 def __call__(self, hook_name, extras):
66 70 with self._hooks_module.Hooks() as hooks:
67 71 return getattr(hooks, hook_name)(extras)
68 72
69 73
70 74 class RemoteMessageWriter(object):
71 75 """Writer base class."""
72 76 def write(self, message):
73 77 raise NotImplementedError()
74 78
75 79
76 80 class HgMessageWriter(RemoteMessageWriter):
77 81 """Writer that knows how to send messages to mercurial clients."""
78 82
79 83 def __init__(self, ui):
80 84 self.ui = ui
81 85
82 86 def write(self, message):
83 87 # TODO: Check why the quiet flag is set by default.
84 88 old = self.ui.quiet
85 89 self.ui.quiet = False
86 90 self.ui.status(message.encode('utf-8'))
87 91 self.ui.quiet = old
88 92
89 93
90 94 class GitMessageWriter(RemoteMessageWriter):
91 95 """Writer that knows how to send messages to git clients."""
92 96
93 97 def __init__(self, stdout=None):
94 98 self.stdout = stdout or sys.stdout
95 99
96 100 def write(self, message):
97 101 self.stdout.write(message.encode('utf-8'))
98 102
99 103
104 class SvnMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to svn clients."""
106
107 def __init__(self, stderr=None):
108 # SVN needs data sent to stderr for back-to-client messaging
109 self.stderr = stderr or sys.stderr
110
111 def write(self, message):
112 self.stderr.write(message.encode('utf-8'))
113
114
100 115 def _handle_exception(result):
101 116 exception_class = result.get('exception')
102 117 exception_traceback = result.get('exception_traceback')
103 118
104 119 if exception_traceback:
105 120 log.error('Got traceback from remote call:%s', exception_traceback)
106 121
107 122 if exception_class == 'HTTPLockedRC':
108 123 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 124 elif exception_class == 'RepositoryError':
110 125 raise exceptions.VcsException(*result['exception_args'])
111 126 elif exception_class:
112 127 raise Exception('Got remote exception "%s" with args "%s"' %
113 128 (exception_class, result['exception_args']))
114 129
115 130
116 131 def _get_hooks_client(extras):
117 132 if 'hooks_uri' in extras:
118 133 protocol = extras.get('hooks_protocol')
119 134 return HooksHttpClient(extras['hooks_uri'])
120 135 else:
121 136 return HooksDummyClient(extras['hooks_module'])
122 137
123 138
124 139 def _call_hook(hook_name, extras, writer):
125 hooks = _get_hooks_client(extras)
126 result = hooks(hook_name, extras)
140 hooks_client = _get_hooks_client(extras)
141 log.debug('Hooks, using client:%s', hooks_client)
142 result = hooks_client(hook_name, extras)
127 143 log.debug('Hooks got result: %s', result)
128 144 writer.write(result['output'])
129 145 _handle_exception(result)
130 146
131 147 return result['status']
132 148
133 149
134 150 def _extras_from_ui(ui):
135 151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
136 152 if not hook_data:
137 153 # maybe it's inside environ ?
138 154 env_hook_data = os.environ.get('RC_SCM_DATA')
139 155 if env_hook_data:
140 156 hook_data = env_hook_data
141 157
142 158 extras = {}
143 159 if hook_data:
144 160 extras = json.loads(hook_data)
145 161 return extras
146 162
147 163
148 164 def _rev_range_hash(repo, node):
149 165
150 166 commits = []
151 167 for rev in xrange(repo[node], len(repo)):
152 168 ctx = repo[rev]
153 169 commit_id = mercurial.node.hex(ctx.node())
154 170 branch = ctx.branch()
155 171 commits.append((commit_id, branch))
156 172
157 173 return commits
158 174
159 175
160 176 def repo_size(ui, repo, **kwargs):
161 177 extras = _extras_from_ui(ui)
162 178 return _call_hook('repo_size', extras, HgMessageWriter(ui))
163 179
164 180
165 181 def pre_pull(ui, repo, **kwargs):
166 182 extras = _extras_from_ui(ui)
167 183 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
168 184
169 185
170 186 def pre_pull_ssh(ui, repo, **kwargs):
171 187 extras = _extras_from_ui(ui)
172 188 if extras and extras.get('SSH'):
173 189 return pre_pull(ui, repo, **kwargs)
174 190 return 0
175 191
176 192
177 193 def post_pull(ui, repo, **kwargs):
178 194 extras = _extras_from_ui(ui)
179 195 return _call_hook('post_pull', extras, HgMessageWriter(ui))
180 196
181 197
182 198 def post_pull_ssh(ui, repo, **kwargs):
183 199 extras = _extras_from_ui(ui)
184 200 if extras and extras.get('SSH'):
185 201 return post_pull(ui, repo, **kwargs)
186 202 return 0
187 203
188 204
189 205 def pre_push(ui, repo, node=None, **kwargs):
190 206 extras = _extras_from_ui(ui)
191 207
192 208 rev_data = []
193 209 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
194 210 branches = collections.defaultdict(list)
195 211 for commit_id, branch in _rev_range_hash(repo, node):
196 212 branches[branch].append(commit_id)
197 213
198 214 for branch, commits in branches.iteritems():
199 215 old_rev = kwargs.get('node_last') or commits[0]
200 216 rev_data.append({
201 217 'old_rev': old_rev,
202 218 'new_rev': commits[-1],
203 219 'ref': '',
204 220 'type': 'branch',
205 221 'name': branch,
206 222 })
207 223
208 224 extras['commit_ids'] = rev_data
209 225 return _call_hook('pre_push', extras, HgMessageWriter(ui))
210 226
211 227
212 228 def pre_push_ssh(ui, repo, node=None, **kwargs):
213 229 if _extras_from_ui(ui).get('SSH'):
214 230 return pre_push(ui, repo, node, **kwargs)
215 231
216 232 return 0
217 233
218 234
219 235 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
220 236 extras = _extras_from_ui(ui)
221 237 if extras.get('SSH'):
222 238 permission = extras['SSH_PERMISSIONS']
223 239
224 240 if 'repository.write' == permission or 'repository.admin' == permission:
225 241 return 0
226 242
227 243 # non-zero ret code
228 244 return 1
229 245
230 246 return 0
231 247
232 248
233 249 def post_push(ui, repo, node, **kwargs):
234 250 extras = _extras_from_ui(ui)
235 251
236 252 commit_ids = []
237 253 branches = []
238 254 bookmarks = []
239 255 tags = []
240 256
241 257 for commit_id, branch in _rev_range_hash(repo, node):
242 258 commit_ids.append(commit_id)
243 259 if branch not in branches:
244 260 branches.append(branch)
245 261
246 262 if hasattr(ui, '_rc_pushkey_branches'):
247 263 bookmarks = ui._rc_pushkey_branches
248 264
249 265 extras['commit_ids'] = commit_ids
250 266 extras['new_refs'] = {
251 267 'branches': branches,
252 268 'bookmarks': bookmarks,
253 269 'tags': tags
254 270 }
255 271
256 272 return _call_hook('post_push', extras, HgMessageWriter(ui))
257 273
258 274
259 275 def post_push_ssh(ui, repo, node, **kwargs):
260 276 if _extras_from_ui(ui).get('SSH'):
261 277 return post_push(ui, repo, node, **kwargs)
262 278 return 0
263 279
264 280
265 281 def key_push(ui, repo, **kwargs):
266 282 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
267 283 # store new bookmarks in our UI object propagated later to post_push
268 284 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
269 285 return
270 286
271 287
272 288 # backward compat
273 289 log_pull_action = post_pull
274 290
275 291 # backward compat
276 292 log_push_action = post_push
277 293
278 294
279 295 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
280 296 """
281 297 Old hook name: keep here for backward compatibility.
282 298
283 299 This is only required when the installed git hooks are not upgraded.
284 300 """
285 301 pass
286 302
287 303
288 304 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
289 305 """
290 306 Old hook name: keep here for backward compatibility.
291 307
292 308 This is only required when the installed git hooks are not upgraded.
293 309 """
294 310 pass
295 311
296 312
297 313 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
298 314
299 315
300 316 def git_pre_pull(extras):
301 317 """
302 318 Pre pull hook.
303 319
304 320 :param extras: dictionary containing the keys defined in simplevcs
305 321 :type extras: dict
306 322
307 323 :return: status code of the hook. 0 for success.
308 324 :rtype: int
309 325 """
310 326 if 'pull' not in extras['hooks']:
311 327 return HookResponse(0, '')
312 328
313 329 stdout = io.BytesIO()
314 330 try:
315 331 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
316 332 except Exception as error:
317 333 status = 128
318 334 stdout.write('ERROR: %s\n' % str(error))
319 335
320 336 return HookResponse(status, stdout.getvalue())
321 337
322 338
323 339 def git_post_pull(extras):
324 340 """
325 341 Post pull hook.
326 342
327 343 :param extras: dictionary containing the keys defined in simplevcs
328 344 :type extras: dict
329 345
330 346 :return: status code of the hook. 0 for success.
331 347 :rtype: int
332 348 """
333 349 if 'pull' not in extras['hooks']:
334 350 return HookResponse(0, '')
335 351
336 352 stdout = io.BytesIO()
337 353 try:
338 354 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
339 355 except Exception as error:
340 356 status = 128
341 357 stdout.write('ERROR: %s\n' % error)
342 358
343 359 return HookResponse(status, stdout.getvalue())
344 360
345 361
346 362 def _parse_git_ref_lines(revision_lines):
347 363 rev_data = []
348 364 for revision_line in revision_lines or []:
349 365 old_rev, new_rev, ref = revision_line.strip().split(' ')
350 366 ref_data = ref.split('/', 2)
351 367 if ref_data[1] in ('tags', 'heads'):
352 368 rev_data.append({
353 369 'old_rev': old_rev,
354 370 'new_rev': new_rev,
355 371 'ref': ref,
356 372 'type': ref_data[1],
357 373 'name': ref_data[2],
358 374 })
359 375 return rev_data
360 376
361 377
362 378 def git_pre_receive(unused_repo_path, revision_lines, env):
363 379 """
364 380 Pre push hook.
365 381
366 382 :param extras: dictionary containing the keys defined in simplevcs
367 383 :type extras: dict
368 384
369 385 :return: status code of the hook. 0 for success.
370 386 :rtype: int
371 387 """
372 388 extras = json.loads(env['RC_SCM_DATA'])
373 389 rev_data = _parse_git_ref_lines(revision_lines)
374 390 if 'push' not in extras['hooks']:
375 391 return 0
376 392 extras['commit_ids'] = rev_data
377 393 return _call_hook('pre_push', extras, GitMessageWriter())
378 394
379 395
380 396 def git_post_receive(unused_repo_path, revision_lines, env):
381 397 """
382 398 Post push hook.
383 399
384 400 :param extras: dictionary containing the keys defined in simplevcs
385 401 :type extras: dict
386 402
387 403 :return: status code of the hook. 0 for success.
388 404 :rtype: int
389 405 """
390 406 extras = json.loads(env['RC_SCM_DATA'])
391 407 if 'push' not in extras['hooks']:
392 408 return 0
393 409
394 410 rev_data = _parse_git_ref_lines(revision_lines)
395 411
396 412 git_revs = []
397 413
398 414 # N.B.(skreft): it is ok to just call git, as git before calling a
399 415 # subcommand sets the PATH environment variable so that it point to the
400 416 # correct version of the git executable.
401 417 empty_commit_id = '0' * 40
402 418 branches = []
403 419 tags = []
404 420 for push_ref in rev_data:
405 421 type_ = push_ref['type']
406 422
407 423 if type_ == 'heads':
408 424 if push_ref['old_rev'] == empty_commit_id:
409 425 # starting new branch case
410 426 if push_ref['name'] not in branches:
411 427 branches.append(push_ref['name'])
412 428
413 429 # Fix up head revision if needed
414 430 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
415 431 try:
416 432 subprocessio.run_command(cmd, env=os.environ.copy())
417 433 except Exception:
418 434 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
419 435 'refs/heads/%s' % push_ref['name']]
420 436 print("Setting default branch to %s" % push_ref['name'])
421 437 subprocessio.run_command(cmd, env=os.environ.copy())
422 438
423 439 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
424 440 '--format=%(refname)', 'refs/heads/*']
425 441 stdout, stderr = subprocessio.run_command(
426 442 cmd, env=os.environ.copy())
427 443 heads = stdout
428 444 heads = heads.replace(push_ref['ref'], '')
429 445 heads = ' '.join(head for head in heads.splitlines() if head)
430 446 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
431 447 '--pretty=format:%H', '--', push_ref['new_rev'],
432 448 '--not', heads]
433 449 stdout, stderr = subprocessio.run_command(
434 450 cmd, env=os.environ.copy())
435 451 git_revs.extend(stdout.splitlines())
436 452 elif push_ref['new_rev'] == empty_commit_id:
437 453 # delete branch case
438 454 git_revs.append('delete_branch=>%s' % push_ref['name'])
439 455 else:
440 456 if push_ref['name'] not in branches:
441 457 branches.append(push_ref['name'])
442 458
443 459 cmd = [settings.GIT_EXECUTABLE, 'log',
444 460 '{old_rev}..{new_rev}'.format(**push_ref),
445 461 '--reverse', '--pretty=format:%H']
446 462 stdout, stderr = subprocessio.run_command(
447 463 cmd, env=os.environ.copy())
448 464 git_revs.extend(stdout.splitlines())
449 465 elif type_ == 'tags':
450 466 if push_ref['name'] not in tags:
451 467 tags.append(push_ref['name'])
452 468 git_revs.append('tag=>%s' % push_ref['name'])
453 469
454 470 extras['commit_ids'] = git_revs
455 471 extras['new_refs'] = {
456 472 'branches': branches,
457 473 'bookmarks': [],
458 474 'tags': tags,
459 475 }
460 476
461 477 if 'repo_size' in extras['hooks']:
462 478 try:
463 479 _call_hook('repo_size', extras, GitMessageWriter())
464 480 except:
465 481 pass
466 482
467 483 return _call_hook('post_push', extras, GitMessageWriter())
484
485
486 def svn_pre_commit(repo_path, commit_data, env):
487 path, txn_id = commit_data
488 branches = []
489 tags = []
490
491 cmd = ['svnlook', 'pget',
492 '-t', txn_id,
493 '--revprop', path, 'rc-scm-extras']
494 stdout, stderr = subprocessio.run_command(
495 cmd, env=os.environ.copy())
496 extras = json.loads(base64.urlsafe_b64decode(stdout))
497
498 extras['commit_ids'] = []
499 extras['txn_id'] = txn_id
500 extras['new_refs'] = {
501 'branches': branches,
502 'bookmarks': [],
503 'tags': tags,
504 }
505 sys.stderr.write(str(extras))
506 return _call_hook('pre_push', extras, SvnMessageWriter())
507
508
509 def svn_post_commit(repo_path, commit_data, env):
510 """
511 commit_data is path, rev, txn_id
512 """
513 path, commit_id, txn_id = commit_data
514 branches = []
515 tags = []
516
517 cmd = ['svnlook', 'pget',
518 '-r', commit_id,
519 '--revprop', path, 'rc-scm-extras']
520 stdout, stderr = subprocessio.run_command(
521 cmd, env=os.environ.copy())
522
523 extras = json.loads(base64.urlsafe_b64decode(stdout))
524
525 extras['commit_ids'] = [commit_id]
526 extras['txn_id'] = txn_id
527 extras['new_refs'] = {
528 'branches': branches,
529 'bookmarks': [],
530 'tags': tags,
531 }
532
533 if 'repo_size' in extras['hooks']:
534 try:
535 _call_hook('repo_size', extras, SvnMessageWriter())
536 except:
537 pass
538
539 return _call_hook('post_push', extras, SvnMessageWriter())
540
541
@@ -1,478 +1,487 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 import os
18 19 import base64
19 20 import locale
20 21 import logging
21 22 import uuid
22 23 import wsgiref.util
23 24 import traceback
24 25 from itertools import chain
25 26
26 27 import simplejson as json
27 28 import msgpack
28 29 from beaker.cache import CacheManager
29 30 from beaker.util import parse_cache_config_options
30 31 from pyramid.config import Configurator
31 32 from pyramid.wsgi import wsgiapp
33 from pyramid.compat import configparser
32 34
33 35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
34 36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
35 37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
36 38 from vcsserver.echo_stub.echo_app import EchoApp
37 39 from vcsserver.exceptions import HTTPRepoLocked
38 40 from vcsserver.server import VcsServer
39 41
40 42 try:
41 43 from vcsserver.git import GitFactory, GitRemote
42 44 except ImportError:
43 45 GitFactory = None
44 46 GitRemote = None
45 47
46 48 try:
47 49 from vcsserver.hg import MercurialFactory, HgRemote
48 50 except ImportError:
49 51 MercurialFactory = None
50 52 HgRemote = None
51 53
52 54 try:
53 55 from vcsserver.svn import SubversionFactory, SvnRemote
54 56 except ImportError:
55 57 SubversionFactory = None
56 58 SvnRemote = None
57 59
58 60 log = logging.getLogger(__name__)
59 61
60 62
61 63 def _is_request_chunked(environ):
62 64 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
63 65 return stream
64 66
65 67
66 68 class VCS(object):
67 69 def __init__(self, locale=None, cache_config=None):
68 70 self.locale = locale
69 71 self.cache_config = cache_config
70 72 self._configure_locale()
71 73 self._initialize_cache()
72 74
73 75 if GitFactory and GitRemote:
74 76 git_repo_cache = self.cache.get_cache_region(
75 77 'git', region='repo_object')
76 78 git_factory = GitFactory(git_repo_cache)
77 79 self._git_remote = GitRemote(git_factory)
78 80 else:
79 81 log.info("Git client import failed")
80 82
81 83 if MercurialFactory and HgRemote:
82 84 hg_repo_cache = self.cache.get_cache_region(
83 85 'hg', region='repo_object')
84 86 hg_factory = MercurialFactory(hg_repo_cache)
85 87 self._hg_remote = HgRemote(hg_factory)
86 88 else:
87 89 log.info("Mercurial client import failed")
88 90
89 91 if SubversionFactory and SvnRemote:
90 92 svn_repo_cache = self.cache.get_cache_region(
91 93 'svn', region='repo_object')
92 94 svn_factory = SubversionFactory(svn_repo_cache)
95 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
93 99 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
94 100 else:
95 101 log.info("Subversion client import failed")
96 102
97 103 self._vcsserver = VcsServer()
98 104
99 105 def _initialize_cache(self):
100 106 cache_config = parse_cache_config_options(self.cache_config)
101 107 log.info('Initializing beaker cache: %s' % cache_config)
102 108 self.cache = CacheManager(**cache_config)
103 109
104 110 def _configure_locale(self):
105 111 if self.locale:
106 112 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
107 113 else:
108 114 log.info(
109 115 'Configuring locale subsystem based on environment variables')
110 116 try:
111 117 # If self.locale is the empty string, then the locale
112 118 # module will use the environment variables. See the
113 119 # documentation of the package `locale`.
114 120 locale.setlocale(locale.LC_ALL, self.locale)
115 121
116 122 language_code, encoding = locale.getlocale()
117 123 log.info(
118 124 'Locale set to language code "%s" with encoding "%s".',
119 125 language_code, encoding)
120 126 except locale.Error:
121 127 log.exception(
122 128 'Cannot set locale, not configuring the locale system')
123 129
124 130
125 131 class WsgiProxy(object):
126 132 def __init__(self, wsgi):
127 133 self.wsgi = wsgi
128 134
129 135 def __call__(self, environ, start_response):
130 136 input_data = environ['wsgi.input'].read()
131 137 input_data = msgpack.unpackb(input_data)
132 138
133 139 error = None
134 140 try:
135 141 data, status, headers = self.wsgi.handle(
136 142 input_data['environment'], input_data['input_data'],
137 143 *input_data['args'], **input_data['kwargs'])
138 144 except Exception as e:
139 145 data, status, headers = [], None, None
140 146 error = {
141 147 'message': str(e),
142 148 '_vcs_kind': getattr(e, '_vcs_kind', None)
143 149 }
144 150
145 151 start_response(200, {})
146 152 return self._iterator(error, status, headers, data)
147 153
148 154 def _iterator(self, error, status, headers, data):
149 155 initial_data = [
150 156 error,
151 157 status,
152 158 headers,
153 159 ]
154 160
155 161 for d in chain(initial_data, data):
156 162 yield msgpack.packb(d)
157 163
158 164
159 165 class HTTPApplication(object):
160 166 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
161 167
162 168 remote_wsgi = remote_wsgi
163 169 _use_echo_app = False
164 170
165 171 def __init__(self, settings=None, global_config=None):
166 172 self.config = Configurator(settings=settings)
167 173 self.global_config = global_config
168 174
169 175 locale = settings.get('locale', '') or 'en_US.UTF-8'
170 176 vcs = VCS(locale=locale, cache_config=settings)
171 177 self._remotes = {
172 178 'hg': vcs._hg_remote,
173 179 'git': vcs._git_remote,
174 180 'svn': vcs._svn_remote,
175 181 'server': vcs._vcsserver,
176 182 }
177 183 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
178 184 self._use_echo_app = True
179 185 log.warning("Using EchoApp for VCS operations.")
180 186 self.remote_wsgi = remote_wsgi_stub
181 187 self._configure_settings(settings)
182 188 self._configure()
183 189
184 190 def _configure_settings(self, app_settings):
185 191 """
186 192 Configure the settings module.
187 193 """
188 194 git_path = app_settings.get('git_path', None)
189 195 if git_path:
190 196 settings.GIT_EXECUTABLE = git_path
197 binary_dir = app_settings.get('core.binary_dir', None)
198 if binary_dir:
199 settings.BINARY_DIR = binary_dir
191 200
192 201 def _configure(self):
193 202 self.config.add_renderer(
194 203 name='msgpack',
195 204 factory=self._msgpack_renderer_factory)
196 205
197 206 self.config.add_route('service', '/_service')
198 207 self.config.add_route('status', '/status')
199 208 self.config.add_route('hg_proxy', '/proxy/hg')
200 209 self.config.add_route('git_proxy', '/proxy/git')
201 210 self.config.add_route('vcs', '/{backend}')
202 211 self.config.add_route('stream_git', '/stream/git/*repo_name')
203 212 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
204 213
205 214 self.config.add_view(
206 215 self.status_view, route_name='status', renderer='json')
207 216 self.config.add_view(
208 217 self.service_view, route_name='service', renderer='msgpack')
209 218
210 219 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
211 220 self.config.add_view(self.git_proxy(), route_name='git_proxy')
212 221 self.config.add_view(
213 222 self.vcs_view, route_name='vcs', renderer='msgpack',
214 223 custom_predicates=[self.is_vcs_view])
215 224
216 225 self.config.add_view(self.hg_stream(), route_name='stream_hg')
217 226 self.config.add_view(self.git_stream(), route_name='stream_git')
218 227
219 228 def notfound(request):
220 229 return {'status': '404 NOT FOUND'}
221 230 self.config.add_notfound_view(notfound, renderer='json')
222 231
223 232 self.config.add_view(self.handle_vcs_exception, context=Exception)
224 233
225 234 self.config.add_tween(
226 235 'vcsserver.tweens.RequestWrapperTween',
227 236 )
228 237
229 238 def wsgi_app(self):
230 239 return self.config.make_wsgi_app()
231 240
232 241 def vcs_view(self, request):
233 242 remote = self._remotes[request.matchdict['backend']]
234 243 payload = msgpack.unpackb(request.body, use_list=True)
235 244 method = payload.get('method')
236 245 params = payload.get('params')
237 246 wire = params.get('wire')
238 247 args = params.get('args')
239 248 kwargs = params.get('kwargs')
240 249 if wire:
241 250 try:
242 251 wire['context'] = uuid.UUID(wire['context'])
243 252 except KeyError:
244 253 pass
245 254 args.insert(0, wire)
246 255
247 256 log.debug('method called:%s with kwargs:%s', method, kwargs)
248 257 try:
249 258 resp = getattr(remote, method)(*args, **kwargs)
250 259 except Exception as e:
251 260 tb_info = traceback.format_exc()
252 261
253 262 type_ = e.__class__.__name__
254 263 if type_ not in self.ALLOWED_EXCEPTIONS:
255 264 type_ = None
256 265
257 266 resp = {
258 267 'id': payload.get('id'),
259 268 'error': {
260 269 'message': e.message,
261 270 'traceback': tb_info,
262 271 'type': type_
263 272 }
264 273 }
265 274 try:
266 275 resp['error']['_vcs_kind'] = e._vcs_kind
267 276 except AttributeError:
268 277 pass
269 278 else:
270 279 resp = {
271 280 'id': payload.get('id'),
272 281 'result': resp
273 282 }
274 283
275 284 return resp
276 285
277 286 def status_view(self, request):
278 287 import vcsserver
279 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__}
288 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
289 'pid': os.getpid()}
280 290
281 291 def service_view(self, request):
282 292 import vcsserver
283 import ConfigParser as configparser
284 293
285 294 payload = msgpack.unpackb(request.body, use_list=True)
286 295
287 296 try:
288 297 path = self.global_config['__file__']
289 298 config = configparser.ConfigParser()
290 299 config.read(path)
291 300 parsed_ini = config
292 301 if parsed_ini.has_section('server:main'):
293 302 parsed_ini = dict(parsed_ini.items('server:main'))
294 303 except Exception:
295 304 log.exception('Failed to read .ini file for display')
296 305 parsed_ini = {}
297 306
298 307 resp = {
299 308 'id': payload.get('id'),
300 309 'result': dict(
301 310 version=vcsserver.__version__,
302 311 config=parsed_ini,
303 312 payload=payload,
304 313 )
305 314 }
306 315 return resp
307 316
308 317 def _msgpack_renderer_factory(self, info):
309 318 def _render(value, system):
310 319 value = msgpack.packb(value)
311 320 request = system.get('request')
312 321 if request is not None:
313 322 response = request.response
314 323 ct = response.content_type
315 324 if ct == response.default_content_type:
316 325 response.content_type = 'application/x-msgpack'
317 326 return value
318 327 return _render
319 328
320 329 def set_env_from_config(self, environ, config):
321 330 dict_conf = {}
322 331 try:
323 332 for elem in config:
324 333 if elem[0] == 'rhodecode':
325 334 dict_conf = json.loads(elem[2])
326 335 break
327 336 except Exception:
328 337 log.exception('Failed to fetch SCM CONFIG')
329 338 return
330 339
331 340 username = dict_conf.get('username')
332 341 if username:
333 342 environ['REMOTE_USER'] = username
334 343 # mercurial specific, some extension api rely on this
335 344 environ['HGUSER'] = username
336 345
337 346 ip = dict_conf.get('ip')
338 347 if ip:
339 348 environ['REMOTE_HOST'] = ip
340 349
341 350 if _is_request_chunked(environ):
342 351 # set the compatibility flag for webob
343 352 environ['wsgi.input_terminated'] = True
344 353
345 354 def hg_proxy(self):
346 355 @wsgiapp
347 356 def _hg_proxy(environ, start_response):
348 357 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
349 358 return app(environ, start_response)
350 359 return _hg_proxy
351 360
352 361 def git_proxy(self):
353 362 @wsgiapp
354 363 def _git_proxy(environ, start_response):
355 364 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
356 365 return app(environ, start_response)
357 366 return _git_proxy
358 367
359 368 def hg_stream(self):
360 369 if self._use_echo_app:
361 370 @wsgiapp
362 371 def _hg_stream(environ, start_response):
363 372 app = EchoApp('fake_path', 'fake_name', None)
364 373 return app(environ, start_response)
365 374 return _hg_stream
366 375 else:
367 376 @wsgiapp
368 377 def _hg_stream(environ, start_response):
369 378 log.debug('http-app: handling hg stream')
370 379 repo_path = environ['HTTP_X_RC_REPO_PATH']
371 380 repo_name = environ['HTTP_X_RC_REPO_NAME']
372 381 packed_config = base64.b64decode(
373 382 environ['HTTP_X_RC_REPO_CONFIG'])
374 383 config = msgpack.unpackb(packed_config)
375 384 app = scm_app.create_hg_wsgi_app(
376 385 repo_path, repo_name, config)
377 386
378 387 # Consistent path information for hgweb
379 388 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
380 389 environ['REPO_NAME'] = repo_name
381 390 self.set_env_from_config(environ, config)
382 391
383 392 log.debug('http-app: starting app handler '
384 393 'with %s and process request', app)
385 394 return app(environ, ResponseFilter(start_response))
386 395 return _hg_stream
387 396
388 397 def git_stream(self):
389 398 if self._use_echo_app:
390 399 @wsgiapp
391 400 def _git_stream(environ, start_response):
392 401 app = EchoApp('fake_path', 'fake_name', None)
393 402 return app(environ, start_response)
394 403 return _git_stream
395 404 else:
396 405 @wsgiapp
397 406 def _git_stream(environ, start_response):
398 407 log.debug('http-app: handling git stream')
399 408 repo_path = environ['HTTP_X_RC_REPO_PATH']
400 409 repo_name = environ['HTTP_X_RC_REPO_NAME']
401 410 packed_config = base64.b64decode(
402 411 environ['HTTP_X_RC_REPO_CONFIG'])
403 412 config = msgpack.unpackb(packed_config)
404 413
405 414 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
406 415 self.set_env_from_config(environ, config)
407 416
408 417 content_type = environ.get('CONTENT_TYPE', '')
409 418
410 419 path = environ['PATH_INFO']
411 420 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
412 421 log.debug(
413 422 'LFS: Detecting if request `%s` is LFS server path based '
414 423 'on content type:`%s`, is_lfs:%s',
415 424 path, content_type, is_lfs_request)
416 425
417 426 if not is_lfs_request:
418 427 # fallback detection by path
419 428 if GIT_LFS_PROTO_PAT.match(path):
420 429 is_lfs_request = True
421 430 log.debug(
422 431 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
423 432 path, is_lfs_request)
424 433
425 434 if is_lfs_request:
426 435 app = scm_app.create_git_lfs_wsgi_app(
427 436 repo_path, repo_name, config)
428 437 else:
429 438 app = scm_app.create_git_wsgi_app(
430 439 repo_path, repo_name, config)
431 440
432 441 log.debug('http-app: starting app handler '
433 442 'with %s and process request', app)
434 443
435 444 return app(environ, start_response)
436 445
437 446 return _git_stream
438 447
439 448 def is_vcs_view(self, context, request):
440 449 """
441 450 View predicate that returns true if given backend is supported by
442 451 defined remotes.
443 452 """
444 453 backend = request.matchdict.get('backend')
445 454 return backend in self._remotes
446 455
447 456 def handle_vcs_exception(self, exception, request):
448 457 _vcs_kind = getattr(exception, '_vcs_kind', '')
449 458 if _vcs_kind == 'repo_locked':
450 459 # Get custom repo-locked status code if present.
451 460 status_code = request.headers.get('X-RC-Locked-Status-Code')
452 461 return HTTPRepoLocked(
453 462 title=exception.message, status_code=status_code)
454 463
455 464 # Re-raise exception if we can not handle it.
456 465 log.exception(
457 466 'error occurred handling this request for path: %s', request.path)
458 467 raise exception
459 468
460 469
461 470 class ResponseFilter(object):
462 471
463 472 def __init__(self, start_response):
464 473 self._start_response = start_response
465 474
466 475 def __call__(self, status, response_headers, exc_info=None):
467 476 headers = tuple(
468 477 (h, v) for h, v in response_headers
469 478 if not wsgiref.util.is_hop_by_hop(h))
470 479 return self._start_response(status, headers, exc_info)
471 480
472 481
473 482 def main(global_config, **settings):
474 483 if MercurialFactory:
475 484 hgpatches.patch_largefiles_capabilities()
476 485 hgpatches.patch_subrepo_type_mapping()
477 486 app = HTTPApplication(settings=settings, global_config=global_config)
478 487 return app.wsgi_app()
@@ -1,19 +1,20 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 WIRE_ENCODING = 'UTF-8'
19 19 GIT_EXECUTABLE = 'git'
20 BINARY_DIR = ''
@@ -1,677 +1,689 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 from urllib2 import URLError
22 22 import logging
23 23 import posixpath as vcspath
24 24 import StringIO
25 25 import urllib
26 26 import traceback
27 27
28 28 import svn.client
29 29 import svn.core
30 30 import svn.delta
31 31 import svn.diff
32 32 import svn.fs
33 33 import svn.repos
34 34
35 from vcsserver import svn_diff, exceptions, subprocessio
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 36 from vcsserver.base import RepoFactory, raise_from_original
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 # Set of svn compatible version flags.
42 42 # Compare with subversion/svnadmin/svnadmin.c
43 43 svn_compatible_versions = set([
44 44 'pre-1.4-compatible',
45 45 'pre-1.5-compatible',
46 46 'pre-1.6-compatible',
47 47 'pre-1.8-compatible',
48 48 'pre-1.9-compatible',
49 49 ])
50 50
51 51 svn_compatible_versions_map = {
52 52 'pre-1.4-compatible': '1.3',
53 53 'pre-1.5-compatible': '1.4',
54 54 'pre-1.6-compatible': '1.5',
55 55 'pre-1.8-compatible': '1.7',
56 56 'pre-1.9-compatible': '1.8',
57 57 }
58 58
59 59
60 60 def reraise_safe_exceptions(func):
61 61 """Decorator for converting svn exceptions to something neutral."""
62 62 def wrapper(*args, **kwargs):
63 63 try:
64 64 return func(*args, **kwargs)
65 65 except Exception as e:
66 66 if not hasattr(e, '_vcs_kind'):
67 67 log.exception("Unhandled exception in hg remote call")
68 68 raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class SubversionFactory(RepoFactory):
74 74
75 75 def _create_repo(self, wire, create, compatible_version):
76 76 path = svn.core.svn_path_canonicalize(wire['path'])
77 77 if create:
78 78 fs_config = {'compatible-version': '1.9'}
79 79 if compatible_version:
80 80 if compatible_version not in svn_compatible_versions:
81 81 raise Exception('Unknown SVN compatible version "{}"'
82 82 .format(compatible_version))
83 83 fs_config['compatible-version'] = \
84 84 svn_compatible_versions_map[compatible_version]
85 85
86 86 log.debug('Create SVN repo with config "%s"', fs_config)
87 87 repo = svn.repos.create(path, "", "", None, fs_config)
88 88 else:
89 89 repo = svn.repos.open(path)
90 90
91 91 log.debug('Got SVN object: %s', repo)
92 92 return repo
93 93
94 94 def repo(self, wire, create=False, compatible_version=None):
95 95 def create_new_repo():
96 96 return self._create_repo(wire, create, compatible_version)
97 97
98 98 return self._repo(wire, create_new_repo)
99 99
100 100
101 101 NODE_TYPE_MAPPING = {
102 102 svn.core.svn_node_file: 'file',
103 103 svn.core.svn_node_dir: 'dir',
104 104 }
105 105
106 106
107 107 class SvnRemote(object):
108 108
109 109 def __init__(self, factory, hg_factory=None):
110 110 self._factory = factory
111 111 # TODO: Remove once we do not use internal Mercurial objects anymore
112 112 # for subversion
113 113 self._hg_factory = hg_factory
114 114
115 115 @reraise_safe_exceptions
116 116 def discover_svn_version(self):
117 117 try:
118 118 import svn.core
119 119 svn_ver = svn.core.SVN_VERSION
120 120 except ImportError:
121 121 svn_ver = None
122 122 return svn_ver
123 123
124 124 def check_url(self, url, config_items):
125 125 # this can throw exception if not installed, but we detect this
126 126 from hgsubversion import svnrepo
127 127
128 128 baseui = self._hg_factory._create_config(config_items)
129 129 # uuid function get's only valid UUID from proper repo, else
130 130 # throws exception
131 131 try:
132 132 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 133 except Exception:
134 134 tb = traceback.format_exc()
135 135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
136 136 raise URLError(
137 137 '"%s" is not a valid Subversion source url.' % (url, ))
138 138 return True
139 139
140 140 def is_path_valid_repository(self, wire, path):
141 141
142 142 # NOTE(marcink): short circuit the check for SVN repo
143 143 # the repos.open might be expensive to check, but we have one cheap
144 144 # pre condition that we can use, to check for 'format' file
145 145
146 146 if not os.path.isfile(os.path.join(path, 'format')):
147 147 return False
148 148
149 149 try:
150 150 svn.repos.open(path)
151 151 except svn.core.SubversionException:
152 152 tb = traceback.format_exc()
153 153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
154 154 return False
155 155 return True
156 156
157 157 @reraise_safe_exceptions
158 158 def verify(self, wire,):
159 159 repo_path = wire['path']
160 160 if not self.is_path_valid_repository(wire, repo_path):
161 161 raise Exception(
162 162 "Path %s is not a valid Subversion repository." % repo_path)
163 163
164 164 cmd = ['svnadmin', 'info', repo_path]
165 165 stdout, stderr = subprocessio.run_command(cmd)
166 166 return stdout
167 167
168 168 def lookup(self, wire, revision):
169 169 if revision not in [-1, None, 'HEAD']:
170 170 raise NotImplementedError
171 171 repo = self._factory.repo(wire)
172 172 fs_ptr = svn.repos.fs(repo)
173 173 head = svn.fs.youngest_rev(fs_ptr)
174 174 return head
175 175
176 176 def lookup_interval(self, wire, start_ts, end_ts):
177 177 repo = self._factory.repo(wire)
178 178 fsobj = svn.repos.fs(repo)
179 179 start_rev = None
180 180 end_rev = None
181 181 if start_ts:
182 182 start_ts_svn = apr_time_t(start_ts)
183 183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
184 184 else:
185 185 start_rev = 1
186 186 if end_ts:
187 187 end_ts_svn = apr_time_t(end_ts)
188 188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
189 189 else:
190 190 end_rev = svn.fs.youngest_rev(fsobj)
191 191 return start_rev, end_rev
192 192
193 193 def revision_properties(self, wire, revision):
194 194 repo = self._factory.repo(wire)
195 195 fs_ptr = svn.repos.fs(repo)
196 196 return svn.fs.revision_proplist(fs_ptr, revision)
197 197
198 198 def revision_changes(self, wire, revision):
199 199
200 200 repo = self._factory.repo(wire)
201 201 fsobj = svn.repos.fs(repo)
202 202 rev_root = svn.fs.revision_root(fsobj, revision)
203 203
204 204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
205 205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
206 206 base_dir = ""
207 207 send_deltas = False
208 208 svn.repos.replay2(
209 209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
210 210 editor_ptr, editor_baton, None)
211 211
212 212 added = []
213 213 changed = []
214 214 removed = []
215 215
216 216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
217 217 for path, change in editor.changes.iteritems():
218 218 # TODO: Decide what to do with directory nodes. Subversion can add
219 219 # empty directories.
220 220
221 221 if change.item_kind == svn.core.svn_node_dir:
222 222 continue
223 223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
224 224 added.append(path)
225 225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
226 226 svn.repos.CHANGE_ACTION_REPLACE]:
227 227 changed.append(path)
228 228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
229 229 removed.append(path)
230 230 else:
231 231 raise NotImplementedError(
232 232 "Action %s not supported on path %s" % (
233 233 change.action, path))
234 234
235 235 changes = {
236 236 'added': added,
237 237 'changed': changed,
238 238 'removed': removed,
239 239 }
240 240 return changes
241 241
242 242 def node_history(self, wire, path, revision, limit):
243 243 cross_copies = False
244 244 repo = self._factory.repo(wire)
245 245 fsobj = svn.repos.fs(repo)
246 246 rev_root = svn.fs.revision_root(fsobj, revision)
247 247
248 248 history_revisions = []
249 249 history = svn.fs.node_history(rev_root, path)
250 250 history = svn.fs.history_prev(history, cross_copies)
251 251 while history:
252 252 __, node_revision = svn.fs.history_location(history)
253 253 history_revisions.append(node_revision)
254 254 if limit and len(history_revisions) >= limit:
255 255 break
256 256 history = svn.fs.history_prev(history, cross_copies)
257 257 return history_revisions
258 258
259 259 def node_properties(self, wire, path, revision):
260 260 repo = self._factory.repo(wire)
261 261 fsobj = svn.repos.fs(repo)
262 262 rev_root = svn.fs.revision_root(fsobj, revision)
263 263 return svn.fs.node_proplist(rev_root, path)
264 264
265 265 def file_annotate(self, wire, path, revision):
266 266 abs_path = 'file://' + urllib.pathname2url(
267 267 vcspath.join(wire['path'], path))
268 268 file_uri = svn.core.svn_path_canonicalize(abs_path)
269 269
270 270 start_rev = svn_opt_revision_value_t(0)
271 271 peg_rev = svn_opt_revision_value_t(revision)
272 272 end_rev = peg_rev
273 273
274 274 annotations = []
275 275
276 276 def receiver(line_no, revision, author, date, line, pool):
277 277 annotations.append((line_no, revision, line))
278 278
279 279 # TODO: Cannot use blame5, missing typemap function in the swig code
280 280 try:
281 281 svn.client.blame2(
282 282 file_uri, peg_rev, start_rev, end_rev,
283 283 receiver, svn.client.create_context())
284 284 except svn.core.SubversionException as exc:
285 285 log.exception("Error during blame operation.")
286 286 raise Exception(
287 287 "Blame not supported or file does not exist at path %s. "
288 288 "Error %s." % (path, exc))
289 289
290 290 return annotations
291 291
292 292 def get_node_type(self, wire, path, rev=None):
293 293 repo = self._factory.repo(wire)
294 294 fs_ptr = svn.repos.fs(repo)
295 295 if rev is None:
296 296 rev = svn.fs.youngest_rev(fs_ptr)
297 297 root = svn.fs.revision_root(fs_ptr, rev)
298 298 node = svn.fs.check_path(root, path)
299 299 return NODE_TYPE_MAPPING.get(node, None)
300 300
301 301 def get_nodes(self, wire, path, revision=None):
302 302 repo = self._factory.repo(wire)
303 303 fsobj = svn.repos.fs(repo)
304 304 if revision is None:
305 305 revision = svn.fs.youngest_rev(fsobj)
306 306 root = svn.fs.revision_root(fsobj, revision)
307 307 entries = svn.fs.dir_entries(root, path)
308 308 result = []
309 309 for entry_path, entry_info in entries.iteritems():
310 310 result.append(
311 311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
312 312 return result
313 313
314 314 def get_file_content(self, wire, path, rev=None):
315 315 repo = self._factory.repo(wire)
316 316 fsobj = svn.repos.fs(repo)
317 317 if rev is None:
318 318 rev = svn.fs.youngest_revision(fsobj)
319 319 root = svn.fs.revision_root(fsobj, rev)
320 320 content = svn.core.Stream(svn.fs.file_contents(root, path))
321 321 return content.read()
322 322
323 323 def get_file_size(self, wire, path, revision=None):
324 324 repo = self._factory.repo(wire)
325 325 fsobj = svn.repos.fs(repo)
326 326 if revision is None:
327 327 revision = svn.fs.youngest_revision(fsobj)
328 328 root = svn.fs.revision_root(fsobj, revision)
329 329 size = svn.fs.file_length(root, path)
330 330 return size
331 331
332 332 def create_repository(self, wire, compatible_version=None):
333 333 log.info('Creating Subversion repository in path "%s"', wire['path'])
334 334 self._factory.repo(wire, create=True,
335 335 compatible_version=compatible_version)
336 336
337 337 def import_remote_repository(self, wire, src_url):
338 338 repo_path = wire['path']
339 339 if not self.is_path_valid_repository(wire, repo_path):
340 340 raise Exception(
341 341 "Path %s is not a valid Subversion repository." % repo_path)
342 342
343 343 # TODO: johbo: URL checks ?
344 344 import subprocess
345 345 rdump = subprocess.Popen(
346 346 ['svnrdump', 'dump', '--non-interactive', src_url],
347 347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
348 348 load = subprocess.Popen(
349 349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
350 350
351 351 # TODO: johbo: This can be a very long operation, might be better
352 352 # to track some kind of status and provide an api to check if the
353 353 # import is done.
354 354 rdump.wait()
355 355 load.wait()
356 356
357 357 if rdump.returncode != 0:
358 358 errors = rdump.stderr.read()
359 359 log.error('svnrdump dump failed: statuscode %s: message: %s',
360 360 rdump.returncode, errors)
361 361 reason = 'UNKNOWN'
362 362 if 'svnrdump: E230001:' in errors:
363 363 reason = 'INVALID_CERTIFICATE'
364 364 raise Exception(
365 365 'Failed to dump the remote repository from %s.' % src_url,
366 366 reason)
367 367 if load.returncode != 0:
368 368 raise Exception(
369 369 'Failed to load the dump of remote repository from %s.' %
370 370 (src_url, ))
371 371
372 372 def commit(self, wire, message, author, timestamp, updated, removed):
373 373 assert isinstance(message, str)
374 374 assert isinstance(author, str)
375 375
376 376 repo = self._factory.repo(wire)
377 377 fsobj = svn.repos.fs(repo)
378 378
379 379 rev = svn.fs.youngest_rev(fsobj)
380 380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
381 381 txn_root = svn.fs.txn_root(txn)
382 382
383 383 for node in updated:
384 384 TxnNodeProcessor(node, txn_root).update()
385 385 for node in removed:
386 386 TxnNodeProcessor(node, txn_root).remove()
387 387
388 388 commit_id = svn.repos.fs_commit_txn(repo, txn)
389 389
390 390 if timestamp:
391 391 apr_time = apr_time_t(timestamp)
392 392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
393 393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
394 394
395 395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
396 396 return commit_id
397 397
398 398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
399 399 ignore_whitespace=False, context=3):
400 400
401 401 wire.update(cache=False)
402 402 repo = self._factory.repo(wire)
403 403 diff_creator = SvnDiffer(
404 404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
405 405 try:
406 406 return diff_creator.generate_diff()
407 407 except svn.core.SubversionException as e:
408 408 log.exception(
409 409 "Error during diff operation operation. "
410 410 "Path might not exist %s, %s" % (path1, path2))
411 411 return ""
412 412
413 413 @reraise_safe_exceptions
414 414 def is_large_file(self, wire, path):
415 415 return False
416 416
417 @reraise_safe_exceptions
418 def install_hooks(self, wire, force=False):
419 from vcsserver.hook_utils import install_svn_hooks
420 repo_path = wire['path']
421 binary_dir = settings.BINARY_DIR
422 executable = None
423 if binary_dir:
424 executable = os.path.join(binary_dir, 'python')
425 return install_svn_hooks(
426 repo_path, executable=executable, force_create=force)
427
417 428
418 429 class SvnDiffer(object):
419 430 """
420 431 Utility to create diffs based on difflib and the Subversion api
421 432 """
422 433
423 434 binary_content = False
424 435
425 436 def __init__(
426 437 self, repo, src_rev, src_path, tgt_rev, tgt_path,
427 438 ignore_whitespace, context):
428 439 self.repo = repo
429 440 self.ignore_whitespace = ignore_whitespace
430 441 self.context = context
431 442
432 443 fsobj = svn.repos.fs(repo)
433 444
434 445 self.tgt_rev = tgt_rev
435 446 self.tgt_path = tgt_path or ''
436 447 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
437 448 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
438 449
439 450 self.src_rev = src_rev
440 451 self.src_path = src_path or self.tgt_path
441 452 self.src_root = svn.fs.revision_root(fsobj, src_rev)
442 453 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
443 454
444 455 self._validate()
445 456
446 457 def _validate(self):
447 458 if (self.tgt_kind != svn.core.svn_node_none and
448 459 self.src_kind != svn.core.svn_node_none and
449 460 self.src_kind != self.tgt_kind):
450 461 # TODO: johbo: proper error handling
451 462 raise Exception(
452 463 "Source and target are not compatible for diff generation. "
453 464 "Source type: %s, target type: %s" %
454 465 (self.src_kind, self.tgt_kind))
455 466
456 467 def generate_diff(self):
457 468 buf = StringIO.StringIO()
458 469 if self.tgt_kind == svn.core.svn_node_dir:
459 470 self._generate_dir_diff(buf)
460 471 else:
461 472 self._generate_file_diff(buf)
462 473 return buf.getvalue()
463 474
464 475 def _generate_dir_diff(self, buf):
465 476 editor = DiffChangeEditor()
466 477 editor_ptr, editor_baton = svn.delta.make_editor(editor)
467 478 svn.repos.dir_delta2(
468 479 self.src_root,
469 480 self.src_path,
470 481 '', # src_entry
471 482 self.tgt_root,
472 483 self.tgt_path,
473 484 editor_ptr, editor_baton,
474 485 authorization_callback_allow_all,
475 486 False, # text_deltas
476 487 svn.core.svn_depth_infinity, # depth
477 488 False, # entry_props
478 489 False, # ignore_ancestry
479 490 )
480 491
481 492 for path, __, change in sorted(editor.changes):
482 493 self._generate_node_diff(
483 494 buf, change, path, self.tgt_path, path, self.src_path)
484 495
485 496 def _generate_file_diff(self, buf):
486 497 change = None
487 498 if self.src_kind == svn.core.svn_node_none:
488 499 change = "add"
489 500 elif self.tgt_kind == svn.core.svn_node_none:
490 501 change = "delete"
491 502 tgt_base, tgt_path = vcspath.split(self.tgt_path)
492 503 src_base, src_path = vcspath.split(self.src_path)
493 504 self._generate_node_diff(
494 505 buf, change, tgt_path, tgt_base, src_path, src_base)
495 506
496 507 def _generate_node_diff(
497 508 self, buf, change, tgt_path, tgt_base, src_path, src_base):
498 509
499 510 if self.src_rev == self.tgt_rev and tgt_base == src_base:
500 511 # makes consistent behaviour with git/hg to return empty diff if
501 512 # we compare same revisions
502 513 return
503 514
504 515 tgt_full_path = vcspath.join(tgt_base, tgt_path)
505 516 src_full_path = vcspath.join(src_base, src_path)
506 517
507 518 self.binary_content = False
508 519 mime_type = self._get_mime_type(tgt_full_path)
509 520
510 521 if mime_type and not mime_type.startswith('text'):
511 522 self.binary_content = True
512 523 buf.write("=" * 67 + '\n')
513 524 buf.write("Cannot display: file marked as a binary type.\n")
514 525 buf.write("svn:mime-type = %s\n" % mime_type)
515 526 buf.write("Index: %s\n" % (tgt_path, ))
516 527 buf.write("=" * 67 + '\n')
517 528 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
518 529 'tgt_path': tgt_path})
519 530
520 531 if change == 'add':
521 532 # TODO: johbo: SVN is missing a zero here compared to git
522 533 buf.write("new file mode 10644\n")
523 534
524 535 #TODO(marcink): intro to binary detection of svn patches
525 536 # if self.binary_content:
526 537 # buf.write('GIT binary patch\n')
527 538
528 539 buf.write("--- /dev/null\t(revision 0)\n")
529 540 src_lines = []
530 541 else:
531 542 if change == 'delete':
532 543 buf.write("deleted file mode 10644\n")
533 544
534 545 #TODO(marcink): intro to binary detection of svn patches
535 546 # if self.binary_content:
536 547 # buf.write('GIT binary patch\n')
537 548
538 549 buf.write("--- a/%s\t(revision %s)\n" % (
539 550 src_path, self.src_rev))
540 551 src_lines = self._svn_readlines(self.src_root, src_full_path)
541 552
542 553 if change == 'delete':
543 554 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
544 555 tgt_lines = []
545 556 else:
546 557 buf.write("+++ b/%s\t(revision %s)\n" % (
547 558 tgt_path, self.tgt_rev))
548 559 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
549 560
550 561 if not self.binary_content:
551 562 udiff = svn_diff.unified_diff(
552 563 src_lines, tgt_lines, context=self.context,
553 564 ignore_blank_lines=self.ignore_whitespace,
554 565 ignore_case=False,
555 566 ignore_space_changes=self.ignore_whitespace)
556 567 buf.writelines(udiff)
557 568
558 569 def _get_mime_type(self, path):
559 570 try:
560 571 mime_type = svn.fs.node_prop(
561 572 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
562 573 except svn.core.SubversionException:
563 574 mime_type = svn.fs.node_prop(
564 575 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
565 576 return mime_type
566 577
567 578 def _svn_readlines(self, fs_root, node_path):
568 579 if self.binary_content:
569 580 return []
570 581 node_kind = svn.fs.check_path(fs_root, node_path)
571 582 if node_kind not in (
572 583 svn.core.svn_node_file, svn.core.svn_node_symlink):
573 584 return []
574 585 content = svn.core.Stream(
575 586 svn.fs.file_contents(fs_root, node_path)).read()
576 587 return content.splitlines(True)
577 588
578 589
590
579 591 class DiffChangeEditor(svn.delta.Editor):
580 592 """
581 593 Records changes between two given revisions
582 594 """
583 595
584 596 def __init__(self):
585 597 self.changes = []
586 598
587 599 def delete_entry(self, path, revision, parent_baton, pool=None):
588 600 self.changes.append((path, None, 'delete'))
589 601
590 602 def add_file(
591 603 self, path, parent_baton, copyfrom_path, copyfrom_revision,
592 604 file_pool=None):
593 605 self.changes.append((path, 'file', 'add'))
594 606
595 607 def open_file(self, path, parent_baton, base_revision, file_pool=None):
596 608 self.changes.append((path, 'file', 'change'))
597 609
598 610
599 611 def authorization_callback_allow_all(root, path, pool):
600 612 return True
601 613
602 614
603 615 class TxnNodeProcessor(object):
604 616 """
605 617 Utility to process the change of one node within a transaction root.
606 618
607 619 It encapsulates the knowledge of how to add, update or remove
608 620 a node for a given transaction root. The purpose is to support the method
609 621 `SvnRemote.commit`.
610 622 """
611 623
612 624 def __init__(self, node, txn_root):
613 625 assert isinstance(node['path'], str)
614 626
615 627 self.node = node
616 628 self.txn_root = txn_root
617 629
618 630 def update(self):
619 631 self._ensure_parent_dirs()
620 632 self._add_file_if_node_does_not_exist()
621 633 self._update_file_content()
622 634 self._update_file_properties()
623 635
624 636 def remove(self):
625 637 svn.fs.delete(self.txn_root, self.node['path'])
626 638 # TODO: Clean up directory if empty
627 639
628 640 def _ensure_parent_dirs(self):
629 641 curdir = vcspath.dirname(self.node['path'])
630 642 dirs_to_create = []
631 643 while not self._svn_path_exists(curdir):
632 644 dirs_to_create.append(curdir)
633 645 curdir = vcspath.dirname(curdir)
634 646
635 647 for curdir in reversed(dirs_to_create):
636 648 log.debug('Creating missing directory "%s"', curdir)
637 649 svn.fs.make_dir(self.txn_root, curdir)
638 650
639 651 def _svn_path_exists(self, path):
640 652 path_status = svn.fs.check_path(self.txn_root, path)
641 653 return path_status != svn.core.svn_node_none
642 654
643 655 def _add_file_if_node_does_not_exist(self):
644 656 kind = svn.fs.check_path(self.txn_root, self.node['path'])
645 657 if kind == svn.core.svn_node_none:
646 658 svn.fs.make_file(self.txn_root, self.node['path'])
647 659
648 660 def _update_file_content(self):
649 661 assert isinstance(self.node['content'], str)
650 662 handler, baton = svn.fs.apply_textdelta(
651 663 self.txn_root, self.node['path'], None, None)
652 664 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
653 665
654 666 def _update_file_properties(self):
655 667 properties = self.node.get('properties', {})
656 668 for key, value in properties.iteritems():
657 669 svn.fs.change_node_prop(
658 670 self.txn_root, self.node['path'], key, value)
659 671
660 672
661 673 def apr_time_t(timestamp):
662 674 """
663 675 Convert a Python timestamp into APR timestamp type apr_time_t
664 676 """
665 677 return timestamp * 1E6
666 678
667 679
668 680 def svn_opt_revision_value_t(num):
669 681 """
670 682 Put `num` into a `svn_opt_revision_value_t` structure.
671 683 """
672 684 value = svn.core.svn_opt_revision_value_t()
673 685 value.number = num
674 686 revision = svn.core.svn_opt_revision_t()
675 687 revision.kind = svn.core.svn_opt_revision_number
676 688 revision.value = value
677 689 return revision
@@ -1,57 +1,58 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
23 23 def pytest_addoption(parser):
24 24 parser.addoption(
25 25 '--repeat', type=int, default=100,
26 26 help="Number of repetitions in performance tests.")
27 27
28 28
29 29 @pytest.fixture(scope='session')
30 30 def repeat(request):
31 31 """
32 32 The number of repetitions is based on this fixture.
33 33
34 34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
35 35 tests are not too slow in our default test suite.
36 36 """
37 37 return request.config.getoption('--repeat')
38 38
39 39
40 40 @pytest.fixture(scope='session')
41 41 def vcsserver_port(request):
42 42 port = get_available_port()
43 43 print 'Using vcsserver port %s' % (port, )
44 44 return port
45 45
46 46
47 47 def get_available_port():
48 48 family = socket.AF_INET
49 49 socktype = socket.SOCK_STREAM
50 50 host = '127.0.0.1'
51 51
52 52 mysocket = socket.socket(family, socktype)
53 53 mysocket.bind((host, 0))
54 54 port = mysocket.getsockname()[1]
55 55 mysocket.close()
56 56 del mysocket
57 57 return port
58
@@ -1,71 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
25 25 class ContextINI(object):
26 26 """
27 27 Allows to create a new test.ini file as a copy of existing one with edited
28 28 data. If existing file is not present, it creates a new one. Example usage::
29 29
30 30 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 31 print 'vcsserver --config=%s' % new_test_ini
32 32 """
33 33
34 34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 35 destroy=True):
36 36 self.ini_file_path = ini_file_path
37 37 self.ini_params = ini_params
38 38 self.new_path = None
39 39 self.new_path_prefix = new_file_prefix or 'test'
40 40 self.destroy = destroy
41 41
42 42 def __enter__(self):
43 43 _, pref = tempfile.mkstemp()
44 44 loc = tempfile.gettempdir()
45 45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 46 pref, self.new_path_prefix, self.ini_file_path))
47 47
48 48 # copy ini file and modify according to the params, if we re-use a file
49 49 if os.path.isfile(self.ini_file_path):
50 50 shutil.copy(self.ini_file_path, self.new_path)
51 51 else:
52 52 # create new dump file for configObj to write to.
53 53 with open(self.new_path, 'wb'):
54 54 pass
55 55
56 56 config = configobj.ConfigObj(
57 57 self.new_path, file_error=True, write_empty_values=True)
58 58
59 59 for data in self.ini_params:
60 60 section, ini_params = data.items()[0]
61 61 key, val = ini_params.items()[0]
62 62 if section not in config:
63 63 config[section] = {}
64 64 config[section][key] = val
65 65
66 66 config.write()
67 67 return self.new_path
68 68
69 69 def __exit__(self, exc_type, exc_val, exc_tb):
70 70 if self.destroy:
71 71 os.remove(self.new_path)
72
73
74 def no_newline_id_generator(test_name):
75 """
76 Generates a test name without spaces or newlines characters. Used for
77 nicer output of progress of test
78 """
79 org_name = test_name
80 test_name = test_name\
81 .replace('\n', '_N') \
82 .replace('\r', '_N') \
83 .replace('\t', '_T') \
84 .replace(' ', '_S')
85
86 return test_name or 'test-with-empty-name'
@@ -1,72 +1,82 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
18
19 log = logging.getLogger(__name__)
17 20
18 21
def safe_int(val, default=None):
    """
    Return ``int(val)``; when ``val`` is not convertable to int, return
    ``default`` instead.

    :param val: value to convert
    :param default: fallback returned on conversion failure
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        # non-numeric string, None, or other unconvertible value
        return default
34 37
35 38
def safe_str(unicode_, to_encoding=('utf8',)):
    """
    safe str function. Does few tricks to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library, if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: encoding, or list/tuple of encodings, to try first
        (immutable tuple default avoids the shared-mutable-default pitfall)
    :rtype: str
    :returns: str object
    """

    # if it's not basestring cast to str
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    try:
        import chardet
        encoding = chardet.detect(unicode_)['encoding']
        # NOTE: the previous code raised UnicodeEncodeError() with no
        # arguments here, which actually raises TypeError (the constructor
        # needs 5 arguments) and escaped the except clause below; fall
        # through to the replace-fallback instead, as originally intended.
        if encoding is not None:
            return unicode_.encode(encoding)
    except (ImportError, UnicodeEncodeError):
        pass

    # last resort: encode with unencodable characters replaced
    return unicode_.encode(to_encoding[0], 'replace')
76
77
class AttributeDict(dict):
    """
    Dict subclass whose keys are also readable/writable as attributes.
    Reading a missing attribute yields ``None`` instead of raising.
    """

    def __getattr__(self, key):
        # dict.get with its implicit None default: absent keys read as None
        return dict.get(self, key)

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
General Comments 0
You need to be logged in to leave comments. Login now