##// END OF EJS Templates
source-code: updated copyrights to 2023
super-admin -
r1126:f96985cd python3
parent child Browse files
Show More
@@ -1,56 +1,56 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19 import pytest
19 import pytest
20
20
21
21
def pytest_addoption(parser):
    """Register the performance-testing command line options with pytest."""
    parser.addoption(
        '--perf-repeat-vcs',
        type=int,
        default=100,
        help="Number of repetitions in performance tests.",
    )
26
26
27
27
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    # value comes from the --perf-repeat-vcs option registered in
    # pytest_addoption (default: 100)
    return request.config.getoption('--perf-repeat-vcs')
37
37
38
38
@pytest.fixture(scope='session')
def vcsserver_port(request):
    """Pick one free TCP port for the whole test session."""
    # NOTE: the port is only *probed* as free here; another process could
    # still claim it before the server binds it (inherent TOCTOU race).
    port = get_available_port()
    print(f'Using vcsserver port {port}')
    return port
44
44
45
45
def get_available_port(host='127.0.0.1'):
    """
    Return a TCP port number that was free at the time of the call.

    Binds an IPv4 stream socket to port 0 so the OS assigns an unused
    ephemeral port, reads the assigned port back, and releases the socket.

    :param host: interface to bind the probe socket on (default loopback).
    :return: the assigned port number (int).

    Note: the port may be taken by another process between this call and
    its later use — callers should treat it as best-effort.
    """
    # Context manager guarantees the probe socket is closed even if
    # bind() raises (the original closed it manually with no try/finally).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
        probe.bind((host, 0))
        return probe.getsockname()[1]
@@ -1,28 +1,28 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
import pkgutil


# Package version string, read from the bundled VERSION data file at
# import time (e.g. packaged next to this module inside 'vcsserver').
__version__ = pkgutil.get_data('vcsserver', 'VERSION').strip().decode()

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# the pyramid app factory — presumably filled at startup; empty until then.
PYRAMID_SETTINGS = {}
@@ -1,195 +1,195 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import traceback
20 import traceback
21 import logging
21 import logging
22 import urllib.parse
22 import urllib.parse
23
23
24 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
24 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
25 from vcsserver.lib.rc_cache import region_meta
25 from vcsserver.lib.rc_cache import region_meta
26
26
27 from vcsserver import exceptions
27 from vcsserver import exceptions
28 from vcsserver.exceptions import NoContentException
28 from vcsserver.exceptions import NoContentException
29 from vcsserver.hgcompat import archival
29 from vcsserver.hgcompat import archival
30 from vcsserver.str_utils import safe_bytes
30 from vcsserver.str_utils import safe_bytes
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
class RepoFactory(object):
    """
    Utility to create instances of repository

    It provides internal caching of the `repo` object based on
    the :term:`call context`.
    """
    # backend identifier, set by concrete subclasses (e.g. hg/git/svn)
    repo_type = None

    def __init__(self):
        # shared dogpile cache region; configured elsewhere before factories
        # are instantiated (region_meta is populated at application startup)
        self._cache_region = region_meta.dogpile_cache_regions['repo_object']

    def _create_config(self, path, config):
        # NOTE(review): both arguments are currently ignored and an empty
        # dict is returned — looks like a stub for subclasses to override;
        # confirm before relying on `path`/`config` being honored here.
        config = {}
        return config

    def _create_repo(self, wire, create):
        # must be implemented by backend-specific subclasses
        raise NotImplementedError()

    def repo(self, wire, create=False):
        # must be implemented by backend-specific subclasses
        raise NotImplementedError()
56
56
57
57
def obfuscate_qs(query_string):
    """
    Return *query_string* with sensitive parameter values masked.

    Values of ``auth_token`` and ``api_key`` are replaced with ``*****``;
    every other pair is passed through unchanged. ``None`` stays ``None``,
    and parameters with blank values are rendered without an ``=`` sign.
    """
    if query_string is None:
        return None

    def _masked(key, value):
        # hide credentials, keep everything else verbatim
        return "*****" if key in ('auth_token', 'api_key') else value

    rendered = []
    for key, value in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
        value = _masked(key, value)
        rendered.append(f'{key}={value}' if value else key)

    return '&'.join(rendered)
70
70
71
71
def raise_from_original(new_type, org_exc: Exception):
    """
    Raise a new exception type with original args and traceback.

    Must be called from inside an ``except`` block: the currently handled
    exception's args and traceback are transplanted onto a fresh instance
    of *new_type*, which is then raised.
    """
    _, current_exc, current_tb = sys.exc_info()
    replacement = new_type(*current_exc.args)

    # keep a formatted copy of the original traceback on the new exception
    replacement._org_exc_tb = traceback.format_tb(current_tb)

    try:
        raise replacement.with_traceback(current_tb)
    finally:
        # break the local reference cycle created by holding the traceback
        del current_tb
87
87
88
88
89
89
class ArchiveNode(object):
    """One file entry yielded by a node walker for archive creation."""

    def __init__(self, path, mode, is_link, raw_bytes):
        # path of the file inside the archive
        self.path = path
        # unix permission bits
        self.mode = mode
        # True when the node is a symlink
        self.is_link = is_link
        # content provider — called as f.raw_bytes() by
        # store_archive_in_cache, so presumably a zero-arg callable
        self.raw_bytes = raw_bytes
96
96
97
97
def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
                           commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
    """
    Function that would store an generate archive and send it to a dedicated backend store
    In here we use diskcache

    :param node_walker: a generator returning nodes to add to archive
    :param archive_key: key used to store the path
    :param kind: archive kind ('tgz', 'tbz2' or 'zip')
    :param mtime: time of creation
    :param archive_at_path: default '/' the path at archive was started. if this is not '/' it means it's a partial archive
    :param archive_dir_name: inside dir name when creating an archive
    :param commit_id: commit sha of revision archive was created at
    :param write_metadata: when True, a '.archival.txt' entry with metadata is added
    :param extra_metadata: optional dict merged into the written metadata
    :param cache_config: configuration for the archival cache store

    walker should be a file walker, for example:
    def node_walker():
        for file_info in files:
            yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
    """
    extra_metadata = extra_metadata or {}

    d_cache = get_archival_cache_store(config=cache_config)

    # fast path: archive already cached — return the cached file's path
    if archive_key in d_cache:
        with d_cache as d_cache_reader:
            reader, tag = d_cache_reader.get(archive_key, read=True, tag=True, retry=True)
            return reader.name

    # build the archive into a fresh temp file first, then move it to cache
    archive_tmp_path = safe_bytes(tempfile.mkstemp()[1])
    log.debug('Creating new temp archive in %s', archive_tmp_path)

    if kind == "tgz":
        archiver = archival.tarit(archive_tmp_path, mtime, b"gz")
    elif kind == "tbz2":
        archiver = archival.tarit(archive_tmp_path, mtime, b"bz2")
    elif kind == 'zip':
        archiver = archival.zipit(archive_tmp_path, mtime)
    else:
        # NOTE(review): ArchiveException() is *called* before being raised —
        # presumably a factory returning an exception class; confirm against
        # vcsserver.exceptions before changing.
        raise exceptions.ArchiveException()(
            f'Remote does not support: "{kind}" archive type.')

    for f in node_walker(commit_id, archive_at_path):
        f_path = os.path.join(safe_bytes(archive_dir_name), safe_bytes(f.path).lstrip(b'/'))
        try:
            archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
        except NoContentException:
            # NOTE(marcink): this is a special case for SVN so we can create "empty"
            # directories which arent supported by archiver
            archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')

    if write_metadata:
        metadata = dict([
            ('commit_id', commit_id),
            ('mtime', mtime),
        ])
        metadata.update(extra_metadata)

        # serialize metadata as 'key:value' lines into .archival.txt
        meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
        f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
        archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))

    archiver.done()

    # ensure set & get are atomic
    with d_cache.transact():

        with open(archive_tmp_path, 'rb') as archive_file:
            add_result = d_cache.set(archive_key, archive_file, read=True, tag='db-name', retry=True)
            if not add_result:
                log.error('Failed to store cache for key=%s', archive_key)

        # the cache now owns a copy; drop the temp file
        os.remove(archive_tmp_path)

        reader, tag = d_cache.get(archive_key, read=True, tag=True, retry=True)
        if not reader:
            raise AssertionError(f'empty reader on key={archive_key} added={add_result}')

        return reader.name
179
179
180
180
class BinaryEnvelope(object):
    """Wrapper marking *val* as a binary payload for the wire layer."""

    def __init__(self, val):
        # wrapped value, exposed unchanged
        self.val = val
184
184
185
185
class BytesEnvelope(bytes):
    """A ``bytes`` subclass that tags content as an already-encoded payload.

    Construction rejects anything that is not ``bytes``.
    """

    def __new__(cls, content):
        # guard clause: refuse non-bytes input early
        if not isinstance(content, bytes):
            raise TypeError('Content must be bytes.')
        return super().__new__(cls, content)
192
192
193
193
class BinaryBytesEnvelope(BytesEnvelope):
    # Alias subclass: identical behavior to BytesEnvelope, kept as a
    # distinct type so callers can discriminate binary payloads via
    # isinstance checks.
    pass
@@ -0,0 +1,1 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
@@ -1,27 +1,27 b''
1 # Copyright (C) 2010-2020 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
# Hook names used to register/identify VCS hooks (values look like
# Mercurial '<trigger>.<name>' hook identifiers).
HOOK_REPO_SIZE = 'changegroup.repo_size'

# HG
HOOK_PRE_PULL = 'preoutgoing.pre_pull'
HOOK_PULL = 'outgoing.pull_logger'
HOOK_PRE_PUSH = 'prechangegroup.pre_push'
HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
HOOK_PUSH = 'changegroup.push_logger'
HOOK_PUSH_KEY = 'pushkey.key_push'
@@ -1,168 +1,168 b''
1 # Copyright (C) 2010-2020 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import textwrap
20 import textwrap
21 import string
21 import string
22 import functools
22 import functools
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import logging.config
25 import logging.config
26
26
27 from vcsserver.type_utils import str2bool, aslist
27 from vcsserver.type_utils import str2bool, aslist
28
28
log = logging.getLogger(__name__)

# skip keys, that are set here, so we don't double process those
# (module-level registry shared by SettingsMaker.env_expand/make_setting)
set_keys = {
    '__file__': ''
}
35
35
36
36
class SettingsMaker(object):
    """
    Normalizes raw .ini settings: applies defaults, coerces values via the
    ``parser`` argument of :meth:`make_setting`, and lets any key be
    overridden through an ``RC_``-prefixed environment variable.
    """

    def __init__(self, app_settings):
        # the mutable settings mapping (typically pyramid registry settings)
        self.settings = app_settings

    @classmethod
    def _bool_func(cls, input_val):
        """Coerce a truthy config string (or bytes) to bool."""
        if isinstance(input_val, bytes):
            # decode to str
            input_val = input_val.decode('utf8')
        return str2bool(input_val)

    @classmethod
    def _int_func(cls, input_val):
        """Coerce to int."""
        return int(input_val)

    @classmethod
    def _list_func(cls, input_val, sep=','):
        """Split a config value into a list on *sep*."""
        return aslist(input_val, sep=sep)

    @classmethod
    def _string_func(cls, input_val, lower=True):
        """Pass through a string, lowercasing by default."""
        if lower:
            input_val = input_val.lower()
        return input_val

    @classmethod
    def _float_func(cls, input_val):
        """Coerce to float."""
        return float(input_val)

    @classmethod
    def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
        """Validate a directory path, optionally creating it first.

        :raises Exception: when the directory does not exist (and was not created).
        """
        # ensure we have our dir created
        if not os.path.isdir(input_val) and ensure_dir:
            os.makedirs(input_val, mode=mode, exist_ok=True)

        if not os.path.isdir(input_val):
            raise Exception('Dir at {} does not exist'.format(input_val))
        return input_val

    @classmethod
    def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755):
        """Validate that a file path's parent directory exists (optionally create it)."""
        dirname = os.path.dirname(input_val)
        cls._dir_func(dirname, ensure_dir=ensure_dir)
        return input_val

    @classmethod
    def _key_transformator(cls, key):
        # settings key 'a.b-c' -> environment variable name 'RC_A_B_C'
        return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_'))

    def maybe_env_key(self, key):
        """Return the environment-variable override for *key*, or None."""
        # now maybe we have this KEY in env, search and use the value with higher priority.
        transformed_key = self._key_transformator(key)
        envvar_value = os.environ.get(transformed_key)
        if envvar_value:
            log.debug('using `%s` key instead of `%s` key for config', transformed_key, key)

        return envvar_value

    def env_expand(self):
        """Replace every setting that has a matching env-var override in place."""
        replaced = {}
        for k, v in self.settings.items():
            if k not in set_keys:
                envvar_value = self.maybe_env_key(k)
                if envvar_value:
                    replaced[k] = envvar_value
                    set_keys[k] = envvar_value

        # replace ALL keys updated
        self.settings.update(replaced)

    def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'):
        """
        Helper to enable debug on running instance
        :return:
        """

        if not str2bool(self.settings.get('logging.autoconfigure')):
            log.info('logging configuration based on main .ini file')
            return

        if logging_conf is None:
            logging_conf = self.settings.get('logging.logging_conf_file') or ''

        if not os.path.isfile(logging_conf):
            log.error('Unable to setup logging based on %s, '
                      'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
            return

        with open(logging_conf, 'rt') as f:
            ini_template = textwrap.dedent(f.read())
            ini_template = string.Template(ini_template).safe_substitute(
                RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
                RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter
            )

        # BUG FIX: NamedTemporaryFile defaults to binary mode ('w+b'); writing
        # the str template would raise TypeError. Open in text mode instead.
        with tempfile.NamedTemporaryFile(mode='w', prefix='rc_logging_', suffix='.ini', delete=False) as f:
            log.info('Saved Temporary LOGGING config at %s', f.name)
            f.write(ini_template)

        logging.config.fileConfig(f.name)
        os.remove(f.name)

    def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None):
        """Coerce ``settings[key]`` with the named parser, store it back and return it.

        :param key: settings key to read.
        :param default: value used when the key is missing (or empty, see below).
        :param lower: lowercase string values (only for parser='string').
        :param default_when_empty: treat an empty configured value as missing.
        :param parser: one of bool/int/list/list:newline/list:spacesep/string/
            dir/dir:ensured/file/file:ensured or None for pass-through.
        """
        input_val = self.settings.get(key, default)

        if default_when_empty and not input_val:
            # use default value when value is set in the config but it is empty
            input_val = default

        parser_func = {
            'bool': self._bool_func,
            'int': self._int_func,
            'list': self._list_func,
            # BUG FIX: was sep='/n' (literal slash-n, never matches a line
            # break); a newline separator is clearly what was intended.
            'list:newline': functools.partial(self._list_func, sep='\n'),
            'list:spacesep': functools.partial(self._list_func, sep=' '),
            'string': functools.partial(self._string_func, lower=lower),
            'dir': self._dir_func,
            'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
            'file': self._file_path_func,
            'file:ensured': functools.partial(self._file_path_func, ensure_dir=True),
            None: lambda i: i
        }[parser]

        # environment variables always win over the .ini value
        envvar_value = self.maybe_env_key(key)
        if envvar_value:
            input_val = envvar_value
            set_keys[key] = input_val

        self.settings[key] = parser_func(input_val)
        return self.settings[key]
@@ -1,8 +1,10 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2
1 """
3 """
2 Provides a stub implementation for VCS operations.
4 Provides a stub implementation for VCS operations.
3
5
4 Intended usage is to help in performance measurements. The basic idea is to
6 Intended usage is to help in performance measurements. The basic idea is to
5 implement an `EchoApp` which sends back what it gets. Based on a configuration
7 implement an `EchoApp` which sends back what it gets. Based on a configuration
6 parameter this app can be activated, so that it replaced the endpoints for Git
8 parameter this app can be activated, so that it replaced the endpoints for Git
7 and Mercurial.
9 and Mercurial.
8 """
10 """
@@ -1,54 +1,56 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2
1 """
3 """
2 Implementation of :class:`EchoApp`.
4 Implementation of :class:`EchoApp`.
3
5
4 This WSGI application will just echo back the data which it recieves.
6 This WSGI application will just echo back the data which it recieves.
5 """
7 """
6
8
7 import logging
9 import logging
8
10
9
11
10 log = logging.getLogger(__name__)
12 log = logging.getLogger(__name__)
11
13
12
14
13 class EchoApp(object):
15 class EchoApp(object):
14
16
15 def __init__(self, repo_path, repo_name, config):
17 def __init__(self, repo_path, repo_name, config):
16 self._repo_path = repo_path
18 self._repo_path = repo_path
17 log.info("EchoApp initialized for %s", repo_path)
19 log.info("EchoApp initialized for %s", repo_path)
18
20
19 def __call__(self, environ, start_response):
21 def __call__(self, environ, start_response):
20 log.debug("EchoApp called for %s", self._repo_path)
22 log.debug("EchoApp called for %s", self._repo_path)
21 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
23 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
22 environ['wsgi.input'].read()
24 environ['wsgi.input'].read()
23 status = '200 OK'
25 status = '200 OK'
24 headers = [('Content-Type', 'text/plain')]
26 headers = [('Content-Type', 'text/plain')]
25 start_response(status, headers)
27 start_response(status, headers)
26 return [b"ECHO"]
28 return [b"ECHO"]
27
29
28
30
29 class EchoAppStream(object):
31 class EchoAppStream(object):
30
32
31 def __init__(self, repo_path, repo_name, config):
33 def __init__(self, repo_path, repo_name, config):
32 self._repo_path = repo_path
34 self._repo_path = repo_path
33 log.info("EchoApp initialized for %s", repo_path)
35 log.info("EchoApp initialized for %s", repo_path)
34
36
35 def __call__(self, environ, start_response):
37 def __call__(self, environ, start_response):
36 log.debug("EchoApp called for %s", self._repo_path)
38 log.debug("EchoApp called for %s", self._repo_path)
37 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
39 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
38 environ['wsgi.input'].read()
40 environ['wsgi.input'].read()
39 status = '200 OK'
41 status = '200 OK'
40 headers = [('Content-Type', 'text/plain')]
42 headers = [('Content-Type', 'text/plain')]
41 start_response(status, headers)
43 start_response(status, headers)
42
44
43 def generator():
45 def generator():
44 for _ in range(1000000):
46 for _ in range(1000000):
45 yield b"ECHO_STREAM"
47 yield b"ECHO_STREAM"
46 return generator()
48 return generator()
47
49
48
50
49 def create_app():
51 def create_app():
50 """
52 """
51 Allows to run this app directly in a WSGI server.
53 Allows to run this app directly in a WSGI server.
52 """
54 """
53 stub_config = {}
55 stub_config = {}
54 return EchoApp('stub_path', 'stub_name', stub_config)
56 return EchoApp('stub_path', 'stub_name', stub_config)
@@ -1,45 +1,47 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2
1 """
3 """
2 Provides the same API as :mod:`remote_wsgi`.
4 Provides the same API as :mod:`remote_wsgi`.
3
5
4 Uses the `EchoApp` instead of real implementations.
6 Uses the `EchoApp` instead of real implementations.
5 """
7 """
6
8
7 import logging
9 import logging
8
10
9 from .echo_app import EchoApp
11 from .echo_app import EchoApp
10 from vcsserver import wsgi_app_caller
12 from vcsserver import wsgi_app_caller
11
13
12
14
13 log = logging.getLogger(__name__)
15 log = logging.getLogger(__name__)
14
16
15
17
16 class GitRemoteWsgi(object):
18 class GitRemoteWsgi(object):
17 def handle(self, environ, input_data, *args, **kwargs):
19 def handle(self, environ, input_data, *args, **kwargs):
18 app = wsgi_app_caller.WSGIAppCaller(
20 app = wsgi_app_caller.WSGIAppCaller(
19 create_echo_wsgi_app(*args, **kwargs))
21 create_echo_wsgi_app(*args, **kwargs))
20
22
21 return app.handle(environ, input_data)
23 return app.handle(environ, input_data)
22
24
23
25
24 class HgRemoteWsgi(object):
26 class HgRemoteWsgi(object):
25 def handle(self, environ, input_data, *args, **kwargs):
27 def handle(self, environ, input_data, *args, **kwargs):
26 app = wsgi_app_caller.WSGIAppCaller(
28 app = wsgi_app_caller.WSGIAppCaller(
27 create_echo_wsgi_app(*args, **kwargs))
29 create_echo_wsgi_app(*args, **kwargs))
28
30
29 return app.handle(environ, input_data)
31 return app.handle(environ, input_data)
30
32
31
33
32 def create_echo_wsgi_app(repo_path, repo_name, config):
34 def create_echo_wsgi_app(repo_path, repo_name, config):
33 log.debug("Creating EchoApp WSGI application")
35 log.debug("Creating EchoApp WSGI application")
34
36
35 _assert_valid_config(config)
37 _assert_valid_config(config)
36
38
37 # Remaining items are forwarded to have the extras available
39 # Remaining items are forwarded to have the extras available
38 return EchoApp(repo_path, repo_name, config=config)
40 return EchoApp(repo_path, repo_name, config=config)
39
41
40
42
41 def _assert_valid_config(config):
43 def _assert_valid_config(config):
42 config = config.copy()
44 config = config.copy()
43
45
44 # This is what git needs from config at this stage
46 # This is what git needs from config at this stage
45 config.pop(b'git_update_server_info')
47 config.pop(b'git_update_server_info')
@@ -1,125 +1,125 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28
28
29
29
30 def _make_exception(kind, org_exc, *args):
30 def _make_exception(kind, org_exc, *args):
31 """
31 """
32 Prepares a base `Exception` instance to be sent over the wire.
32 Prepares a base `Exception` instance to be sent over the wire.
33
33
34 To give our caller a hint what this is about, it will attach an attribute
34 To give our caller a hint what this is about, it will attach an attribute
35 `_vcs_kind` to the exception.
35 `_vcs_kind` to the exception.
36 """
36 """
37 exc = Exception(*args)
37 exc = Exception(*args)
38 exc._vcs_kind = kind
38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
39 exc._org_exc = org_exc
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 return exc
41 return exc
42
42
43
43
44 def AbortException(org_exc=None):
44 def AbortException(org_exc=None):
45 def _make_exception_wrapper(*args):
45 def _make_exception_wrapper(*args):
46 return _make_exception('abort', org_exc, *args)
46 return _make_exception('abort', org_exc, *args)
47 return _make_exception_wrapper
47 return _make_exception_wrapper
48
48
49
49
50 def ArchiveException(org_exc=None):
50 def ArchiveException(org_exc=None):
51 def _make_exception_wrapper(*args):
51 def _make_exception_wrapper(*args):
52 return _make_exception('archive', org_exc, *args)
52 return _make_exception('archive', org_exc, *args)
53 return _make_exception_wrapper
53 return _make_exception_wrapper
54
54
55
55
56 def LookupException(org_exc=None):
56 def LookupException(org_exc=None):
57 def _make_exception_wrapper(*args):
57 def _make_exception_wrapper(*args):
58 return _make_exception('lookup', org_exc, *args)
58 return _make_exception('lookup', org_exc, *args)
59 return _make_exception_wrapper
59 return _make_exception_wrapper
60
60
61
61
62 def VcsException(org_exc=None):
62 def VcsException(org_exc=None):
63 def _make_exception_wrapper(*args):
63 def _make_exception_wrapper(*args):
64 return _make_exception('error', org_exc, *args)
64 return _make_exception('error', org_exc, *args)
65 return _make_exception_wrapper
65 return _make_exception_wrapper
66
66
67
67
68 def RepositoryLockedException(org_exc=None):
68 def RepositoryLockedException(org_exc=None):
69 def _make_exception_wrapper(*args):
69 def _make_exception_wrapper(*args):
70 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception('repo_locked', org_exc, *args)
71 return _make_exception_wrapper
71 return _make_exception_wrapper
72
72
73
73
74 def RepositoryBranchProtectedException(org_exc=None):
74 def RepositoryBranchProtectedException(org_exc=None):
75 def _make_exception_wrapper(*args):
75 def _make_exception_wrapper(*args):
76 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception('repo_branch_protected', org_exc, *args)
77 return _make_exception_wrapper
77 return _make_exception_wrapper
78
78
79
79
80 def RequirementException(org_exc=None):
80 def RequirementException(org_exc=None):
81 def _make_exception_wrapper(*args):
81 def _make_exception_wrapper(*args):
82 return _make_exception('requirement', org_exc, *args)
82 return _make_exception('requirement', org_exc, *args)
83 return _make_exception_wrapper
83 return _make_exception_wrapper
84
84
85
85
86 def UnhandledException(org_exc=None):
86 def UnhandledException(org_exc=None):
87 def _make_exception_wrapper(*args):
87 def _make_exception_wrapper(*args):
88 return _make_exception('unhandled', org_exc, *args)
88 return _make_exception('unhandled', org_exc, *args)
89 return _make_exception_wrapper
89 return _make_exception_wrapper
90
90
91
91
92 def URLError(org_exc=None):
92 def URLError(org_exc=None):
93 def _make_exception_wrapper(*args):
93 def _make_exception_wrapper(*args):
94 return _make_exception('url_error', org_exc, *args)
94 return _make_exception('url_error', org_exc, *args)
95 return _make_exception_wrapper
95 return _make_exception_wrapper
96
96
97
97
98 def SubrepoMergeException(org_exc=None):
98 def SubrepoMergeException(org_exc=None):
99 def _make_exception_wrapper(*args):
99 def _make_exception_wrapper(*args):
100 return _make_exception('subrepo_merge_error', org_exc, *args)
100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 return _make_exception_wrapper
101 return _make_exception_wrapper
102
102
103
103
104 class HTTPRepoLocked(HTTPLocked):
104 class HTTPRepoLocked(HTTPLocked):
105 """
105 """
106 Subclass of HTTPLocked response that allows to set the title and status
106 Subclass of HTTPLocked response that allows to set the title and status
107 code via constructor arguments.
107 code via constructor arguments.
108 """
108 """
109 def __init__(self, title, status_code=None, **kwargs):
109 def __init__(self, title, status_code=None, **kwargs):
110 self.code = status_code or HTTPLocked.code
110 self.code = status_code or HTTPLocked.code
111 self.title = title
111 self.title = title
112 super().__init__(**kwargs)
112 super().__init__(**kwargs)
113
113
114
114
115 class HTTPRepoBranchProtected(HTTPForbidden):
115 class HTTPRepoBranchProtected(HTTPForbidden):
116 def __init__(self, *args, **kwargs):
116 def __init__(self, *args, **kwargs):
117 super(HTTPForbidden, self).__init__(*args, **kwargs)
117 super(HTTPForbidden, self).__init__(*args, **kwargs)
118
118
119
119
120 class RefNotFoundException(KeyError):
120 class RefNotFoundException(KeyError):
121 pass
121 pass
122
122
123
123
124 class NoContentException(ValueError):
124 class NoContentException(ValueError):
125 pass
125 pass
@@ -1,19 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from .app import create_app
19 from .app import create_app
@@ -1,292 +1,292 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import logging
19 import logging
20 from wsgiref.util import FileWrapper
20 from wsgiref.util import FileWrapper
21
21
22 from pyramid.config import Configurator
22 from pyramid.config import Configurator
23 from pyramid.response import Response, FileIter
23 from pyramid.response import Response, FileIter
24 from pyramid.httpexceptions import (
24 from pyramid.httpexceptions import (
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPUnprocessableEntity)
26 HTTPUnprocessableEntity)
27
27
28 from vcsserver.lib.rc_json import json
28 from vcsserver.lib.rc_json import json
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.str_utils import safe_int
31 from vcsserver.str_utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
38
39
39
40 def write_response_error(http_exception, text=None):
40 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
42 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
43 _exception.content_type = content_type
44 if text:
44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
46 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
47 http_exception, text)
48 return _exception
48 return _exception
49
49
50
50
51 class AuthHeaderRequired(object):
51 class AuthHeaderRequired(object):
52 """
52 """
53 Decorator to check if request has proper auth-header
53 Decorator to check if request has proper auth-header
54 """
54 """
55
55
56 def __call__(self, func):
56 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
57 return get_cython_compat_decorator(self.__wrapper, func)
58
58
59 def __wrapper(self, func, *fargs, **fkwargs):
59 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
60 request = fargs[1]
61 auth = request.authorization
61 auth = request.authorization
62 if not auth:
62 if not auth:
63 return write_response_error(HTTPForbidden)
63 return write_response_error(HTTPForbidden)
64 return func(*fargs[1:], **fkwargs)
64 return func(*fargs[1:], **fkwargs)
65
65
66
66
67 # views
67 # views
68
68
69 def lfs_objects(request):
69 def lfs_objects(request):
70 # indicate not supported, V1 API
70 # indicate not supported, V1 API
71 log.warning('LFS: v1 api not supported, reporting it back to client')
71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73
73
74
74
75 @AuthHeaderRequired()
75 @AuthHeaderRequired()
76 def lfs_objects_batch(request):
76 def lfs_objects_batch(request):
77 """
77 """
78 The client sends the following information to the Batch endpoint to transfer some objects:
78 The client sends the following information to the Batch endpoint to transfer some objects:
79
79
80 operation - Should be download or upload.
80 operation - Should be download or upload.
81 transfers - An optional Array of String identifiers for transfer
81 transfers - An optional Array of String identifiers for transfer
82 adapters that the client has configured. If omitted, the basic
82 adapters that the client has configured. If omitted, the basic
83 transfer adapter MUST be assumed by the server.
83 transfer adapter MUST be assumed by the server.
84 objects - An Array of objects to download.
84 objects - An Array of objects to download.
85 oid - String OID of the LFS object.
85 oid - String OID of the LFS object.
86 size - Integer byte size of the LFS object. Must be at least zero.
86 size - Integer byte size of the LFS object. Must be at least zero.
87 """
87 """
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 auth = request.authorization
89 auth = request.authorization
90 repo = request.matchdict.get('repo')
90 repo = request.matchdict.get('repo')
91 data = request.json
91 data = request.json
92 operation = data.get('operation')
92 operation = data.get('operation')
93 http_scheme = request.registry.git_lfs_http_scheme
93 http_scheme = request.registry.git_lfs_http_scheme
94
94
95 if operation not in ('download', 'upload'):
95 if operation not in ('download', 'upload'):
96 log.debug('LFS: unsupported operation:%s', operation)
96 log.debug('LFS: unsupported operation:%s', operation)
97 return write_response_error(
97 return write_response_error(
98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
99
99
100 if 'objects' not in data:
100 if 'objects' not in data:
101 log.debug('LFS: missing objects data')
101 log.debug('LFS: missing objects data')
102 return write_response_error(
102 return write_response_error(
103 HTTPBadRequest, 'missing objects data')
103 HTTPBadRequest, 'missing objects data')
104
104
105 log.debug('LFS: handling operation of type: %s', operation)
105 log.debug('LFS: handling operation of type: %s', operation)
106
106
107 objects = []
107 objects = []
108 for o in data['objects']:
108 for o in data['objects']:
109 try:
109 try:
110 oid = o['oid']
110 oid = o['oid']
111 obj_size = o['size']
111 obj_size = o['size']
112 except KeyError:
112 except KeyError:
113 log.exception('LFS, failed to extract data')
113 log.exception('LFS, failed to extract data')
114 return write_response_error(
114 return write_response_error(
115 HTTPBadRequest, 'unsupported data in objects')
115 HTTPBadRequest, 'unsupported data in objects')
116
116
117 obj_data = {'oid': oid}
117 obj_data = {'oid': oid}
118
118
119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
120 _scheme=http_scheme)
120 _scheme=http_scheme)
121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
122 _scheme=http_scheme)
122 _scheme=http_scheme)
123 store = LFSOidStore(
123 store = LFSOidStore(
124 oid, repo, store_location=request.registry.git_lfs_store_path)
124 oid, repo, store_location=request.registry.git_lfs_store_path)
125 handler = OidHandler(
125 handler = OidHandler(
126 store, repo, auth, oid, obj_size, obj_data,
126 store, repo, auth, oid, obj_size, obj_data,
127 obj_href, obj_verify_href)
127 obj_href, obj_verify_href)
128
128
129 # this verifies also OIDs
129 # this verifies also OIDs
130 actions, errors = handler.exec_operation(operation)
130 actions, errors = handler.exec_operation(operation)
131 if errors:
131 if errors:
132 log.warning('LFS: got following errors: %s', errors)
132 log.warning('LFS: got following errors: %s', errors)
133 obj_data['errors'] = errors
133 obj_data['errors'] = errors
134
134
135 if actions:
135 if actions:
136 obj_data['actions'] = actions
136 obj_data['actions'] = actions
137
137
138 obj_data['size'] = obj_size
138 obj_data['size'] = obj_size
139 obj_data['authenticated'] = True
139 obj_data['authenticated'] = True
140 objects.append(obj_data)
140 objects.append(obj_data)
141
141
142 result = {'objects': objects, 'transfer': 'basic'}
142 result = {'objects': objects, 'transfer': 'basic'}
143 log.debug('LFS Response %s', safe_result(result))
143 log.debug('LFS Response %s', safe_result(result))
144
144
145 return result
145 return result
146
146
147
147
148 def lfs_objects_oid_upload(request):
148 def lfs_objects_oid_upload(request):
149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
150 repo = request.matchdict.get('repo')
150 repo = request.matchdict.get('repo')
151 oid = request.matchdict.get('oid')
151 oid = request.matchdict.get('oid')
152 store = LFSOidStore(
152 store = LFSOidStore(
153 oid, repo, store_location=request.registry.git_lfs_store_path)
153 oid, repo, store_location=request.registry.git_lfs_store_path)
154 engine = store.get_engine(mode='wb')
154 engine = store.get_engine(mode='wb')
155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
156
156
157 body = request.environ['wsgi.input']
157 body = request.environ['wsgi.input']
158
158
159 with engine as f:
159 with engine as f:
160 blksize = 64 * 1024 # 64kb
160 blksize = 64 * 1024 # 64kb
161 while True:
161 while True:
162 # read in chunks as stream comes in from Gunicorn
162 # read in chunks as stream comes in from Gunicorn
163 # this is a specific Gunicorn support function.
163 # this is a specific Gunicorn support function.
164 # might work differently on waitress
164 # might work differently on waitress
165 chunk = body.read(blksize)
165 chunk = body.read(blksize)
166 if not chunk:
166 if not chunk:
167 break
167 break
168 f.write(chunk)
168 f.write(chunk)
169
169
170 return {'upload': 'ok'}
170 return {'upload': 'ok'}
171
171
172
172
173 def lfs_objects_oid_download(request):
173 def lfs_objects_oid_download(request):
174 repo = request.matchdict.get('repo')
174 repo = request.matchdict.get('repo')
175 oid = request.matchdict.get('oid')
175 oid = request.matchdict.get('oid')
176
176
177 store = LFSOidStore(
177 store = LFSOidStore(
178 oid, repo, store_location=request.registry.git_lfs_store_path)
178 oid, repo, store_location=request.registry.git_lfs_store_path)
179 if not store.has_oid():
179 if not store.has_oid():
180 log.debug('LFS: oid %s does not exists in store', oid)
180 log.debug('LFS: oid %s does not exists in store', oid)
181 return write_response_error(
181 return write_response_error(
182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
183
183
184 # TODO(marcink): support range header ?
184 # TODO(marcink): support range header ?
185 # Range: bytes=0-, `bytes=(\d+)\-.*`
185 # Range: bytes=0-, `bytes=(\d+)\-.*`
186
186
187 f = open(store.oid_path, 'rb')
187 f = open(store.oid_path, 'rb')
188 response = Response(
188 response = Response(
189 content_type='application/octet-stream', app_iter=FileIter(f))
189 content_type='application/octet-stream', app_iter=FileIter(f))
190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
191 return response
191 return response
192
192
193
193
194 def lfs_objects_verify(request):
194 def lfs_objects_verify(request):
195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
196 repo = request.matchdict.get('repo')
196 repo = request.matchdict.get('repo')
197
197
198 data = request.json
198 data = request.json
199 oid = data.get('oid')
199 oid = data.get('oid')
200 size = safe_int(data.get('size'))
200 size = safe_int(data.get('size'))
201
201
202 if not (oid and size):
202 if not (oid and size):
203 return write_response_error(
203 return write_response_error(
204 HTTPBadRequest, 'missing oid and size in request data')
204 HTTPBadRequest, 'missing oid and size in request data')
205
205
206 store = LFSOidStore(
206 store = LFSOidStore(
207 oid, repo, store_location=request.registry.git_lfs_store_path)
207 oid, repo, store_location=request.registry.git_lfs_store_path)
208 if not store.has_oid():
208 if not store.has_oid():
209 log.debug('LFS: oid %s does not exists in store', oid)
209 log.debug('LFS: oid %s does not exists in store', oid)
210 return write_response_error(
210 return write_response_error(
211 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
211 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
212
212
213 store_size = store.size_oid()
213 store_size = store.size_oid()
214 if store_size != size:
214 if store_size != size:
215 msg = 'requested file size mismatch store size:%s requested:%s' % (
215 msg = 'requested file size mismatch store size:%s requested:%s' % (
216 store_size, size)
216 store_size, size)
217 return write_response_error(
217 return write_response_error(
218 HTTPUnprocessableEntity, msg)
218 HTTPUnprocessableEntity, msg)
219
219
220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
221
221
222
222
223 def lfs_objects_lock(request):
223 def lfs_objects_lock(request):
224 return write_response_error(
224 return write_response_error(
225 HTTPNotImplemented, 'GIT LFS locking api not supported')
225 HTTPNotImplemented, 'GIT LFS locking api not supported')
226
226
227
227
228 def not_found(request):
228 def not_found(request):
229 return write_response_error(
229 return write_response_error(
230 HTTPNotFound, 'request path not found')
230 HTTPNotFound, 'request path not found')
231
231
232
232
233 def lfs_disabled(request):
233 def lfs_disabled(request):
234 return write_response_error(
234 return write_response_error(
235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
236
236
237
237
238 def git_lfs_app(config):
238 def git_lfs_app(config):
239
239
240 # v1 API deprecation endpoint
240 # v1 API deprecation endpoint
241 config.add_route('lfs_objects',
241 config.add_route('lfs_objects',
242 '/{repo:.*?[^/]}/info/lfs/objects')
242 '/{repo:.*?[^/]}/info/lfs/objects')
243 config.add_view(lfs_objects, route_name='lfs_objects',
243 config.add_view(lfs_objects, route_name='lfs_objects',
244 request_method='POST', renderer='json')
244 request_method='POST', renderer='json')
245
245
246 # locking API
246 # locking API
247 config.add_route('lfs_objects_lock',
247 config.add_route('lfs_objects_lock',
248 '/{repo:.*?[^/]}/info/lfs/locks')
248 '/{repo:.*?[^/]}/info/lfs/locks')
249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
250 request_method=('POST', 'GET'), renderer='json')
250 request_method=('POST', 'GET'), renderer='json')
251
251
252 config.add_route('lfs_objects_lock_verify',
252 config.add_route('lfs_objects_lock_verify',
253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
255 request_method=('POST', 'GET'), renderer='json')
255 request_method=('POST', 'GET'), renderer='json')
256
256
257 # batch API
257 # batch API
258 config.add_route('lfs_objects_batch',
258 config.add_route('lfs_objects_batch',
259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
261 request_method='POST', renderer='json')
261 request_method='POST', renderer='json')
262
262
263 # oid upload/download API
263 # oid upload/download API
264 config.add_route('lfs_objects_oid',
264 config.add_route('lfs_objects_oid',
265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
267 request_method='PUT', renderer='json')
267 request_method='PUT', renderer='json')
268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
269 request_method='GET', renderer='json')
269 request_method='GET', renderer='json')
270
270
271 # verification API
271 # verification API
272 config.add_route('lfs_objects_verify',
272 config.add_route('lfs_objects_verify',
273 '/{repo:.*?[^/]}/info/lfs/verify')
273 '/{repo:.*?[^/]}/info/lfs/verify')
274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
275 request_method='POST', renderer='json')
275 request_method='POST', renderer='json')
276
276
277 # not found handler for API
277 # not found handler for API
278 config.add_notfound_view(not_found, renderer='json')
278 config.add_notfound_view(not_found, renderer='json')
279
279
280
280
281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
282 config = Configurator()
282 config = Configurator()
283 if git_lfs_enabled:
283 if git_lfs_enabled:
284 config.include(git_lfs_app)
284 config.include(git_lfs_app)
285 config.registry.git_lfs_store_path = git_lfs_store_path
285 config.registry.git_lfs_store_path = git_lfs_store_path
286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
287 else:
287 else:
288 # not found handler for API, reporting disabled LFS support
288 # not found handler for API, reporting disabled LFS support
289 config.add_notfound_view(lfs_disabled, renderer='json')
289 config.add_notfound_view(lfs_disabled, renderer='json')
290
290
291 app = config.make_wsgi_app()
291 app = config.make_wsgi_app()
292 return app
292 return app
@@ -1,175 +1,175 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import logging
20 import logging
21 from collections import OrderedDict
21 from collections import OrderedDict
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 class OidHandler(object):
26 class OidHandler(object):
27
27
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 obj_verify_href=None):
29 obj_verify_href=None):
30 self.current_store = store
30 self.current_store = store
31 self.repo_name = repo_name
31 self.repo_name = repo_name
32 self.auth = auth
32 self.auth = auth
33 self.oid = oid
33 self.oid = oid
34 self.obj_size = obj_size
34 self.obj_size = obj_size
35 self.obj_data = obj_data
35 self.obj_data = obj_data
36 self.obj_href = obj_href
36 self.obj_href = obj_href
37 self.obj_verify_href = obj_verify_href
37 self.obj_verify_href = obj_verify_href
38
38
39 def get_store(self, mode=None):
39 def get_store(self, mode=None):
40 return self.current_store
40 return self.current_store
41
41
42 def get_auth(self):
42 def get_auth(self):
43 """returns auth header for re-use in upload/download"""
43 """returns auth header for re-use in upload/download"""
44 return " ".join(self.auth)
44 return " ".join(self.auth)
45
45
46 def download(self):
46 def download(self):
47
47
48 store = self.get_store()
48 store = self.get_store()
49 response = None
49 response = None
50 has_errors = None
50 has_errors = None
51
51
52 if not store.has_oid():
52 if not store.has_oid():
53 # error reply back to client that something is wrong with dl
53 # error reply back to client that something is wrong with dl
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
55 has_errors = OrderedDict(
55 has_errors = OrderedDict(
56 error=OrderedDict(
56 error=OrderedDict(
57 code=404,
57 code=404,
58 message=err_msg
58 message=err_msg
59 )
59 )
60 )
60 )
61
61
62 download_action = OrderedDict(
62 download_action = OrderedDict(
63 href=self.obj_href,
63 href=self.obj_href,
64 header=OrderedDict([("Authorization", self.get_auth())])
64 header=OrderedDict([("Authorization", self.get_auth())])
65 )
65 )
66 if not has_errors:
66 if not has_errors:
67 response = OrderedDict(download=download_action)
67 response = OrderedDict(download=download_action)
68 return response, has_errors
68 return response, has_errors
69
69
70 def upload(self, skip_existing=True):
70 def upload(self, skip_existing=True):
71 """
71 """
72 Write upload action for git-lfs server
72 Write upload action for git-lfs server
73 """
73 """
74
74
75 store = self.get_store()
75 store = self.get_store()
76 response = None
76 response = None
77 has_errors = None
77 has_errors = None
78
78
79 # verify if we have the OID before, if we do, reply with empty
79 # verify if we have the OID before, if we do, reply with empty
80 if store.has_oid():
80 if store.has_oid():
81 log.debug('LFS: store already has oid %s', store.oid)
81 log.debug('LFS: store already has oid %s', store.oid)
82
82
83 # validate size
83 # validate size
84 store_size = store.size_oid()
84 store_size = store.size_oid()
85 size_match = store_size == self.obj_size
85 size_match = store_size == self.obj_size
86 if not size_match:
86 if not size_match:
87 log.warning(
87 log.warning(
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 self.oid, store_size, self.obj_size)
89 self.oid, store_size, self.obj_size)
90 elif skip_existing:
90 elif skip_existing:
91 log.debug('LFS: skipping further action as oid is existing')
91 log.debug('LFS: skipping further action as oid is existing')
92 return response, has_errors
92 return response, has_errors
93
93
94 chunked = ("Transfer-Encoding", "chunked")
94 chunked = ("Transfer-Encoding", "chunked")
95 upload_action = OrderedDict(
95 upload_action = OrderedDict(
96 href=self.obj_href,
96 href=self.obj_href,
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 )
98 )
99 if not has_errors:
99 if not has_errors:
100 response = OrderedDict(upload=upload_action)
100 response = OrderedDict(upload=upload_action)
101 # if specified in handler, return the verification endpoint
101 # if specified in handler, return the verification endpoint
102 if self.obj_verify_href:
102 if self.obj_verify_href:
103 verify_action = OrderedDict(
103 verify_action = OrderedDict(
104 href=self.obj_verify_href,
104 href=self.obj_verify_href,
105 header=OrderedDict([("Authorization", self.get_auth())])
105 header=OrderedDict([("Authorization", self.get_auth())])
106 )
106 )
107 response['verify'] = verify_action
107 response['verify'] = verify_action
108 return response, has_errors
108 return response, has_errors
109
109
110 def exec_operation(self, operation, *args, **kwargs):
110 def exec_operation(self, operation, *args, **kwargs):
111 handler = getattr(self, operation)
111 handler = getattr(self, operation)
112 log.debug('LFS: handling request using %s handler', handler)
112 log.debug('LFS: handling request using %s handler', handler)
113 return handler(*args, **kwargs)
113 return handler(*args, **kwargs)
114
114
115
115
116 class LFSOidStore(object):
116 class LFSOidStore(object):
117
117
118 def __init__(self, oid, repo, store_location=None):
118 def __init__(self, oid, repo, store_location=None):
119 self.oid = oid
119 self.oid = oid
120 self.repo = repo
120 self.repo = repo
121 self.store_path = store_location or self.get_default_store()
121 self.store_path = store_location or self.get_default_store()
122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
123 self.oid_path = os.path.join(self.store_path, oid)
123 self.oid_path = os.path.join(self.store_path, oid)
124 self.fd = None
124 self.fd = None
125
125
126 def get_engine(self, mode):
126 def get_engine(self, mode):
127 """
127 """
128 engine = .get_engine(mode='wb')
128 engine = .get_engine(mode='wb')
129 with engine as f:
129 with engine as f:
130 f.write('...')
130 f.write('...')
131 """
131 """
132
132
133 class StoreEngine(object):
133 class StoreEngine(object):
134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
135 self.mode = mode
135 self.mode = mode
136 self.store_path = store_path
136 self.store_path = store_path
137 self.oid_path = oid_path
137 self.oid_path = oid_path
138 self.tmp_oid_path = tmp_oid_path
138 self.tmp_oid_path = tmp_oid_path
139
139
140 def __enter__(self):
140 def __enter__(self):
141 if not os.path.isdir(self.store_path):
141 if not os.path.isdir(self.store_path):
142 os.makedirs(self.store_path)
142 os.makedirs(self.store_path)
143
143
144 # TODO(marcink): maybe write metadata here with size/oid ?
144 # TODO(marcink): maybe write metadata here with size/oid ?
145 fd = open(self.tmp_oid_path, self.mode)
145 fd = open(self.tmp_oid_path, self.mode)
146 self.fd = fd
146 self.fd = fd
147 return fd
147 return fd
148
148
149 def __exit__(self, exc_type, exc_value, traceback):
149 def __exit__(self, exc_type, exc_value, traceback):
150 # close tmp file, and rename to final destination
150 # close tmp file, and rename to final destination
151 self.fd.close()
151 self.fd.close()
152 shutil.move(self.tmp_oid_path, self.oid_path)
152 shutil.move(self.tmp_oid_path, self.oid_path)
153
153
154 return StoreEngine(
154 return StoreEngine(
155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
156
156
157 def get_default_store(self):
157 def get_default_store(self):
158 """
158 """
159 Default store, consistent with defaults of Mercurial large files store
159 Default store, consistent with defaults of Mercurial large files store
160 which is /home/username/.cache/largefiles
160 which is /home/username/.cache/largefiles
161 """
161 """
162 user_home = os.path.expanduser("~")
162 user_home = os.path.expanduser("~")
163 return os.path.join(user_home, '.cache', 'lfs-store')
163 return os.path.join(user_home, '.cache', 'lfs-store')
164
164
165 def has_oid(self):
165 def has_oid(self):
166 return os.path.exists(os.path.join(self.store_path, self.oid))
166 return os.path.exists(os.path.join(self.store_path, self.oid))
167
167
168 def size_oid(self):
168 def size_oid(self):
169 size = -1
169 size = -1
170
170
171 if self.has_oid():
171 if self.has_oid():
172 oid = os.path.join(self.store_path, self.oid)
172 oid = os.path.join(self.store_path, self.oid)
173 size = os.stat(oid).st_size
173 size = os.stat(oid).st_size
174
174
175 return size
175 return size
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,273 +1,273 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21
21
22 from vcsserver.lib.rc_json import json
22 from vcsserver.lib.rc_json import json
23 from vcsserver.str_utils import safe_bytes
23 from vcsserver.str_utils import safe_bytes
24 from vcsserver.git_lfs.app import create_app
24 from vcsserver.git_lfs.app import create_app
25
25
26
26
27 @pytest.fixture(scope='function')
27 @pytest.fixture(scope='function')
28 def git_lfs_app(tmpdir):
28 def git_lfs_app(tmpdir):
29 custom_app = WebObTestApp(create_app(
29 custom_app = WebObTestApp(create_app(
30 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
30 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
31 git_lfs_http_scheme='http'))
31 git_lfs_http_scheme='http'))
32 custom_app._store = str(tmpdir)
32 custom_app._store = str(tmpdir)
33 return custom_app
33 return custom_app
34
34
35
35
36 @pytest.fixture(scope='function')
36 @pytest.fixture(scope='function')
37 def git_lfs_https_app(tmpdir):
37 def git_lfs_https_app(tmpdir):
38 custom_app = WebObTestApp(create_app(
38 custom_app = WebObTestApp(create_app(
39 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
39 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
40 git_lfs_http_scheme='https'))
40 git_lfs_http_scheme='https'))
41 custom_app._store = str(tmpdir)
41 custom_app._store = str(tmpdir)
42 return custom_app
42 return custom_app
43
43
44
44
45 @pytest.fixture()
45 @pytest.fixture()
46 def http_auth():
46 def http_auth():
47 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
47 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
48
48
49
49
50 class TestLFSApplication(object):
50 class TestLFSApplication(object):
51
51
52 def test_app_wrong_path(self, git_lfs_app):
52 def test_app_wrong_path(self, git_lfs_app):
53 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
53 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
54
54
55 def test_app_deprecated_endpoint(self, git_lfs_app):
55 def test_app_deprecated_endpoint(self, git_lfs_app):
56 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
56 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
57 assert response.status_code == 501
57 assert response.status_code == 501
58 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
58 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
59
59
60 def test_app_lock_verify_api_not_available(self, git_lfs_app):
60 def test_app_lock_verify_api_not_available(self, git_lfs_app):
61 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
61 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
62 assert response.status_code == 501
62 assert response.status_code == 501
63 assert json.loads(response.text) == {
63 assert json.loads(response.text) == {
64 'message': 'GIT LFS locking api not supported'}
64 'message': 'GIT LFS locking api not supported'}
65
65
66 def test_app_lock_api_not_available(self, git_lfs_app):
66 def test_app_lock_api_not_available(self, git_lfs_app):
67 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
67 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
68 assert response.status_code == 501
68 assert response.status_code == 501
69 assert json.loads(response.text) == {
69 assert json.loads(response.text) == {
70 'message': 'GIT LFS locking api not supported'}
70 'message': 'GIT LFS locking api not supported'}
71
71
72 def test_app_batch_api_missing_auth(self, git_lfs_app):
72 def test_app_batch_api_missing_auth(self, git_lfs_app):
73 git_lfs_app.post_json(
73 git_lfs_app.post_json(
74 '/repo/info/lfs/objects/batch', params={}, status=403)
74 '/repo/info/lfs/objects/batch', params={}, status=403)
75
75
76 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
76 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
77 response = git_lfs_app.post_json(
77 response = git_lfs_app.post_json(
78 '/repo/info/lfs/objects/batch', params={}, status=400,
78 '/repo/info/lfs/objects/batch', params={}, status=400,
79 extra_environ=http_auth)
79 extra_environ=http_auth)
80 assert json.loads(response.text) == {
80 assert json.loads(response.text) == {
81 'message': 'unsupported operation mode: `None`'}
81 'message': 'unsupported operation mode: `None`'}
82
82
83 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
83 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
84 response = git_lfs_app.post_json(
84 response = git_lfs_app.post_json(
85 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
85 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
86 status=400, extra_environ=http_auth)
86 status=400, extra_environ=http_auth)
87 assert json.loads(response.text) == {
87 assert json.loads(response.text) == {
88 'message': 'missing objects data'}
88 'message': 'missing objects data'}
89
89
90 def test_app_batch_api_unsupported_data_in_objects(
90 def test_app_batch_api_unsupported_data_in_objects(
91 self, git_lfs_app, http_auth):
91 self, git_lfs_app, http_auth):
92 params = {'operation': 'download',
92 params = {'operation': 'download',
93 'objects': [{}]}
93 'objects': [{}]}
94 response = git_lfs_app.post_json(
94 response = git_lfs_app.post_json(
95 '/repo/info/lfs/objects/batch', params=params, status=400,
95 '/repo/info/lfs/objects/batch', params=params, status=400,
96 extra_environ=http_auth)
96 extra_environ=http_auth)
97 assert json.loads(response.text) == {
97 assert json.loads(response.text) == {
98 'message': 'unsupported data in objects'}
98 'message': 'unsupported data in objects'}
99
99
100 def test_app_batch_api_download_missing_object(
100 def test_app_batch_api_download_missing_object(
101 self, git_lfs_app, http_auth):
101 self, git_lfs_app, http_auth):
102 params = {'operation': 'download',
102 params = {'operation': 'download',
103 'objects': [{'oid': '123', 'size': '1024'}]}
103 'objects': [{'oid': '123', 'size': '1024'}]}
104 response = git_lfs_app.post_json(
104 response = git_lfs_app.post_json(
105 '/repo/info/lfs/objects/batch', params=params,
105 '/repo/info/lfs/objects/batch', params=params,
106 extra_environ=http_auth)
106 extra_environ=http_auth)
107
107
108 expected_objects = [
108 expected_objects = [
109 {'authenticated': True,
109 {'authenticated': True,
110 'errors': {'error': {
110 'errors': {'error': {
111 'code': 404,
111 'code': 404,
112 'message': 'object: 123 does not exist in store'}},
112 'message': 'object: 123 does not exist in store'}},
113 'oid': '123',
113 'oid': '123',
114 'size': '1024'}
114 'size': '1024'}
115 ]
115 ]
116 assert json.loads(response.text) == {
116 assert json.loads(response.text) == {
117 'objects': expected_objects, 'transfer': 'basic'}
117 'objects': expected_objects, 'transfer': 'basic'}
118
118
119 def test_app_batch_api_download(self, git_lfs_app, http_auth):
119 def test_app_batch_api_download(self, git_lfs_app, http_auth):
120 oid = '456'
120 oid = '456'
121 oid_path = os.path.join(git_lfs_app._store, oid)
121 oid_path = os.path.join(git_lfs_app._store, oid)
122 if not os.path.isdir(os.path.dirname(oid_path)):
122 if not os.path.isdir(os.path.dirname(oid_path)):
123 os.makedirs(os.path.dirname(oid_path))
123 os.makedirs(os.path.dirname(oid_path))
124 with open(oid_path, 'wb') as f:
124 with open(oid_path, 'wb') as f:
125 f.write(safe_bytes('OID_CONTENT'))
125 f.write(safe_bytes('OID_CONTENT'))
126
126
127 params = {'operation': 'download',
127 params = {'operation': 'download',
128 'objects': [{'oid': oid, 'size': '1024'}]}
128 'objects': [{'oid': oid, 'size': '1024'}]}
129 response = git_lfs_app.post_json(
129 response = git_lfs_app.post_json(
130 '/repo/info/lfs/objects/batch', params=params,
130 '/repo/info/lfs/objects/batch', params=params,
131 extra_environ=http_auth)
131 extra_environ=http_auth)
132
132
133 expected_objects = [
133 expected_objects = [
134 {'authenticated': True,
134 {'authenticated': True,
135 'actions': {
135 'actions': {
136 'download': {
136 'download': {
137 'header': {'Authorization': 'Basic XXXXX'},
137 'header': {'Authorization': 'Basic XXXXX'},
138 'href': 'http://localhost/repo/info/lfs/objects/456'},
138 'href': 'http://localhost/repo/info/lfs/objects/456'},
139 },
139 },
140 'oid': '456',
140 'oid': '456',
141 'size': '1024'}
141 'size': '1024'}
142 ]
142 ]
143 assert json.loads(response.text) == {
143 assert json.loads(response.text) == {
144 'objects': expected_objects, 'transfer': 'basic'}
144 'objects': expected_objects, 'transfer': 'basic'}
145
145
146 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
146 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
147 params = {'operation': 'upload',
147 params = {'operation': 'upload',
148 'objects': [{'oid': '123', 'size': '1024'}]}
148 'objects': [{'oid': '123', 'size': '1024'}]}
149 response = git_lfs_app.post_json(
149 response = git_lfs_app.post_json(
150 '/repo/info/lfs/objects/batch', params=params,
150 '/repo/info/lfs/objects/batch', params=params,
151 extra_environ=http_auth)
151 extra_environ=http_auth)
152 expected_objects = [
152 expected_objects = [
153 {'authenticated': True,
153 {'authenticated': True,
154 'actions': {
154 'actions': {
155 'upload': {
155 'upload': {
156 'header': {'Authorization': 'Basic XXXXX',
156 'header': {'Authorization': 'Basic XXXXX',
157 'Transfer-Encoding': 'chunked'},
157 'Transfer-Encoding': 'chunked'},
158 'href': 'http://localhost/repo/info/lfs/objects/123'},
158 'href': 'http://localhost/repo/info/lfs/objects/123'},
159 'verify': {
159 'verify': {
160 'header': {'Authorization': 'Basic XXXXX'},
160 'header': {'Authorization': 'Basic XXXXX'},
161 'href': 'http://localhost/repo/info/lfs/verify'}
161 'href': 'http://localhost/repo/info/lfs/verify'}
162 },
162 },
163 'oid': '123',
163 'oid': '123',
164 'size': '1024'}
164 'size': '1024'}
165 ]
165 ]
166 assert json.loads(response.text) == {
166 assert json.loads(response.text) == {
167 'objects': expected_objects, 'transfer': 'basic'}
167 'objects': expected_objects, 'transfer': 'basic'}
168
168
169 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
169 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
170 params = {'operation': 'upload',
170 params = {'operation': 'upload',
171 'objects': [{'oid': '123', 'size': '1024'}]}
171 'objects': [{'oid': '123', 'size': '1024'}]}
172 response = git_lfs_https_app.post_json(
172 response = git_lfs_https_app.post_json(
173 '/repo/info/lfs/objects/batch', params=params,
173 '/repo/info/lfs/objects/batch', params=params,
174 extra_environ=http_auth)
174 extra_environ=http_auth)
175 expected_objects = [
175 expected_objects = [
176 {'authenticated': True,
176 {'authenticated': True,
177 'actions': {
177 'actions': {
178 'upload': {
178 'upload': {
179 'header': {'Authorization': 'Basic XXXXX',
179 'header': {'Authorization': 'Basic XXXXX',
180 'Transfer-Encoding': 'chunked'},
180 'Transfer-Encoding': 'chunked'},
181 'href': 'https://localhost/repo/info/lfs/objects/123'},
181 'href': 'https://localhost/repo/info/lfs/objects/123'},
182 'verify': {
182 'verify': {
183 'header': {'Authorization': 'Basic XXXXX'},
183 'header': {'Authorization': 'Basic XXXXX'},
184 'href': 'https://localhost/repo/info/lfs/verify'}
184 'href': 'https://localhost/repo/info/lfs/verify'}
185 },
185 },
186 'oid': '123',
186 'oid': '123',
187 'size': '1024'}
187 'size': '1024'}
188 ]
188 ]
189 assert json.loads(response.text) == {
189 assert json.loads(response.text) == {
190 'objects': expected_objects, 'transfer': 'basic'}
190 'objects': expected_objects, 'transfer': 'basic'}
191
191
192 def test_app_verify_api_missing_data(self, git_lfs_app):
192 def test_app_verify_api_missing_data(self, git_lfs_app):
193 params = {'oid': 'missing'}
193 params = {'oid': 'missing'}
194 response = git_lfs_app.post_json(
194 response = git_lfs_app.post_json(
195 '/repo/info/lfs/verify', params=params,
195 '/repo/info/lfs/verify', params=params,
196 status=400)
196 status=400)
197
197
198 assert json.loads(response.text) == {
198 assert json.loads(response.text) == {
199 'message': 'missing oid and size in request data'}
199 'message': 'missing oid and size in request data'}
200
200
201 def test_app_verify_api_missing_obj(self, git_lfs_app):
201 def test_app_verify_api_missing_obj(self, git_lfs_app):
202 params = {'oid': 'missing', 'size': '1024'}
202 params = {'oid': 'missing', 'size': '1024'}
203 response = git_lfs_app.post_json(
203 response = git_lfs_app.post_json(
204 '/repo/info/lfs/verify', params=params,
204 '/repo/info/lfs/verify', params=params,
205 status=404)
205 status=404)
206
206
207 assert json.loads(response.text) == {
207 assert json.loads(response.text) == {
208 'message': 'oid `missing` does not exists in store'}
208 'message': 'oid `missing` does not exists in store'}
209
209
210 def test_app_verify_api_size_mismatch(self, git_lfs_app):
210 def test_app_verify_api_size_mismatch(self, git_lfs_app):
211 oid = 'existing'
211 oid = 'existing'
212 oid_path = os.path.join(git_lfs_app._store, oid)
212 oid_path = os.path.join(git_lfs_app._store, oid)
213 if not os.path.isdir(os.path.dirname(oid_path)):
213 if not os.path.isdir(os.path.dirname(oid_path)):
214 os.makedirs(os.path.dirname(oid_path))
214 os.makedirs(os.path.dirname(oid_path))
215 with open(oid_path, 'wb') as f:
215 with open(oid_path, 'wb') as f:
216 f.write(safe_bytes('OID_CONTENT'))
216 f.write(safe_bytes('OID_CONTENT'))
217
217
218 params = {'oid': oid, 'size': '1024'}
218 params = {'oid': oid, 'size': '1024'}
219 response = git_lfs_app.post_json(
219 response = git_lfs_app.post_json(
220 '/repo/info/lfs/verify', params=params, status=422)
220 '/repo/info/lfs/verify', params=params, status=422)
221
221
222 assert json.loads(response.text) == {
222 assert json.loads(response.text) == {
223 'message': 'requested file size mismatch '
223 'message': 'requested file size mismatch '
224 'store size:11 requested:1024'}
224 'store size:11 requested:1024'}
225
225
226 def test_app_verify_api(self, git_lfs_app):
226 def test_app_verify_api(self, git_lfs_app):
227 oid = 'existing'
227 oid = 'existing'
228 oid_path = os.path.join(git_lfs_app._store, oid)
228 oid_path = os.path.join(git_lfs_app._store, oid)
229 if not os.path.isdir(os.path.dirname(oid_path)):
229 if not os.path.isdir(os.path.dirname(oid_path)):
230 os.makedirs(os.path.dirname(oid_path))
230 os.makedirs(os.path.dirname(oid_path))
231 with open(oid_path, 'wb') as f:
231 with open(oid_path, 'wb') as f:
232 f.write(safe_bytes('OID_CONTENT'))
232 f.write(safe_bytes('OID_CONTENT'))
233
233
234 params = {'oid': oid, 'size': 11}
234 params = {'oid': oid, 'size': 11}
235 response = git_lfs_app.post_json(
235 response = git_lfs_app.post_json(
236 '/repo/info/lfs/verify', params=params)
236 '/repo/info/lfs/verify', params=params)
237
237
238 assert json.loads(response.text) == {
238 assert json.loads(response.text) == {
239 'message': {'size': 'ok', 'in_store': 'ok'}}
239 'message': {'size': 'ok', 'in_store': 'ok'}}
240
240
241 def test_app_download_api_oid_not_existing(self, git_lfs_app):
241 def test_app_download_api_oid_not_existing(self, git_lfs_app):
242 oid = 'missing'
242 oid = 'missing'
243
243
244 response = git_lfs_app.get(
244 response = git_lfs_app.get(
245 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
245 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
246
246
247 assert json.loads(response.text) == {
247 assert json.loads(response.text) == {
248 'message': 'requested file with oid `missing` not found in store'}
248 'message': 'requested file with oid `missing` not found in store'}
249
249
250 def test_app_download_api(self, git_lfs_app):
250 def test_app_download_api(self, git_lfs_app):
251 oid = 'existing'
251 oid = 'existing'
252 oid_path = os.path.join(git_lfs_app._store, oid)
252 oid_path = os.path.join(git_lfs_app._store, oid)
253 if not os.path.isdir(os.path.dirname(oid_path)):
253 if not os.path.isdir(os.path.dirname(oid_path)):
254 os.makedirs(os.path.dirname(oid_path))
254 os.makedirs(os.path.dirname(oid_path))
255 with open(oid_path, 'wb') as f:
255 with open(oid_path, 'wb') as f:
256 f.write(safe_bytes('OID_CONTENT'))
256 f.write(safe_bytes('OID_CONTENT'))
257
257
258 response = git_lfs_app.get(
258 response = git_lfs_app.get(
259 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
259 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
260 assert response
260 assert response
261
261
262 def test_app_upload(self, git_lfs_app):
262 def test_app_upload(self, git_lfs_app):
263 oid = 'uploaded'
263 oid = 'uploaded'
264
264
265 response = git_lfs_app.put(
265 response = git_lfs_app.put(
266 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
266 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
267
267
268 assert json.loads(response.text) == {'upload': 'ok'}
268 assert json.loads(response.text) == {'upload': 'ok'}
269
269
270 # verify that we actually wrote that OID
270 # verify that we actually wrote that OID
271 oid_path = os.path.join(git_lfs_app._store, oid)
271 oid_path = os.path.join(git_lfs_app._store, oid)
272 assert os.path.isfile(oid_path)
272 assert os.path.isfile(oid_path)
273 assert 'CONTENT' == open(oid_path).read()
273 assert 'CONTENT' == open(oid_path).read()
@@ -1,142 +1,142 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from vcsserver.str_utils import safe_bytes
20 from vcsserver.str_utils import safe_bytes
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
22
22
23
23
24 @pytest.fixture()
24 @pytest.fixture()
25 def lfs_store(tmpdir):
25 def lfs_store(tmpdir):
26 repo = 'test'
26 repo = 'test'
27 oid = '123456789'
27 oid = '123456789'
28 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
29 return store
29 return store
30
30
31
31
32 @pytest.fixture()
32 @pytest.fixture()
33 def oid_handler(lfs_store):
33 def oid_handler(lfs_store):
34 store = lfs_store
34 store = lfs_store
35 repo = store.repo
35 repo = store.repo
36 oid = store.oid
36 oid = store.oid
37
37
38 oid_handler = OidHandler(
38 oid_handler = OidHandler(
39 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 store=store, repo_name=repo, auth=('basic', 'xxxx'),
40 oid=oid,
40 oid=oid,
41 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
42 obj_verify_href='http://localhost/verify')
42 obj_verify_href='http://localhost/verify')
43 return oid_handler
43 return oid_handler
44
44
45
45
46 class TestOidHandler(object):
46 class TestOidHandler(object):
47
47
48 @pytest.mark.parametrize('exec_action', [
48 @pytest.mark.parametrize('exec_action', [
49 'download',
49 'download',
50 'upload',
50 'upload',
51 ])
51 ])
52 def test_exec_action(self, exec_action, oid_handler):
52 def test_exec_action(self, exec_action, oid_handler):
53 handler = oid_handler.exec_operation(exec_action)
53 handler = oid_handler.exec_operation(exec_action)
54 assert handler
54 assert handler
55
55
56 def test_exec_action_undefined(self, oid_handler):
56 def test_exec_action_undefined(self, oid_handler):
57 with pytest.raises(AttributeError):
57 with pytest.raises(AttributeError):
58 oid_handler.exec_operation('wrong')
58 oid_handler.exec_operation('wrong')
59
59
60 def test_download_oid_not_existing(self, oid_handler):
60 def test_download_oid_not_existing(self, oid_handler):
61 response, has_errors = oid_handler.exec_operation('download')
61 response, has_errors = oid_handler.exec_operation('download')
62
62
63 assert response is None
63 assert response is None
64 assert has_errors['error'] == {
64 assert has_errors['error'] == {
65 'code': 404,
65 'code': 404,
66 'message': 'object: 123456789 does not exist in store'}
66 'message': 'object: 123456789 does not exist in store'}
67
67
68 def test_download_oid(self, oid_handler):
68 def test_download_oid(self, oid_handler):
69 store = oid_handler.get_store()
69 store = oid_handler.get_store()
70 if not os.path.isdir(os.path.dirname(store.oid_path)):
70 if not os.path.isdir(os.path.dirname(store.oid_path)):
71 os.makedirs(os.path.dirname(store.oid_path))
71 os.makedirs(os.path.dirname(store.oid_path))
72
72
73 with open(store.oid_path, 'wb') as f:
73 with open(store.oid_path, 'wb') as f:
74 f.write(safe_bytes('CONTENT'))
74 f.write(safe_bytes('CONTENT'))
75
75
76 response, has_errors = oid_handler.exec_operation('download')
76 response, has_errors = oid_handler.exec_operation('download')
77
77
78 assert has_errors is None
78 assert has_errors is None
79 assert response['download'] == {
79 assert response['download'] == {
80 'header': {'Authorization': 'basic xxxx'},
80 'header': {'Authorization': 'basic xxxx'},
81 'href': 'http://localhost/handle_oid'
81 'href': 'http://localhost/handle_oid'
82 }
82 }
83
83
84 def test_upload_oid_that_exists(self, oid_handler):
84 def test_upload_oid_that_exists(self, oid_handler):
85 store = oid_handler.get_store()
85 store = oid_handler.get_store()
86 if not os.path.isdir(os.path.dirname(store.oid_path)):
86 if not os.path.isdir(os.path.dirname(store.oid_path)):
87 os.makedirs(os.path.dirname(store.oid_path))
87 os.makedirs(os.path.dirname(store.oid_path))
88
88
89 with open(store.oid_path, 'wb') as f:
89 with open(store.oid_path, 'wb') as f:
90 f.write(safe_bytes('CONTENT'))
90 f.write(safe_bytes('CONTENT'))
91 oid_handler.obj_size = 7
91 oid_handler.obj_size = 7
92 response, has_errors = oid_handler.exec_operation('upload')
92 response, has_errors = oid_handler.exec_operation('upload')
93 assert has_errors is None
93 assert has_errors is None
94 assert response is None
94 assert response is None
95
95
96 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
96 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
97 store = oid_handler.get_store()
97 store = oid_handler.get_store()
98 if not os.path.isdir(os.path.dirname(store.oid_path)):
98 if not os.path.isdir(os.path.dirname(store.oid_path)):
99 os.makedirs(os.path.dirname(store.oid_path))
99 os.makedirs(os.path.dirname(store.oid_path))
100
100
101 with open(store.oid_path, 'wb') as f:
101 with open(store.oid_path, 'wb') as f:
102 f.write(safe_bytes('CONTENT'))
102 f.write(safe_bytes('CONTENT'))
103
103
104 oid_handler.obj_size = 10240
104 oid_handler.obj_size = 10240
105 response, has_errors = oid_handler.exec_operation('upload')
105 response, has_errors = oid_handler.exec_operation('upload')
106 assert has_errors is None
106 assert has_errors is None
107 assert response['upload'] == {
107 assert response['upload'] == {
108 'header': {'Authorization': 'basic xxxx',
108 'header': {'Authorization': 'basic xxxx',
109 'Transfer-Encoding': 'chunked'},
109 'Transfer-Encoding': 'chunked'},
110 'href': 'http://localhost/handle_oid',
110 'href': 'http://localhost/handle_oid',
111 }
111 }
112
112
113 def test_upload_oid(self, oid_handler):
113 def test_upload_oid(self, oid_handler):
114 response, has_errors = oid_handler.exec_operation('upload')
114 response, has_errors = oid_handler.exec_operation('upload')
115 assert has_errors is None
115 assert has_errors is None
116 assert response['upload'] == {
116 assert response['upload'] == {
117 'header': {'Authorization': 'basic xxxx',
117 'header': {'Authorization': 'basic xxxx',
118 'Transfer-Encoding': 'chunked'},
118 'Transfer-Encoding': 'chunked'},
119 'href': 'http://localhost/handle_oid'
119 'href': 'http://localhost/handle_oid'
120 }
120 }
121
121
122
122
123 class TestLFSStore(object):
123 class TestLFSStore(object):
124 def test_write_oid(self, lfs_store):
124 def test_write_oid(self, lfs_store):
125 oid_location = lfs_store.oid_path
125 oid_location = lfs_store.oid_path
126
126
127 assert not os.path.isfile(oid_location)
127 assert not os.path.isfile(oid_location)
128
128
129 engine = lfs_store.get_engine(mode='wb')
129 engine = lfs_store.get_engine(mode='wb')
130 with engine as f:
130 with engine as f:
131 f.write(safe_bytes('CONTENT'))
131 f.write(safe_bytes('CONTENT'))
132
132
133 assert os.path.isfile(oid_location)
133 assert os.path.isfile(oid_location)
134
134
135 def test_detect_has_oid(self, lfs_store):
135 def test_detect_has_oid(self, lfs_store):
136
136
137 assert lfs_store.has_oid() is False
137 assert lfs_store.has_oid() is False
138 engine = lfs_store.get_engine(mode='wb')
138 engine = lfs_store.get_engine(mode='wb')
139 with engine as f:
139 with engine as f:
140 f.write(safe_bytes('CONTENT'))
140 f.write(safe_bytes('CONTENT'))
141
141
142 assert lfs_store.has_oid() is True
142 assert lfs_store.has_oid() is True
@@ -1,50 +1,50 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import copy
17 import copy
18 from functools import wraps
18 from functools import wraps
19
19
20
20
21 def get_cython_compat_decorator(wrapper, func):
21 def get_cython_compat_decorator(wrapper, func):
22 """
22 """
23 Creates a cython compatible decorator. The previously used
23 Creates a cython compatible decorator. The previously used
24 decorator.decorator() function seems to be incompatible with cython.
24 decorator.decorator() function seems to be incompatible with cython.
25
25
26 :param wrapper: __wrapper method of the decorator class
26 :param wrapper: __wrapper method of the decorator class
27 :param func: decorated function
27 :param func: decorated function
28 """
28 """
29 @wraps(func)
29 @wraps(func)
30 def local_wrapper(*args, **kwds):
30 def local_wrapper(*args, **kwds):
31 return wrapper(func, *args, **kwds)
31 return wrapper(func, *args, **kwds)
32 local_wrapper.__wrapped__ = func
32 local_wrapper.__wrapped__ = func
33 return local_wrapper
33 return local_wrapper
34
34
35
35
36 def safe_result(result):
36 def safe_result(result):
37 """clean result for better representation in logs"""
37 """clean result for better representation in logs"""
38 clean_copy = copy.deepcopy(result)
38 clean_copy = copy.deepcopy(result)
39
39
40 try:
40 try:
41 if 'objects' in clean_copy:
41 if 'objects' in clean_copy:
42 for oid_data in clean_copy['objects']:
42 for oid_data in clean_copy['objects']:
43 if 'actions' in oid_data:
43 if 'actions' in oid_data:
44 for action_name, data in oid_data['actions'].items():
44 for action_name, data in oid_data['actions'].items():
45 if 'header' in data:
45 if 'header' in data:
46 data['header'] = {'Authorization': '*****'}
46 data['header'] = {'Authorization': '*****'}
47 except Exception:
47 except Exception:
48 return result
48 return result
49
49
50 return clean_copy
50 return clean_copy
@@ -1,88 +1,88 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24
24
25 # patch demandimport, due to bug in mercurial when it always triggers
25 # patch demandimport, due to bug in mercurial when it always triggers
26 # demandimport.enable()
26 # demandimport.enable()
27 from vcsserver.str_utils import safe_bytes
27 from vcsserver.str_utils import safe_bytes
28
28
29 demandimport.enable = lambda *args, **kwargs: 1
29 demandimport.enable = lambda *args, **kwargs: 1
30
30
31 from mercurial import ui
31 from mercurial import ui
32 from mercurial import patch
32 from mercurial import patch
33 from mercurial import config
33 from mercurial import config
34 from mercurial import extensions
34 from mercurial import extensions
35 from mercurial import scmutil
35 from mercurial import scmutil
36 from mercurial import archival
36 from mercurial import archival
37 from mercurial import discovery
37 from mercurial import discovery
38 from mercurial import unionrepo
38 from mercurial import unionrepo
39 from mercurial import localrepo
39 from mercurial import localrepo
40 from mercurial import merge as hg_merge
40 from mercurial import merge as hg_merge
41 from mercurial import subrepo
41 from mercurial import subrepo
42 from mercurial import subrepoutil
42 from mercurial import subrepoutil
43 from mercurial import tags as hg_tag
43 from mercurial import tags as hg_tag
44 from mercurial import util as hgutil
44 from mercurial import util as hgutil
45 from mercurial.commands import clone, pull
45 from mercurial.commands import clone, pull
46 from mercurial.node import nullid
46 from mercurial.node import nullid
47 from mercurial.context import memctx, memfilectx
47 from mercurial.context import memctx, memfilectx
48 from mercurial.error import (
48 from mercurial.error import (
49 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
49 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
50 RequirementError, ProgrammingError)
50 RequirementError, ProgrammingError)
51 from mercurial.hgweb import hgweb_mod
51 from mercurial.hgweb import hgweb_mod
52 from mercurial.localrepo import instance
52 from mercurial.localrepo import instance
53 from mercurial.match import match, alwaysmatcher, patternmatcher
53 from mercurial.match import match, alwaysmatcher, patternmatcher
54 from mercurial.mdiff import diffopts
54 from mercurial.mdiff import diffopts
55 from mercurial.node import bin, hex
55 from mercurial.node import bin, hex
56 from mercurial.encoding import tolocal
56 from mercurial.encoding import tolocal
57 from mercurial.discovery import findcommonoutgoing
57 from mercurial.discovery import findcommonoutgoing
58 from mercurial.hg import peer
58 from mercurial.hg import peer
59 from mercurial.httppeer import makepeer
59 from mercurial.httppeer import makepeer
60 from mercurial.utils.urlutil import url as hg_url
60 from mercurial.utils.urlutil import url as hg_url
61 from mercurial.scmutil import revrange, revsymbol
61 from mercurial.scmutil import revrange, revsymbol
62 from mercurial.node import nullrev
62 from mercurial.node import nullrev
63 from mercurial import exchange
63 from mercurial import exchange
64 from hgext import largefiles
64 from hgext import largefiles
65
65
66 # those authnadlers are patched for python 2.6.5 bug an
66 # those authnadlers are patched for python 2.6.5 bug an
67 # infinit looping when given invalid resources
67 # infinit looping when given invalid resources
68 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
68 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
69
69
70 # hg strip is in core now
70 # hg strip is in core now
71 from mercurial import strip as hgext_strip
71 from mercurial import strip as hgext_strip
72
72
73
73
74 def get_ctx(repo, ref):
74 def get_ctx(repo, ref):
75 if not isinstance(ref, int):
75 if not isinstance(ref, int):
76 ref = safe_bytes(ref)
76 ref = safe_bytes(ref)
77 try:
77 try:
78 ctx = repo[ref]
78 ctx = repo[ref]
79 except (ProgrammingError, TypeError):
79 except (ProgrammingError, TypeError):
80 # we're unable to find the rev using a regular lookup, we fallback
80 # we're unable to find the rev using a regular lookup, we fallback
81 # to slower, but backward compat revsymbol usage
81 # to slower, but backward compat revsymbol usage
82 ctx = revsymbol(repo, ref)
82 ctx = revsymbol(repo, ref)
83 except (LookupError, RepoLookupError):
83 except (LookupError, RepoLookupError):
84 # Similar case as above but only for refs that are not numeric
84 # Similar case as above but only for refs that are not numeric
85 if isinstance(ref, int):
85 if isinstance(ref, int):
86 raise
86 raise
87 ctx = revsymbol(repo, ref)
87 ctx = revsymbol(repo, ref)
88 return ctx
88 return ctx
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto._capabilities = wrapper
39 lfproto._capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto._capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(orig, repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 return calc_capabilities(orig, repo, proto)
56 else:
56 else:
57 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
58 return orig(repo, proto)
58 return orig(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from .hgcompat import subrepo, subrepoutil
65 from .hgcompat import subrepo, subrepoutil
66 from vcsserver.exceptions import SubrepoMergeException
66 from vcsserver.exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring this subrepository in the
73 ``ctx`` is the context referring this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False, missing=False):
90 def dirty(self, ignoreupdate=False, missing=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepoutil.state(self._ctx, self.ui)
100 substate = subrepoutil.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()()
119 raise SubrepoMergeException()()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
@@ -1,202 +1,202 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import os
19 import os
20 import sys
20 import sys
21 import datetime
21 import datetime
22 import logging
22 import logging
23 import pkg_resources
23 import pkg_resources
24
24
25 import vcsserver
25 import vcsserver
26 from vcsserver.str_utils import safe_bytes
26 from vcsserver.str_utils import safe_bytes
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30
30
31 def get_git_hooks_path(repo_path, bare):
31 def get_git_hooks_path(repo_path, bare):
32 hooks_path = os.path.join(repo_path, 'hooks')
32 hooks_path = os.path.join(repo_path, 'hooks')
33 if not bare:
33 if not bare:
34 hooks_path = os.path.join(repo_path, '.git', 'hooks')
34 hooks_path = os.path.join(repo_path, '.git', 'hooks')
35
35
36 return hooks_path
36 return hooks_path
37
37
38
38
39 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
39 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
40 """
40 """
41 Creates a RhodeCode hook inside a git repository
41 Creates a RhodeCode hook inside a git repository
42
42
43 :param repo_path: path to repository
43 :param repo_path: path to repository
44 :param executable: binary executable to put in the hooks
44 :param executable: binary executable to put in the hooks
45 :param force_create: Create even if same name hook exists
45 :param force_create: Create even if same name hook exists
46 """
46 """
47 executable = executable or sys.executable
47 executable = executable or sys.executable
48 hooks_path = get_git_hooks_path(repo_path, bare)
48 hooks_path = get_git_hooks_path(repo_path, bare)
49
49
50 if not os.path.isdir(hooks_path):
50 if not os.path.isdir(hooks_path):
51 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
51 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
52
52
53 tmpl_post = pkg_resources.resource_string(
53 tmpl_post = pkg_resources.resource_string(
54 'vcsserver', '/'.join(
54 'vcsserver', '/'.join(
55 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
55 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
56 tmpl_pre = pkg_resources.resource_string(
56 tmpl_pre = pkg_resources.resource_string(
57 'vcsserver', '/'.join(
57 'vcsserver', '/'.join(
58 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
58 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
59
59
60 path = '' # not used for now
60 path = '' # not used for now
61 timestamp = datetime.datetime.utcnow().isoformat()
61 timestamp = datetime.datetime.utcnow().isoformat()
62
62
63 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
63 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
64 log.debug('Installing git hook in repo %s', repo_path)
64 log.debug('Installing git hook in repo %s', repo_path)
65 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
65 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
66 _rhodecode_hook = check_rhodecode_hook(_hook_file)
66 _rhodecode_hook = check_rhodecode_hook(_hook_file)
67
67
68 if _rhodecode_hook or force_create:
68 if _rhodecode_hook or force_create:
69 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
69 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
70 try:
70 try:
71 with open(_hook_file, 'wb') as f:
71 with open(_hook_file, 'wb') as f:
72 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
72 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
73 template = template.replace(b'_DATE_', safe_bytes(timestamp))
73 template = template.replace(b'_DATE_', safe_bytes(timestamp))
74 template = template.replace(b'_ENV_', safe_bytes(executable))
74 template = template.replace(b'_ENV_', safe_bytes(executable))
75 template = template.replace(b'_PATH_', safe_bytes(path))
75 template = template.replace(b'_PATH_', safe_bytes(path))
76 f.write(template)
76 f.write(template)
77 os.chmod(_hook_file, 0o755)
77 os.chmod(_hook_file, 0o755)
78 except IOError:
78 except IOError:
79 log.exception('error writing hook file %s', _hook_file)
79 log.exception('error writing hook file %s', _hook_file)
80 else:
80 else:
81 log.debug('skipping writing hook file')
81 log.debug('skipping writing hook file')
82
82
83 return True
83 return True
84
84
85
85
86 def get_svn_hooks_path(repo_path):
86 def get_svn_hooks_path(repo_path):
87 hooks_path = os.path.join(repo_path, 'hooks')
87 hooks_path = os.path.join(repo_path, 'hooks')
88
88
89 return hooks_path
89 return hooks_path
90
90
91
91
92 def install_svn_hooks(repo_path, executable=None, force_create=False):
92 def install_svn_hooks(repo_path, executable=None, force_create=False):
93 """
93 """
94 Creates RhodeCode hooks inside a svn repository
94 Creates RhodeCode hooks inside a svn repository
95
95
96 :param repo_path: path to repository
96 :param repo_path: path to repository
97 :param executable: binary executable to put in the hooks
97 :param executable: binary executable to put in the hooks
98 :param force_create: Create even if same name hook exists
98 :param force_create: Create even if same name hook exists
99 """
99 """
100 executable = executable or sys.executable
100 executable = executable or sys.executable
101 hooks_path = get_svn_hooks_path(repo_path)
101 hooks_path = get_svn_hooks_path(repo_path)
102 if not os.path.isdir(hooks_path):
102 if not os.path.isdir(hooks_path):
103 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
103 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
104
104
105 tmpl_post = pkg_resources.resource_string(
105 tmpl_post = pkg_resources.resource_string(
106 'vcsserver', '/'.join(
106 'vcsserver', '/'.join(
107 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
107 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
108 tmpl_pre = pkg_resources.resource_string(
108 tmpl_pre = pkg_resources.resource_string(
109 'vcsserver', '/'.join(
109 'vcsserver', '/'.join(
110 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
110 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
111
111
112 path = '' # not used for now
112 path = '' # not used for now
113 timestamp = datetime.datetime.utcnow().isoformat()
113 timestamp = datetime.datetime.utcnow().isoformat()
114
114
115 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
115 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
116 log.debug('Installing svn hook in repo %s', repo_path)
116 log.debug('Installing svn hook in repo %s', repo_path)
117 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
117 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
118 _rhodecode_hook = check_rhodecode_hook(_hook_file)
118 _rhodecode_hook = check_rhodecode_hook(_hook_file)
119
119
120 if _rhodecode_hook or force_create:
120 if _rhodecode_hook or force_create:
121 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
121 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
122
122
123 try:
123 try:
124 with open(_hook_file, 'wb') as f:
124 with open(_hook_file, 'wb') as f:
125 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
125 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
126 template = template.replace(b'_DATE_', safe_bytes(timestamp))
126 template = template.replace(b'_DATE_', safe_bytes(timestamp))
127 template = template.replace(b'_ENV_', safe_bytes(executable))
127 template = template.replace(b'_ENV_', safe_bytes(executable))
128 template = template.replace(b'_PATH_', safe_bytes(path))
128 template = template.replace(b'_PATH_', safe_bytes(path))
129
129
130 f.write(template)
130 f.write(template)
131 os.chmod(_hook_file, 0o755)
131 os.chmod(_hook_file, 0o755)
132 except IOError:
132 except IOError:
133 log.exception('error writing hook file %s', _hook_file)
133 log.exception('error writing hook file %s', _hook_file)
134 else:
134 else:
135 log.debug('skipping writing hook file')
135 log.debug('skipping writing hook file')
136
136
137 return True
137 return True
138
138
139
139
140 def get_version_from_hook(hook_path):
140 def get_version_from_hook(hook_path):
141 version = b''
141 version = b''
142 hook_content = read_hook_content(hook_path)
142 hook_content = read_hook_content(hook_path)
143 matches = re.search(rb'RC_HOOK_VER\s*=\s*(.*)', hook_content)
143 matches = re.search(rb'RC_HOOK_VER\s*=\s*(.*)', hook_content)
144 if matches:
144 if matches:
145 try:
145 try:
146 version = matches.groups()[0]
146 version = matches.groups()[0]
147 log.debug('got version %s from hooks.', version)
147 log.debug('got version %s from hooks.', version)
148 except Exception:
148 except Exception:
149 log.exception("Exception while reading the hook version.")
149 log.exception("Exception while reading the hook version.")
150 return version.replace(b"'", b"")
150 return version.replace(b"'", b"")
151
151
152
152
153 def check_rhodecode_hook(hook_path):
153 def check_rhodecode_hook(hook_path):
154 """
154 """
155 Check if the hook was created by RhodeCode
155 Check if the hook was created by RhodeCode
156 """
156 """
157 if not os.path.exists(hook_path):
157 if not os.path.exists(hook_path):
158 return True
158 return True
159
159
160 log.debug('hook exists, checking if it is from RhodeCode')
160 log.debug('hook exists, checking if it is from RhodeCode')
161
161
162 version = get_version_from_hook(hook_path)
162 version = get_version_from_hook(hook_path)
163 if version:
163 if version:
164 return True
164 return True
165
165
166 return False
166 return False
167
167
168
168
169 def read_hook_content(hook_path) -> bytes:
169 def read_hook_content(hook_path) -> bytes:
170 content = b''
170 content = b''
171 if os.path.isfile(hook_path):
171 if os.path.isfile(hook_path):
172 with open(hook_path, 'rb') as f:
172 with open(hook_path, 'rb') as f:
173 content = f.read()
173 content = f.read()
174 return content
174 return content
175
175
176
176
177 def get_git_pre_hook_version(repo_path, bare):
177 def get_git_pre_hook_version(repo_path, bare):
178 hooks_path = get_git_hooks_path(repo_path, bare)
178 hooks_path = get_git_hooks_path(repo_path, bare)
179 _hook_file = os.path.join(hooks_path, 'pre-receive')
179 _hook_file = os.path.join(hooks_path, 'pre-receive')
180 version = get_version_from_hook(_hook_file)
180 version = get_version_from_hook(_hook_file)
181 return version
181 return version
182
182
183
183
184 def get_git_post_hook_version(repo_path, bare):
184 def get_git_post_hook_version(repo_path, bare):
185 hooks_path = get_git_hooks_path(repo_path, bare)
185 hooks_path = get_git_hooks_path(repo_path, bare)
186 _hook_file = os.path.join(hooks_path, 'post-receive')
186 _hook_file = os.path.join(hooks_path, 'post-receive')
187 version = get_version_from_hook(_hook_file)
187 version = get_version_from_hook(_hook_file)
188 return version
188 return version
189
189
190
190
191 def get_svn_pre_hook_version(repo_path):
191 def get_svn_pre_hook_version(repo_path):
192 hooks_path = get_svn_hooks_path(repo_path)
192 hooks_path = get_svn_hooks_path(repo_path)
193 _hook_file = os.path.join(hooks_path, 'pre-commit')
193 _hook_file = os.path.join(hooks_path, 'pre-commit')
194 version = get_version_from_hook(_hook_file)
194 version = get_version_from_hook(_hook_file)
195 return version
195 return version
196
196
197
197
198 def get_svn_post_hook_version(repo_path):
198 def get_svn_post_hook_version(repo_path):
199 hooks_path = get_svn_hooks_path(repo_path)
199 hooks_path = get_svn_hooks_path(repo_path)
200 _hook_file = os.path.join(hooks_path, 'post-commit')
200 _hook_file = os.path.join(hooks_path, 'post-commit')
201 version = get_version_from_hook(_hook_file)
201 version = get_version_from_hook(_hook_file)
202 return version
202 return version
@@ -1,779 +1,779 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import logging
21 import logging
22 import collections
22 import collections
23 import importlib
23 import importlib
24 import base64
24 import base64
25 import msgpack
25 import msgpack
26 import dataclasses
26 import dataclasses
27 import pygit2
27 import pygit2
28
28
29 import http.client
29 import http.client
30
30
31
31
32 import mercurial.scmutil
32 import mercurial.scmutil
33 import mercurial.node
33 import mercurial.node
34
34
35 from vcsserver.lib.rc_json import json
35 from vcsserver.lib.rc_json import json
36 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.str_utils import ascii_str, safe_str
37 from vcsserver.str_utils import ascii_str, safe_str
38 from vcsserver.remote.git import Repository
38 from vcsserver.remote.git import Repository
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HooksHttpClient(object):
43 class HooksHttpClient(object):
44 proto = 'msgpack.v1'
44 proto = 'msgpack.v1'
45 connection = None
45 connection = None
46
46
47 def __init__(self, hooks_uri):
47 def __init__(self, hooks_uri):
48 self.hooks_uri = hooks_uri
48 self.hooks_uri = hooks_uri
49
49
50 def __repr__(self):
50 def __repr__(self):
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52
52
53 def __call__(self, method, extras):
53 def __call__(self, method, extras):
54 connection = http.client.HTTPConnection(self.hooks_uri)
54 connection = http.client.HTTPConnection(self.hooks_uri)
55 # binary msgpack body
55 # binary msgpack body
56 headers, body = self._serialize(method, extras)
56 headers, body = self._serialize(method, extras)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58
58
59 try:
59 try:
60 try:
60 try:
61 connection.request('POST', '/', body, headers)
61 connection.request('POST', '/', body, headers)
62 except Exception as error:
62 except Exception as error:
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 raise
64 raise
65
65
66 response = connection.getresponse()
66 response = connection.getresponse()
67 try:
67 try:
68 return msgpack.load(response)
68 return msgpack.load(response)
69 except Exception:
69 except Exception:
70 response_data = response.read()
70 response_data = response.read()
71 log.exception('Failed to decode hook response json data. '
71 log.exception('Failed to decode hook response json data. '
72 'response_code:%s, raw_data:%s',
72 'response_code:%s, raw_data:%s',
73 response.status, response_data)
73 response.status, response_data)
74 raise
74 raise
75 finally:
75 finally:
76 connection.close()
76 connection.close()
77
77
78 @classmethod
78 @classmethod
79 def _serialize(cls, hook_name, extras):
79 def _serialize(cls, hook_name, extras):
80 data = {
80 data = {
81 'method': hook_name,
81 'method': hook_name,
82 'extras': extras
82 'extras': extras
83 }
83 }
84 headers = {
84 headers = {
85 "rc-hooks-protocol": cls.proto,
85 "rc-hooks-protocol": cls.proto,
86 "Connection": "keep-alive"
86 "Connection": "keep-alive"
87 }
87 }
88 return headers, msgpack.packb(data)
88 return headers, msgpack.packb(data)
89
89
90
90
91 class HooksDummyClient(object):
91 class HooksDummyClient(object):
92 def __init__(self, hooks_module):
92 def __init__(self, hooks_module):
93 self._hooks_module = importlib.import_module(hooks_module)
93 self._hooks_module = importlib.import_module(hooks_module)
94
94
95 def __call__(self, hook_name, extras):
95 def __call__(self, hook_name, extras):
96 with self._hooks_module.Hooks() as hooks:
96 with self._hooks_module.Hooks() as hooks:
97 return getattr(hooks, hook_name)(extras)
97 return getattr(hooks, hook_name)(extras)
98
98
99
99
100 class HooksShadowRepoClient(object):
100 class HooksShadowRepoClient(object):
101
101
102 def __call__(self, hook_name, extras):
102 def __call__(self, hook_name, extras):
103 return {'output': '', 'status': 0}
103 return {'output': '', 'status': 0}
104
104
105
105
106 class RemoteMessageWriter(object):
106 class RemoteMessageWriter(object):
107 """Writer base class."""
107 """Writer base class."""
108 def write(self, message):
108 def write(self, message):
109 raise NotImplementedError()
109 raise NotImplementedError()
110
110
111
111
112 class HgMessageWriter(RemoteMessageWriter):
112 class HgMessageWriter(RemoteMessageWriter):
113 """Writer that knows how to send messages to mercurial clients."""
113 """Writer that knows how to send messages to mercurial clients."""
114
114
115 def __init__(self, ui):
115 def __init__(self, ui):
116 self.ui = ui
116 self.ui = ui
117
117
118 def write(self, message: str):
118 def write(self, message: str):
119 # TODO: Check why the quiet flag is set by default.
119 # TODO: Check why the quiet flag is set by default.
120 old = self.ui.quiet
120 old = self.ui.quiet
121 self.ui.quiet = False
121 self.ui.quiet = False
122 self.ui.status(message.encode('utf-8'))
122 self.ui.status(message.encode('utf-8'))
123 self.ui.quiet = old
123 self.ui.quiet = old
124
124
125
125
126 class GitMessageWriter(RemoteMessageWriter):
126 class GitMessageWriter(RemoteMessageWriter):
127 """Writer that knows how to send messages to git clients."""
127 """Writer that knows how to send messages to git clients."""
128
128
129 def __init__(self, stdout=None):
129 def __init__(self, stdout=None):
130 self.stdout = stdout or sys.stdout
130 self.stdout = stdout or sys.stdout
131
131
132 def write(self, message: str):
132 def write(self, message: str):
133 self.stdout.write(message)
133 self.stdout.write(message)
134
134
135
135
136 class SvnMessageWriter(RemoteMessageWriter):
136 class SvnMessageWriter(RemoteMessageWriter):
137 """Writer that knows how to send messages to svn clients."""
137 """Writer that knows how to send messages to svn clients."""
138
138
139 def __init__(self, stderr=None):
139 def __init__(self, stderr=None):
140 # SVN needs data sent to stderr for back-to-client messaging
140 # SVN needs data sent to stderr for back-to-client messaging
141 self.stderr = stderr or sys.stderr
141 self.stderr = stderr or sys.stderr
142
142
143 def write(self, message):
143 def write(self, message):
144 self.stderr.write(message.encode('utf-8'))
144 self.stderr.write(message.encode('utf-8'))
145
145
146
146
147 def _handle_exception(result):
147 def _handle_exception(result):
148 exception_class = result.get('exception')
148 exception_class = result.get('exception')
149 exception_traceback = result.get('exception_traceback')
149 exception_traceback = result.get('exception_traceback')
150 log.debug('Handling hook-call exception: %s', exception_class)
150 log.debug('Handling hook-call exception: %s', exception_class)
151
151
152 if exception_traceback:
152 if exception_traceback:
153 log.error('Got traceback from remote call:%s', exception_traceback)
153 log.error('Got traceback from remote call:%s', exception_traceback)
154
154
155 if exception_class == 'HTTPLockedRC':
155 if exception_class == 'HTTPLockedRC':
156 raise exceptions.RepositoryLockedException()(*result['exception_args'])
156 raise exceptions.RepositoryLockedException()(*result['exception_args'])
157 elif exception_class == 'HTTPBranchProtected':
157 elif exception_class == 'HTTPBranchProtected':
158 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
158 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
159 elif exception_class == 'RepositoryError':
159 elif exception_class == 'RepositoryError':
160 raise exceptions.VcsException()(*result['exception_args'])
160 raise exceptions.VcsException()(*result['exception_args'])
161 elif exception_class:
161 elif exception_class:
162 raise Exception(
162 raise Exception(
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
164 )
164 )
165
165
166
166
167 def _get_hooks_client(extras):
167 def _get_hooks_client(extras):
168 hooks_uri = extras.get('hooks_uri')
168 hooks_uri = extras.get('hooks_uri')
169 is_shadow_repo = extras.get('is_shadow_repo')
169 is_shadow_repo = extras.get('is_shadow_repo')
170
170
171 if hooks_uri:
171 if hooks_uri:
172 return HooksHttpClient(extras['hooks_uri'])
172 return HooksHttpClient(extras['hooks_uri'])
173 elif is_shadow_repo:
173 elif is_shadow_repo:
174 return HooksShadowRepoClient()
174 return HooksShadowRepoClient()
175 else:
175 else:
176 return HooksDummyClient(extras['hooks_module'])
176 return HooksDummyClient(extras['hooks_module'])
177
177
178
178
179 def _call_hook(hook_name, extras, writer):
179 def _call_hook(hook_name, extras, writer):
180 hooks_client = _get_hooks_client(extras)
180 hooks_client = _get_hooks_client(extras)
181 log.debug('Hooks, using client:%s', hooks_client)
181 log.debug('Hooks, using client:%s', hooks_client)
182 result = hooks_client(hook_name, extras)
182 result = hooks_client(hook_name, extras)
183 log.debug('Hooks got result: %s', result)
183 log.debug('Hooks got result: %s', result)
184 _handle_exception(result)
184 _handle_exception(result)
185 writer.write(result['output'])
185 writer.write(result['output'])
186
186
187 return result['status']
187 return result['status']
188
188
189
189
190 def _extras_from_ui(ui):
190 def _extras_from_ui(ui):
191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
192 if not hook_data:
192 if not hook_data:
193 # maybe it's inside environ ?
193 # maybe it's inside environ ?
194 env_hook_data = os.environ.get('RC_SCM_DATA')
194 env_hook_data = os.environ.get('RC_SCM_DATA')
195 if env_hook_data:
195 if env_hook_data:
196 hook_data = env_hook_data
196 hook_data = env_hook_data
197
197
198 extras = {}
198 extras = {}
199 if hook_data:
199 if hook_data:
200 extras = json.loads(hook_data)
200 extras = json.loads(hook_data)
201 return extras
201 return extras
202
202
203
203
204 def _rev_range_hash(repo, node, check_heads=False):
204 def _rev_range_hash(repo, node, check_heads=False):
205 from vcsserver.hgcompat import get_ctx
205 from vcsserver.hgcompat import get_ctx
206
206
207 commits = []
207 commits = []
208 revs = []
208 revs = []
209 start = get_ctx(repo, node).rev()
209 start = get_ctx(repo, node).rev()
210 end = len(repo)
210 end = len(repo)
211 for rev in range(start, end):
211 for rev in range(start, end):
212 revs.append(rev)
212 revs.append(rev)
213 ctx = get_ctx(repo, rev)
213 ctx = get_ctx(repo, rev)
214 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
214 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
215 branch = safe_str(ctx.branch())
215 branch = safe_str(ctx.branch())
216 commits.append((commit_id, branch))
216 commits.append((commit_id, branch))
217
217
218 parent_heads = []
218 parent_heads = []
219 if check_heads:
219 if check_heads:
220 parent_heads = _check_heads(repo, start, end, revs)
220 parent_heads = _check_heads(repo, start, end, revs)
221 return commits, parent_heads
221 return commits, parent_heads
222
222
223
223
224 def _check_heads(repo, start, end, commits):
224 def _check_heads(repo, start, end, commits):
225 from vcsserver.hgcompat import get_ctx
225 from vcsserver.hgcompat import get_ctx
226 changelog = repo.changelog
226 changelog = repo.changelog
227 parents = set()
227 parents = set()
228
228
229 for new_rev in commits:
229 for new_rev in commits:
230 for p in changelog.parentrevs(new_rev):
230 for p in changelog.parentrevs(new_rev):
231 if p == mercurial.node.nullrev:
231 if p == mercurial.node.nullrev:
232 continue
232 continue
233 if p < start:
233 if p < start:
234 parents.add(p)
234 parents.add(p)
235
235
236 for p in parents:
236 for p in parents:
237 branch = get_ctx(repo, p).branch()
237 branch = get_ctx(repo, p).branch()
238 # The heads descending from that parent, on the same branch
238 # The heads descending from that parent, on the same branch
239 parent_heads = {p}
239 parent_heads = {p}
240 reachable = {p}
240 reachable = {p}
241 for x in range(p + 1, end):
241 for x in range(p + 1, end):
242 if get_ctx(repo, x).branch() != branch:
242 if get_ctx(repo, x).branch() != branch:
243 continue
243 continue
244 for pp in changelog.parentrevs(x):
244 for pp in changelog.parentrevs(x):
245 if pp in reachable:
245 if pp in reachable:
246 reachable.add(x)
246 reachable.add(x)
247 parent_heads.discard(pp)
247 parent_heads.discard(pp)
248 parent_heads.add(x)
248 parent_heads.add(x)
249 # More than one head? Suggest merging
249 # More than one head? Suggest merging
250 if len(parent_heads) > 1:
250 if len(parent_heads) > 1:
251 return list(parent_heads)
251 return list(parent_heads)
252
252
253 return []
253 return []
254
254
255
255
256 def _get_git_env():
256 def _get_git_env():
257 env = {}
257 env = {}
258 for k, v in os.environ.items():
258 for k, v in os.environ.items():
259 if k.startswith('GIT'):
259 if k.startswith('GIT'):
260 env[k] = v
260 env[k] = v
261
261
262 # serialized version
262 # serialized version
263 return [(k, v) for k, v in env.items()]
263 return [(k, v) for k, v in env.items()]
264
264
265
265
266 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
266 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
267 env = {}
267 env = {}
268 for k, v in os.environ.items():
268 for k, v in os.environ.items():
269 if k.startswith('HG'):
269 if k.startswith('HG'):
270 env[k] = v
270 env[k] = v
271
271
272 env['HG_NODE'] = old_rev
272 env['HG_NODE'] = old_rev
273 env['HG_NODE_LAST'] = new_rev
273 env['HG_NODE_LAST'] = new_rev
274 env['HG_TXNID'] = txnid
274 env['HG_TXNID'] = txnid
275 env['HG_PENDING'] = repo_path
275 env['HG_PENDING'] = repo_path
276
276
277 return [(k, v) for k, v in env.items()]
277 return [(k, v) for k, v in env.items()]
278
278
279
279
280 def repo_size(ui, repo, **kwargs):
280 def repo_size(ui, repo, **kwargs):
281 extras = _extras_from_ui(ui)
281 extras = _extras_from_ui(ui)
282 return _call_hook('repo_size', extras, HgMessageWriter(ui))
282 return _call_hook('repo_size', extras, HgMessageWriter(ui))
283
283
284
284
285 def pre_pull(ui, repo, **kwargs):
285 def pre_pull(ui, repo, **kwargs):
286 extras = _extras_from_ui(ui)
286 extras = _extras_from_ui(ui)
287 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
287 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
288
288
289
289
290 def pre_pull_ssh(ui, repo, **kwargs):
290 def pre_pull_ssh(ui, repo, **kwargs):
291 extras = _extras_from_ui(ui)
291 extras = _extras_from_ui(ui)
292 if extras and extras.get('SSH'):
292 if extras and extras.get('SSH'):
293 return pre_pull(ui, repo, **kwargs)
293 return pre_pull(ui, repo, **kwargs)
294 return 0
294 return 0
295
295
296
296
297 def post_pull(ui, repo, **kwargs):
297 def post_pull(ui, repo, **kwargs):
298 extras = _extras_from_ui(ui)
298 extras = _extras_from_ui(ui)
299 return _call_hook('post_pull', extras, HgMessageWriter(ui))
299 return _call_hook('post_pull', extras, HgMessageWriter(ui))
300
300
301
301
302 def post_pull_ssh(ui, repo, **kwargs):
302 def post_pull_ssh(ui, repo, **kwargs):
303 extras = _extras_from_ui(ui)
303 extras = _extras_from_ui(ui)
304 if extras and extras.get('SSH'):
304 if extras and extras.get('SSH'):
305 return post_pull(ui, repo, **kwargs)
305 return post_pull(ui, repo, **kwargs)
306 return 0
306 return 0
307
307
308
308
def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook.

    For 'pretxnchangegroup' invocations this collects the pushed commits
    grouped by branch and builds per-branch ref entries (with a Mercurial
    env snapshot for each) before firing the RhodeCode 'pre_push' callback.
    For any other hook type only the callback is fired, with empty
    ``commit_ids``.
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    hook_type: str = safe_str(kwargs.get('hooktype'))

    if node and hook_type == 'pretxnchangegroup':
        # group all incoming commit hashes by their branch name
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, commits in branches.items():
            # 'node_last' marks the pre-existing tip; fall back to the first
            # incoming commit when it is absent (e.g. brand new branch)
            old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
            rev_data.append({
                'total_commits': len(commits),
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            # _heads is non-empty when check_heads detected a forced push
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            # env snapshot used later to re-enter the repo for this txn
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
                repo_path=repo_path)

    extras['hook_type'] = hook_type or 'pre_push'
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))
350
350
351
351
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH operations.

    Delegates to :func:`pre_push` only for SSH-initiated operations;
    otherwise returns 0 (success).
    """
    if _extras_from_ui(ui).get('SSH'):
        return pre_push(ui, repo, node, **kwargs)
    return 0
358
358
359
359
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push auth hook for SSH.

    Grants the push (return 0) only when the SSH session carries a
    write-level repository permission; any other SSH permission is
    rejected with a non-zero status. Non-SSH invocations pass through.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code rejects the push
    return 1
375
375
376
376
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook.

    Collects the pushed commit hashes and their (deduplicated) branch
    names, picks up any bookmarks stashed on the ui by :func:`key_push`,
    and fires the RhodeCode 'post_push' callback.
    """
    extras = _extras_from_ui(ui)
    hook_type: str = safe_str(kwargs.get('hooktype'))

    commit_ids = []
    branches = []
    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # bookmarks propagated from the pushkey hook, if any were recorded
    bookmarks = getattr(ui, '_rc_pushkey_bookmarks', [])

    extras['hook_type'] = hook_type or 'post_push'
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': [],
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
408
408
409
409
def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH operations.

    Delegates to :func:`post_push` only for SSH-initiated operations;
    otherwise returns 0 (success).
    """
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        return post_push(ui, repo, node, **kwargs)
    return 0
417
417
418
418
def key_push(ui, repo, **kwargs):
    """
    Mercurial pushkey hook.

    When a bookmark is created/moved (new value != b'0' in the 'bookmarks'
    namespace), remember the bookmarks of the target changeset on the ui
    object so :func:`post_push` can report them later.
    """
    from vcsserver.hgcompat import get_ctx

    is_bookmark_update = (
        kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks')
    if is_bookmark_update:
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
    return
426
426
427
427
# backward compat: the old hook entry-point names are kept as plain aliases
# so installed hook scripts that still reference them keep working
log_pull_action = post_pull
log_push_action = post_push
433
433
434
434
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    Intentionally a no-op; all arguments are ignored.
    """
442
442
443
443
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    Intentionally a no-op; all arguments are ignored.
    """
451
451
452
452
@dataclasses.dataclass
class HookResponse:
    """Result of running a git hook: exit status plus captured output."""
    # numeric exit status of the hook; 0 means success
    status: int
    # textual output produced while the hook ran
    output: str
457
457
458
458
def git_pre_pull(extras) -> HookResponse:
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: hook response carrying the status code (0 for success) and
        any captured output.
    """
    if 'pull' not in extras['hooks']:
        # pull hook not enabled for this repo -> nothing to do
        return HookResponse(0, '')

    captured = io.StringIO()
    try:
        status_code = _call_hook('pre_pull', extras, GitMessageWriter(captured))
    except Exception as error:
        log.exception('Failed to call pre_pull hook')
        status_code = 128
        captured.write(f'ERROR: {error}\n')

    return HookResponse(status_code, captured.getvalue())
483
483
484
484
def git_post_pull(extras) -> HookResponse:
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: hook response carrying the status code (0 for success) and
        any captured output.
    """
    if 'pull' not in extras['hooks']:
        # pull hook not enabled for this repo -> nothing to do
        return HookResponse(0, '')

    stdout = io.StringIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        # consistency with git_pre_pull: record the traceback in the logs,
        # not only in the hook output sent back to the client
        log.exception('Failed to call post_pull hook')
        status = 128
        stdout.write(f'ERROR: {error}\n')

    return HookResponse(status, stdout.getvalue())
506
506
507
507
508 def _parse_git_ref_lines(revision_lines):
508 def _parse_git_ref_lines(revision_lines):
509 rev_data = []
509 rev_data = []
510 for revision_line in revision_lines or []:
510 for revision_line in revision_lines or []:
511 old_rev, new_rev, ref = revision_line.strip().split(' ')
511 old_rev, new_rev, ref = revision_line.strip().split(' ')
512 ref_data = ref.split('/', 2)
512 ref_data = ref.split('/', 2)
513 if ref_data[1] in ('tags', 'heads'):
513 if ref_data[1] in ('tags', 'heads'):
514 rev_data.append({
514 rev_data.append({
515 # NOTE(marcink):
515 # NOTE(marcink):
516 # we're unable to tell total_commits for git at this point
516 # we're unable to tell total_commits for git at this point
517 # but we set the variable for consistency with GIT
517 # but we set the variable for consistency with GIT
518 'total_commits': -1,
518 'total_commits': -1,
519 'old_rev': old_rev,
519 'old_rev': old_rev,
520 'new_rev': new_rev,
520 'new_rev': new_rev,
521 'ref': ref,
521 'ref': ref,
522 'type': ref_data[1],
522 'type': ref_data[1],
523 'name': ref_data[2],
523 'name': ref_data[2],
524 })
524 })
525 return rev_data
525 return rev_data
526
526
527
527
def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Pre push hook.

    Parses the pushed refs, optionally detects forced pushes via
    ``git rev-list`` and fires the RhodeCode 'pre_push' callback.

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        # push hook not enabled for this repo -> nothing to do
        return 0

    empty_commit_id = '0' * 40
    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        is_new_branch = push_ref['old_rev'] == empty_commit_id
        is_deleted_branch = push_ref['new_rev'] == empty_commit_id
        if push_ref['type'] != 'heads' or is_new_branch or is_deleted_branch:
            continue

        # commits reachable from old_rev but not from new_rev exist only
        # when history was rewritten, i.e. a forced push happened
        cmd = [settings.GIT_EXECUTABLE, 'rev-list',
               push_ref['old_rev'], '^{}'.format(push_ref['new_rev'])]
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        if stdout:
            push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, GitMessageWriter(sys.stdout))
570
570
571
571
def git_post_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Post push hook.

    Parses the pushed refs, expands each branch update into the list of
    new commit hashes (plus 'delete_branch=>' / 'tag=>' markers), and
    fires the RhodeCode 'post_push' (and optionally 'repo_size') callback.

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        # push hook not enabled for this repo -> nothing to do
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            # starting new branch case
            if push_ref['old_rev'] == empty_commit_id:
                push_ref_name = push_ref['name']

                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                # a bare repo without HEAD yet (first push) needs its HEAD
                # pointed at the newly created branch
                need_head_set = ''
                with Repository(os.getcwd()) as repo:
                    try:
                        repo.head
                    except pygit2.GitError:
                        need_head_set = f'refs/heads/{push_ref_name}'

                    if need_head_set:
                        repo.set_head(need_head_set)
                        print(f"Setting default branch to {push_ref_name}")

                # list all other branch heads, then log the commits of the
                # new branch that are not reachable from any of them
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = safe_str(stdout)
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(list(map(ascii_str, stdout.splitlines())))

            # delete branch case
            elif push_ref['new_rev'] == empty_commit_id:
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                # regular update of an existing branch
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                # we get bytes from stdout, we need str to be consistent
                log_revs = list(map(ascii_str, stdout.splitlines()))
                git_revs.extend(log_revs)

                # Pure pygit2 impl. but still 2-3x slower :/
                # results = []
                #
                # with Repository(os.getcwd()) as repo:
                #     repo_new_rev = repo[push_ref['new_rev']]
                #     repo_old_rev = repo[push_ref['old_rev']]
                #     walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
                #
                #     for commit in walker:
                #         if commit.id == repo_old_rev.id:
                #             break
                #         results.append(commit.id.hex)
                #     # reverse the order, can't use GIT_SORT_REVERSE
                #     log_revs = results[::-1]

        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    stdout = sys.stdout

    # repo_size hook is best-effort: failures must not block the push
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter(stdout))
        except Exception:
            pass

    status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
    return status_code
682
682
683
683
def _get_extras_from_txn_id(path, txn_id):
    """
    Read the 'rc-scm-extras' revprop stored on an uncommitted svn
    transaction and decode it (urlsafe-base64 encoded JSON).

    Returns an empty dict when the lookup or decoding fails.
    """
    extras = {}
    try:
        lookup_cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
                      '-t', txn_id,
                      '--revprop', path, 'rc-scm-extras']
        stdout, stderr = subprocessio.run_command(
            lookup_cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return extras
697
697
698
698
def _get_extras_from_commit_id(commit_id, path):
    """
    Read the 'rc-scm-extras' revprop stored on a committed svn revision
    and decode it (urlsafe-base64 encoded JSON).

    Returns an empty dict when the lookup or decoding fails.
    """
    extras = {}
    try:
        lookup_cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
                      '-r', commit_id,
                      '--revprop', path, 'rc-scm-extras']
        stdout, stderr = subprocessio.run_command(
            lookup_cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return extras
712
712
713
713
def svn_pre_commit(repo_path, commit_data, env):
    """
    Subversion pre-commit hook: fires the RhodeCode 'pre_push' callback
    for the pending transaction.

    ``commit_data`` is the ``(path, txn_id)`` pair of the transaction.
    Returns 0 when no extras can be resolved (hook effectively disabled).
    """
    path, txn_id = commit_data

    raw_scm_data = env.get('RC_SCM_DATA')
    if raw_scm_data:
        extras = json.loads(raw_scm_data)
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
        if not extras:
            return 0

    extras['hook_type'] = 'pre_commit'
    extras['commit_ids'] = [txn_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'total_commits': 1,
        'branches': [],
        'bookmarks': [],
        'tags': [],
    }

    return _call_hook('pre_push', extras, SvnMessageWriter())
738
738
739
739
def svn_post_commit(repo_path, commit_data, env):
    """
    Subversion post-commit hook: fires the RhodeCode 'post_push' (and
    optionally 'repo_size') callback for the committed revision.

    :param commit_data: ``(path, rev, txn_id)`` or legacy ``(path, rev)``.
    :raises ValueError: when ``commit_data`` has an unexpected length
        (previously this fell through and crashed later with an opaque
        ``NameError`` on the unbound ``path`` variable).
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        # fail loudly with a clear message instead of a NameError below
        raise ValueError(
            f'Invalid commit_data, expected 2 or 3 elements, got {len(commit_data)}')

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
        if not extras:
            return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': [],
        'bookmarks': [],
        'tags': [],
        'total_commits': 1,
    }

    # repo_size hook is best-effort: failures must not block the commit
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,777 +1,777 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import time
24 import time
25 import wsgiref.util
25 import wsgiref.util
26 import traceback
26 import traceback
27 import tempfile
27 import tempfile
28 import psutil
28 import psutil
29
29
30 from itertools import chain
30 from itertools import chain
31
31
32 import msgpack
32 import msgpack
33 import configparser
33 import configparser
34
34
35 from pyramid.config import Configurator
35 from pyramid.config import Configurator
36 from pyramid.wsgi import wsgiapp
36 from pyramid.wsgi import wsgiapp
37 from pyramid.response import Response
37 from pyramid.response import Response
38
38
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
40 from vcsserver.lib.rc_json import json
40 from vcsserver.lib.rc_json import json
41 from vcsserver.config.settings_maker import SettingsMaker
41 from vcsserver.config.settings_maker import SettingsMaker
42 from vcsserver.str_utils import safe_int
42 from vcsserver.str_utils import safe_int
43 from vcsserver.lib.statsd_client import StatsdClient
43 from vcsserver.lib.statsd_client import StatsdClient
44 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
44 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
# due to Mercurial/glibc2.27 problems we need to detect if locale settings are
# causing problems and "fix" it in case they do and fallback to LC_ALL = C

try:
    # an empty string asks setlocale to use the environment's locale
    locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
    log.error(
        'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
    # exported so child processes (hg subprocesses) inherit the safe locale
    os.environ['LC_ALL'] = 'C'
57
57
58
58
59 import vcsserver
59 import vcsserver
60 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
60 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
61 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
61 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
62 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
62 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
63 from vcsserver.echo_stub.echo_app import EchoApp
63 from vcsserver.echo_stub.echo_app import EchoApp
64 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
64 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
65 from vcsserver.lib.exc_tracking import store_exception
65 from vcsserver.lib.exc_tracking import store_exception
66 from vcsserver.server import VcsServer
66 from vcsserver.server import VcsServer
67
67
# when strict mode is on, a failure to import any vcs backend aborts startup
# instead of silently disabling that backend
strict_vcs = True

# each backend import is guarded: on failure the factory/remote names are set
# to None (checked later by VCS.__init__) and the original error is kept for
# logging; under strict_vcs the ImportError is re-raised immediately
git_import_err = None
try:
    from vcsserver.remote.git import GitFactory, GitRemote
except ImportError as e:
    GitFactory = None
    GitRemote = None
    git_import_err = e
    if strict_vcs:
        raise


hg_import_err = None
try:
    from vcsserver.remote.hg import MercurialFactory, HgRemote
except ImportError as e:
    MercurialFactory = None
    HgRemote = None
    hg_import_err = e
    if strict_vcs:
        raise


svn_import_err = None
try:
    from vcsserver.remote.svn import SubversionFactory, SvnRemote
except ImportError as e:
    SubversionFactory = None
    SvnRemote = None
    svn_import_err = e
    if strict_vcs:
        raise
101
101
102
102
103 def _is_request_chunked(environ):
103 def _is_request_chunked(environ):
104 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
104 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
105 return stream
105 return stream
106
106
107
107
def log_max_fd():
    """Best-effort: log the hard limit of open file descriptors for this process.

    Never raises; any failure (psutil missing, rlimit unsupported) is ignored
    because this is purely informational.
    """
    try:
        hard_limit = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
        log.info('Max file descriptors value: %s', hard_limit)
    except Exception:
        pass
114
114
115
115
class VCS(object):
    """Container that instantiates one remote object per available VCS
    backend (git/hg/svn) plus the generic VcsServer remote.

    Backends whose import failed at module load (see the import guards
    above) are skipped and the captured import error is logged; in that
    case the corresponding ``_*_remote`` attribute is never set.
    """

    def __init__(self, locale_conf=None, cache_config=None):
        # locale_conf: locale string passed to locale.setlocale; empty/None
        # means "derive from environment variables".
        self.locale = locale_conf
        self.cache_config = cache_config
        self._configure_locale()

        log_max_fd()

        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.error("Git client import failed: %s", git_import_err)

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.error("Mercurial client import failed: %s", hg_import_err)

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.error("Subversion client import failed: %s", svn_import_err)

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Set LC_ALL from self.locale, or from the environment when unset.

        Failures are logged and otherwise ignored, leaving the locale
        subsystem unconfigured.
        """
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info('Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception('Cannot set locale, not configuring the locale system')
164
164
165
165
class WsgiProxy(object):
    """Expose a remote-wsgi handler as a WSGI app speaking a msgpack-framed
    protocol.

    The request body is one msgpack map with keys ``environment``,
    ``input_data``, ``args`` and ``kwargs``; the response body is a stream
    of msgpack frames: ``error``, ``status``, ``headers``, then one frame
    per chunk of payload data.
    """

    def __init__(self, wsgi):
        # `wsgi`: object with a handle(environment, input_data, *args,
        # **kwargs) method returning (data, status, headers).
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        input_data = environ['wsgi.input'].read()
        input_data = msgpack.unpackb(input_data)

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                input_data['environment'], input_data['input_data'],
                *input_data['args'], **input_data['kwargs'])
        except Exception as e:
            # Errors are serialized into the response stream rather than
            # propagated; the peer inspects `_vcs_kind` to re-raise.
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        # NOTE(review): PEP 3333 requires a status *string* (e.g. '200 OK')
        # and a list of (name, value) tuples here; `200, {}` relies on the
        # hosting layer tolerating it -- confirm before changing servers.
        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        # The first three frames carry call metadata ...
        initial_data = [
            error,
            status,
            headers,
        ]

        # ... followed by one msgpack frame per payload chunk.
        for d in chain(initial_data, data):
            yield msgpack.packb(d)
198
198
199
199
def not_found(request):
    """Pyramid notfound view: return a JSON-serializable 404 marker."""
    status = '404 NOT FOUND'
    return {'status': status}
202
202
203
203
class VCSViewPredicate(object):
    """Pyramid view predicate: match the `backend` route segment against
    the mapping of configured remote objects."""

    def __init__(self, val, config):
        # `config` is required by the pyramid predicate API but unused.
        self.remotes = val

    def text(self):
        # Human-readable predicate description used in introspection.
        return 'vcs view method = {}'.format(list(self.remotes.keys()))

    # pyramid uses phash() for predicate uniqueness; the text is unique here.
    phash = text

    def __call__(self, context, request):
        """Return True when the requested backend has a registered remote."""
        return request.matchdict.get('backend') in self.remotes
220
220
221
221
class HTTPApplication(object):
    """Pyramid-based WSGI application exposing the VCS remote objects
    (git/hg/svn/server) as msgpack RPC endpoints plus streaming
    clone/push handlers."""

    # Exception class names forwarded verbatim to the client in the error
    # payload's `type` field; everything else is reported as None.
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    # Real remote-wsgi implementation; swapped for the echo stub when
    # `dev.use_echo_app` is enabled.
    remote_wsgi = remote_wsgi
    _use_echo_app = False
227
227
    def __init__(self, settings=None, global_config=None):
        """Assemble the application.

        :param settings: the `app:main` settings dict (pyramid settings).
        :param global_config: paste global config; expected to contain
            `__file__` (see service_view).
        """
        self.config = Configurator(settings=settings)
        # Init our statsd at very start
        self.config.registry.statsd = StatsdClient.statsd
        self.config.registry.vcs_call_context = {}

        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')
        self.config.include('vcsserver.lib.rc_cache.archive_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        # NOTE(review): VCS only sets these attributes when the backend
        # imported cleanly; with strict_vcs a failed import raises earlier,
        # so the lookups below are safe in practice -- confirm if strict_vcs
        # is ever disabled.
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        self._configure_settings(global_config, settings)

        self._configure()
255
255
    def _configure_settings(self, global_config, app_settings):
        """
        Configure the settings module.

        Merges the paste global config with the app settings, pushes the
        optional `git_path` / `core.binary_dir` options into the
        module-level `settings` object, and publishes the merged dict on
        the `vcsserver` package so other modules can read it.
        """
        settings_merged = global_config.copy()
        settings_merged.update(app_settings)

        git_path = app_settings.get('git_path', None)
        if git_path:
            settings.GIT_EXECUTABLE = git_path
        binary_dir = app_settings.get('core.binary_dir', None)
        if binary_dir:
            settings.BINARY_DIR = binary_dir

        # Store the settings to make them available to other modules.
        vcsserver.PYRAMID_SETTINGS = settings_merged
        vcsserver.CONFIG = settings_merged
273
273
    def _configure(self):
        """Register renderers, routes, views, predicates and tweens on the
        pyramid Configurator."""
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        # `vcs_view=` is the custom predicate registered below: the view
        # only matches when the backend segment names a configured remote.
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # Catch-all exception view that translates VCS errors to HTTP.
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')
317
317
318 def wsgi_app(self):
318 def wsgi_app(self):
319 return self.config.make_wsgi_app()
319 return self.config.make_wsgi_app()
320
320
    def _vcs_view_params(self, request):
        """Decode a msgpack RPC request and resolve its target remote.

        The request body is a msgpack map with `method` and `params`
        (`wire`, `args`, `kwargs`); when a wire is present it is inserted
        as the first positional argument (mutating `args` in place) and
        its `context` value is normalized to a uuid.UUID.

        :returns: (payload, remote, method, args, kwargs) tuple.
        """
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)

        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        # Expose call metadata to the tween/statsd layers via the registry.
        request.registry.vcs_call_context = {
            'method': method,
            'repo_name': payload.get('_repo_name'),
        }

        if wire:
            try:
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                # wire without a context is allowed; leave context_uid None
                pass
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            # also we SKIP printing out any of those methods args since they maybe excessive
            just_args_methods = {
                'commitctx': ('content', 'removed', 'updated'),
                'commit': ('content', 'removed', 'updated')
            }
            if method in just_args_methods:
                skip_args = just_args_methods[method]
                call_args = ''
                call_kwargs = {}
                for k in kwargs:
                    if k in skip_args:
                        # replace our skip key with dummy
                        call_kwargs[k] = f'RemovedParam({k})'
                    else:
                        call_kwargs[k] = kwargs[k]
            else:
                call_args = args[1:]
                call_kwargs = kwargs

            log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, call_kwargs, context_uid, repo_state_uid)

        statsd = request.registry.statsd
        if statsd:
            statsd.incr(
                'vcsserver_method_total', tags=[
                    f"method:{method}",
                ])
        return payload, remote, method, args, kwargs
376
376
    def vcs_view(self, request):
        """Execute a remote RPC method and wrap its result (or the raised
        exception) in a msgpack-renderable response dict.

        On success: ``{'id': ..., 'result': ...}``.
        On failure: ``{'id': ..., 'error': {...}}`` with message, traceback,
        original-exception info, an optional whitelisted `type`, and the
        `_vcs_kind` marker used by handle_vcs_exception on the client side.
        """
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            # Remotes may wrap the real error and attach it as `_org_exc`.
            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                # NOTE: this closure ignores its `_exc_obj` argument and
                # reads `org_exc`/`org_exc_name` from the enclosing scope.
                def get_exc_fqn(_exc_obj):
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # Expected lookup failures are noisy and not worth storing.
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_traceback))

            # Only whitelisted exception types are reported by name.
            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': str(e),
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }

            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }
        log.debug('Serving data for method %s', method)
        return resp
442
442
443 def vcs_stream_view(self, request):
443 def vcs_stream_view(self, request):
444 payload, remote, method, args, kwargs = self._vcs_view_params(request)
444 payload, remote, method, args, kwargs = self._vcs_view_params(request)
445 # this method has a stream: marker we remove it here
445 # this method has a stream: marker we remove it here
446 method = method.split('stream:')[-1]
446 method = method.split('stream:')[-1]
447 chunk_size = safe_int(payload.get('chunk_size')) or 4096
447 chunk_size = safe_int(payload.get('chunk_size')) or 4096
448
448
449 try:
449 try:
450 resp = getattr(remote, method)(*args, **kwargs)
450 resp = getattr(remote, method)(*args, **kwargs)
451 except Exception as e:
451 except Exception as e:
452 raise
452 raise
453
453
454 def get_chunked_data(method_resp):
454 def get_chunked_data(method_resp):
455 stream = io.BytesIO(method_resp)
455 stream = io.BytesIO(method_resp)
456 while 1:
456 while 1:
457 chunk = stream.read(chunk_size)
457 chunk = stream.read(chunk_size)
458 if not chunk:
458 if not chunk:
459 break
459 break
460 yield chunk
460 yield chunk
461
461
462 response = Response(app_iter=get_chunked_data(resp))
462 response = Response(app_iter=get_chunked_data(resp))
463 response.content_type = 'application/octet-stream'
463 response.content_type = 'application/octet-stream'
464
464
465 return response
465 return response
466
466
467 def status_view(self, request):
467 def status_view(self, request):
468 import vcsserver
468 import vcsserver
469 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
469 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
470 'pid': os.getpid()}
470 'pid': os.getpid()}
471
471
    def service_view(self, request):
        """Diagnostic endpoint: echo the payload back together with the
        server version, the parsed .ini sections and the process environ.

        Reading the .ini file is best-effort; on failure the config dicts
        stay empty and the error is only logged.
        """
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            # `__file__` points at the .ini the server was started with.
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            log.exception('Failed to read .ini file for display')

        environ = list(os.environ.items())

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp
505
505
    def _msgpack_renderer_factory(self, info):
        """Pyramid renderer factory serializing view results as msgpack.

        Results wrapped in Bytes/BinaryEnvelope switch the renderer to
        binary mode (`use_bin_type=True`) and the content type to
        `application/x-msgpack-bin`.
        """

        def _render(value, system):
            bin_type = False
            res = value.get('result')
            if isinstance(res, BytesEnvelope):
                # NOTE(review): unlike the BinaryEnvelope branch below,
                # `value['result']` is NOT unwrapped here -- presumably
                # msgpack can pack the envelope directly; confirm.
                log.debug('Result is wrapped in BytesEnvelope type')
                bin_type = True
            elif isinstance(res, BinaryEnvelope):
                log.debug('Result is wrapped in BinaryEnvelope type')
                value['result'] = res.val
                bin_type = True

            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # Only override a content type the view did not set itself.
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
                if bin_type:
                    response.content_type = 'application/x-msgpack-bin'

            return msgpack.packb(value, use_bin_type=bin_type)
        return _render
530
530
    def set_env_from_config(self, environ, config):
        """Copy RhodeCode call metadata from the repo config into the WSGI
        environ (REMOTE_USER/HGUSER/REMOTE_HOST) and flag chunked input.

        Any failure while reading the config is logged and aborts the
        update silently (best-effort).
        """
        dict_conf = {}
        try:
            # config iterates (section, key, value)-style tuples; the
            # 'rhodecode' entry carries a JSON blob in elem[2] --
            # presumably matching the rhodecode Config object; verify.
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True
555
555
556 def hg_proxy(self):
556 def hg_proxy(self):
557 @wsgiapp
557 @wsgiapp
558 def _hg_proxy(environ, start_response):
558 def _hg_proxy(environ, start_response):
559 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
559 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
560 return app(environ, start_response)
560 return app(environ, start_response)
561 return _hg_proxy
561 return _hg_proxy
562
562
563 def git_proxy(self):
563 def git_proxy(self):
564 @wsgiapp
564 @wsgiapp
565 def _git_proxy(environ, start_response):
565 def _git_proxy(environ, start_response):
566 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
566 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
567 return app(environ, start_response)
567 return app(environ, start_response)
568 return _git_proxy
568 return _git_proxy
569
569
    def hg_stream(self):
        """Return the WSGI app serving hg clone/push traffic via hgweb,
        or an echo stub when dev.use_echo_app is enabled."""
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                # Repo location/config travel in custom request headers.
                call_context = get_headers_call_context(environ)

                repo_path = call_context['repo_path']
                repo_name = call_context['repo_name']
                config = call_context['repo_config']

                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = call_context['path_info']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                return app(environ, ResponseFilter(start_response))
            return _hg_stream
599
599
    def git_stream(self):
        """Return the WSGI app serving git clone/push traffic (dispatching
        git-lfs requests to the LFS app), or an echo stub in dev mode."""
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')

                # Repo location/config travel in custom request headers.
                call_context = get_headers_call_context(environ)

                repo_path = call_context['repo_path']
                repo_name = call_context['repo_name']
                config = call_context['repo_config']

                environ['PATH_INFO'] = call_context['path_info']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                # LFS detection: primarily by content type, with a
                # path-pattern fallback below.
                path = environ['PATH_INFO']
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                        log.debug(
                            'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                            path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

            return _git_stream
651
651
652 def handle_vcs_exception(self, exception, request):
652 def handle_vcs_exception(self, exception, request):
653 _vcs_kind = getattr(exception, '_vcs_kind', '')
653 _vcs_kind = getattr(exception, '_vcs_kind', '')
654
654
655 if _vcs_kind == 'repo_locked':
655 if _vcs_kind == 'repo_locked':
656 headers_call_context = get_headers_call_context(request.environ)
656 headers_call_context = get_headers_call_context(request.environ)
657 status_code = safe_int(headers_call_context['locked_status_code'])
657 status_code = safe_int(headers_call_context['locked_status_code'])
658
658
659 return HTTPRepoLocked(
659 return HTTPRepoLocked(
660 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
660 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
661
661
662 elif _vcs_kind == 'repo_branch_protected':
662 elif _vcs_kind == 'repo_branch_protected':
663 # Get custom repo-branch-protected status code if present.
663 # Get custom repo-branch-protected status code if present.
664 return HTTPRepoBranchProtected(
664 return HTTPRepoBranchProtected(
665 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
665 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
666
666
667 exc_info = request.exc_info
667 exc_info = request.exc_info
668 store_exception(id(exc_info), exc_info)
668 store_exception(id(exc_info), exc_info)
669
669
670 traceback_info = 'unavailable'
670 traceback_info = 'unavailable'
671 if request.exc_info:
671 if request.exc_info:
672 exc_type, exc_value, exc_tb = request.exc_info
672 exc_type, exc_value, exc_tb = request.exc_info
673 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
673 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
674
674
675 log.error(
675 log.error(
676 'error occurred handling this request for path: %s, \n tb: %s',
676 'error occurred handling this request for path: %s, \n tb: %s',
677 request.path, traceback_info)
677 request.path, traceback_info)
678
678
679 statsd = request.registry.statsd
679 statsd = request.registry.statsd
680 if statsd:
680 if statsd:
681 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
681 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
682 statsd.incr('vcsserver_exception_total',
682 statsd.incr('vcsserver_exception_total',
683 tags=[f"type:{exc_type}"])
683 tags=[f"type:{exc_type}"])
684 raise exception
684 raise exception
685
685
686
686
687 class ResponseFilter(object):
687 class ResponseFilter(object):
688
688
689 def __init__(self, start_response):
689 def __init__(self, start_response):
690 self._start_response = start_response
690 self._start_response = start_response
691
691
692 def __call__(self, status, response_headers, exc_info=None):
692 def __call__(self, status, response_headers, exc_info=None):
693 headers = tuple(
693 headers = tuple(
694 (h, v) for h, v in response_headers
694 (h, v) for h, v in response_headers
695 if not wsgiref.util.is_hop_by_hop(h))
695 if not wsgiref.util.is_hop_by_hop(h))
696 return self._start_response(status, headers, exc_info)
696 return self._start_response(status, headers, exc_info)
697
697
698
698
699 def sanitize_settings_and_apply_defaults(global_config, settings):
699 def sanitize_settings_and_apply_defaults(global_config, settings):
700 global_settings_maker = SettingsMaker(global_config)
700 global_settings_maker = SettingsMaker(global_config)
701 settings_maker = SettingsMaker(settings)
701 settings_maker = SettingsMaker(settings)
702
702
703 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
703 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
704
704
705 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
705 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
706 settings_maker.enable_logging(logging_conf)
706 settings_maker.enable_logging(logging_conf)
707
707
708 # Default includes, possible to change as a user
708 # Default includes, possible to change as a user
709 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
709 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
710 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
710 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
711
711
712 settings_maker.make_setting('__file__', global_config.get('__file__'))
712 settings_maker.make_setting('__file__', global_config.get('__file__'))
713
713
714 settings_maker.make_setting('pyramid.default_locale_name', 'en')
714 settings_maker.make_setting('pyramid.default_locale_name', 'en')
715 settings_maker.make_setting('locale', 'en_US.UTF-8')
715 settings_maker.make_setting('locale', 'en_US.UTF-8')
716
716
717 settings_maker.make_setting('core.binary_dir', '')
717 settings_maker.make_setting('core.binary_dir', '')
718
718
719 temp_store = tempfile.gettempdir()
719 temp_store = tempfile.gettempdir()
720 default_cache_dir = os.path.join(temp_store, 'rc_cache')
720 default_cache_dir = os.path.join(temp_store, 'rc_cache')
721 # save default, cache dir, and use it for all backends later.
721 # save default, cache dir, and use it for all backends later.
722 default_cache_dir = settings_maker.make_setting(
722 default_cache_dir = settings_maker.make_setting(
723 'cache_dir',
723 'cache_dir',
724 default=default_cache_dir, default_when_empty=True,
724 default=default_cache_dir, default_when_empty=True,
725 parser='dir:ensured')
725 parser='dir:ensured')
726
726
727 # exception store cache
727 # exception store cache
728 settings_maker.make_setting(
728 settings_maker.make_setting(
729 'exception_tracker.store_path',
729 'exception_tracker.store_path',
730 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
730 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
731 parser='dir:ensured'
731 parser='dir:ensured'
732 )
732 )
733
733
734 # repo_object cache defaults
734 # repo_object cache defaults
735 settings_maker.make_setting(
735 settings_maker.make_setting(
736 'rc_cache.repo_object.backend',
736 'rc_cache.repo_object.backend',
737 default='dogpile.cache.rc.file_namespace',
737 default='dogpile.cache.rc.file_namespace',
738 parser='string')
738 parser='string')
739 settings_maker.make_setting(
739 settings_maker.make_setting(
740 'rc_cache.repo_object.expiration_time',
740 'rc_cache.repo_object.expiration_time',
741 default=30 * 24 * 60 * 60, # 30days
741 default=30 * 24 * 60 * 60, # 30days
742 parser='int')
742 parser='int')
743 settings_maker.make_setting(
743 settings_maker.make_setting(
744 'rc_cache.repo_object.arguments.filename',
744 'rc_cache.repo_object.arguments.filename',
745 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
745 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
746 parser='string')
746 parser='string')
747
747
748 # statsd
748 # statsd
749 settings_maker.make_setting('statsd.enabled', False, parser='bool')
749 settings_maker.make_setting('statsd.enabled', False, parser='bool')
750 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
750 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
751 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
751 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
752 settings_maker.make_setting('statsd.statsd_prefix', '')
752 settings_maker.make_setting('statsd.statsd_prefix', '')
753 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
753 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
754
754
755 settings_maker.env_expand()
755 settings_maker.env_expand()
756
756
757
757
758 def main(global_config, **settings):
758 def main(global_config, **settings):
759 start_time = time.time()
759 start_time = time.time()
760 log.info('Pyramid app config starting')
760 log.info('Pyramid app config starting')
761
761
762 if MercurialFactory:
762 if MercurialFactory:
763 hgpatches.patch_largefiles_capabilities()
763 hgpatches.patch_largefiles_capabilities()
764 hgpatches.patch_subrepo_type_mapping()
764 hgpatches.patch_subrepo_type_mapping()
765
765
766 # Fill in and sanitize the defaults & do ENV expansion
766 # Fill in and sanitize the defaults & do ENV expansion
767 sanitize_settings_and_apply_defaults(global_config, settings)
767 sanitize_settings_and_apply_defaults(global_config, settings)
768
768
769 # init and bootstrap StatsdClient
769 # init and bootstrap StatsdClient
770 StatsdClient.setup(settings)
770 StatsdClient.setup(settings)
771
771
772 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
772 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
773 total_time = time.time() - start_time
773 total_time = time.time() - start_time
774 log.info('Pyramid app created and configured in %.2fs', total_time)
774 log.info('Pyramid app created and configured in %.2fs', total_time)
775 return pyramid_app
775 return pyramid_app
776
776
777
777
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,26 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 # This package contains non rhodecode licensed packages that are
18 # This package contains non rhodecode licensed packages that are
19 # vendored for various reasons
19 # vendored for various reasons
20
20
21 import os
21 import os
22 import sys
22 import sys
23
23
24 vendor_dir = os.path.abspath(os.path.dirname(__file__))
24 vendor_dir = os.path.abspath(os.path.dirname(__file__))
25
25
26 sys.path.append(vendor_dir)
26 sys.path.append(vendor_dir)
@@ -1,172 +1,172 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import time
20 import time
21 import datetime
21 import datetime
22 import msgpack
22 import msgpack
23 import logging
23 import logging
24 import traceback
24 import traceback
25 import tempfile
25 import tempfile
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
29 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
30 global_prefix = 'vcsserver'
30 global_prefix = 'vcsserver'
31 exc_store_dir_name = 'rc_exception_store_v1'
31 exc_store_dir_name = 'rc_exception_store_v1'
32
32
33
33
34 def exc_serialize(exc_id, tb, exc_type):
34 def exc_serialize(exc_id, tb, exc_type):
35
35
36 data = {
36 data = {
37 'version': 'v1',
37 'version': 'v1',
38 'exc_id': exc_id,
38 'exc_id': exc_id,
39 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
39 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
40 'exc_timestamp': repr(time.time()),
40 'exc_timestamp': repr(time.time()),
41 'exc_message': tb,
41 'exc_message': tb,
42 'exc_type': exc_type,
42 'exc_type': exc_type,
43 }
43 }
44 return msgpack.packb(data), data
44 return msgpack.packb(data), data
45
45
46
46
47 def exc_unserialize(tb):
47 def exc_unserialize(tb):
48 return msgpack.unpackb(tb)
48 return msgpack.unpackb(tb)
49
49
50
50
51 def get_exc_store():
51 def get_exc_store():
52 """
52 """
53 Get and create exception store if it's not existing
53 Get and create exception store if it's not existing
54 """
54 """
55 import vcsserver as app
55 import vcsserver as app
56
56
57 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
57 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
58 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
58 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
59
59
60 _exc_store_path = os.path.abspath(_exc_store_path)
60 _exc_store_path = os.path.abspath(_exc_store_path)
61 if not os.path.isdir(_exc_store_path):
61 if not os.path.isdir(_exc_store_path):
62 os.makedirs(_exc_store_path)
62 os.makedirs(_exc_store_path)
63 log.debug('Initializing exceptions store at %s', _exc_store_path)
63 log.debug('Initializing exceptions store at %s', _exc_store_path)
64 return _exc_store_path
64 return _exc_store_path
65
65
66
66
67 def _store_exception(exc_id, exc_info, prefix, request_path=''):
67 def _store_exception(exc_id, exc_info, prefix, request_path=''):
68 exc_type, exc_value, exc_traceback = exc_info
68 exc_type, exc_value, exc_traceback = exc_info
69
69
70 tb = ''.join(traceback.format_exception(
70 tb = ''.join(traceback.format_exception(
71 exc_type, exc_value, exc_traceback, None))
71 exc_type, exc_value, exc_traceback, None))
72
72
73 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
73 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
74
74
75 if detailed_tb:
75 if detailed_tb:
76 remote_tb = detailed_tb
76 remote_tb = detailed_tb
77 if isinstance(detailed_tb, str):
77 if isinstance(detailed_tb, str):
78 remote_tb = [detailed_tb]
78 remote_tb = [detailed_tb]
79
79
80 tb += (
80 tb += (
81 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
81 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
82 '{}\n'
82 '{}\n'
83 '+++ END SOURCE EXCEPTION +++\n'
83 '+++ END SOURCE EXCEPTION +++\n'
84 ''.format('\n'.join(remote_tb))
84 ''.format('\n'.join(remote_tb))
85 )
85 )
86
86
87 # Avoid that remote_tb also appears in the frame
87 # Avoid that remote_tb also appears in the frame
88 del remote_tb
88 del remote_tb
89
89
90 exc_type_name = exc_type.__name__
90 exc_type_name = exc_type.__name__
91 exc_store_path = get_exc_store()
91 exc_store_path = get_exc_store()
92 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
92 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
93 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
93 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
94 if not os.path.isdir(exc_store_path):
94 if not os.path.isdir(exc_store_path):
95 os.makedirs(exc_store_path)
95 os.makedirs(exc_store_path)
96 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
96 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
97 with open(stored_exc_path, 'wb') as f:
97 with open(stored_exc_path, 'wb') as f:
98 f.write(exc_data)
98 f.write(exc_data)
99 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
99 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
100
100
101 log.error(
101 log.error(
102 'error occurred handling this request.\n'
102 'error occurred handling this request.\n'
103 'Path: `%s`, tb: %s',
103 'Path: `%s`, tb: %s',
104 request_path, tb)
104 request_path, tb)
105
105
106
106
107 def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
107 def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
108 """
108 """
109 Example usage::
109 Example usage::
110
110
111 exc_info = sys.exc_info()
111 exc_info = sys.exc_info()
112 store_exception(id(exc_info), exc_info)
112 store_exception(id(exc_info), exc_info)
113 """
113 """
114
114
115 try:
115 try:
116 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix,
116 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix,
117 request_path=request_path)
117 request_path=request_path)
118 except Exception:
118 except Exception:
119 log.exception('Failed to store exception `%s` information', exc_id)
119 log.exception('Failed to store exception `%s` information', exc_id)
120 # there's no way this can fail, it will crash server badly if it does.
120 # there's no way this can fail, it will crash server badly if it does.
121 pass
121 pass
122
122
123
123
124 def _find_exc_file(exc_id, prefix=global_prefix):
124 def _find_exc_file(exc_id, prefix=global_prefix):
125 exc_store_path = get_exc_store()
125 exc_store_path = get_exc_store()
126 if prefix:
126 if prefix:
127 exc_id = f'{exc_id}_{prefix}'
127 exc_id = f'{exc_id}_{prefix}'
128 else:
128 else:
129 # search without a prefix
129 # search without a prefix
130 exc_id = f'{exc_id}'
130 exc_id = f'{exc_id}'
131
131
132 # we need to search the store for such start pattern as above
132 # we need to search the store for such start pattern as above
133 for fname in os.listdir(exc_store_path):
133 for fname in os.listdir(exc_store_path):
134 if fname.startswith(exc_id):
134 if fname.startswith(exc_id):
135 exc_id = os.path.join(exc_store_path, fname)
135 exc_id = os.path.join(exc_store_path, fname)
136 break
136 break
137 continue
137 continue
138 else:
138 else:
139 exc_id = None
139 exc_id = None
140
140
141 return exc_id
141 return exc_id
142
142
143
143
144 def _read_exception(exc_id, prefix):
144 def _read_exception(exc_id, prefix):
145 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
145 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
146 if exc_id_file_path:
146 if exc_id_file_path:
147 with open(exc_id_file_path, 'rb') as f:
147 with open(exc_id_file_path, 'rb') as f:
148 return exc_unserialize(f.read())
148 return exc_unserialize(f.read())
149 else:
149 else:
150 log.debug('Exception File `%s` not found', exc_id_file_path)
150 log.debug('Exception File `%s` not found', exc_id_file_path)
151 return None
151 return None
152
152
153
153
154 def read_exception(exc_id, prefix=global_prefix):
154 def read_exception(exc_id, prefix=global_prefix):
155 try:
155 try:
156 return _read_exception(exc_id=exc_id, prefix=prefix)
156 return _read_exception(exc_id=exc_id, prefix=prefix)
157 except Exception:
157 except Exception:
158 log.exception('Failed to read exception `%s` information', exc_id)
158 log.exception('Failed to read exception `%s` information', exc_id)
159 # there's no way this can fail, it will crash server badly if it does.
159 # there's no way this can fail, it will crash server badly if it does.
160 return None
160 return None
161
161
162
162
163 def delete_exception(exc_id, prefix=global_prefix):
163 def delete_exception(exc_id, prefix=global_prefix):
164 try:
164 try:
165 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
165 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
166 if exc_id_file_path:
166 if exc_id_file_path:
167 os.remove(exc_id_file_path)
167 os.remove(exc_id_file_path)
168
168
169 except Exception:
169 except Exception:
170 log.exception('Failed to remove exception `%s` information', exc_id)
170 log.exception('Failed to remove exception `%s` information', exc_id)
171 # there's no way this can fail, it will crash server badly if it does.
171 # there's no way this can fail, it will crash server badly if it does.
172 pass
172 pass
@@ -1,63 +1,63 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import logging
19 import logging
20
20
21 from repoze.lru import LRUCache
21 from repoze.lru import LRUCache
22
22
23 from vcsserver.str_utils import safe_str
23 from vcsserver.str_utils import safe_str
24
24
25 log = logging.getLogger(__name__)
25 log = logging.getLogger(__name__)
26
26
27
27
28 class LRUDict(LRUCache):
28 class LRUDict(LRUCache):
29 """
29 """
30 Wrapper to provide partial dict access
30 Wrapper to provide partial dict access
31 """
31 """
32
32
33 def __setitem__(self, key, value):
33 def __setitem__(self, key, value):
34 return self.put(key, value)
34 return self.put(key, value)
35
35
36 def __getitem__(self, key):
36 def __getitem__(self, key):
37 return self.get(key)
37 return self.get(key)
38
38
39 def __contains__(self, key):
39 def __contains__(self, key):
40 return bool(self.get(key))
40 return bool(self.get(key))
41
41
42 def __delitem__(self, key):
42 def __delitem__(self, key):
43 del self.data[key]
43 del self.data[key]
44
44
45 def keys(self):
45 def keys(self):
46 return list(self.data.keys())
46 return list(self.data.keys())
47
47
48
48
49 class LRUDictDebug(LRUDict):
49 class LRUDictDebug(LRUDict):
50 """
50 """
51 Wrapper to provide some debug options
51 Wrapper to provide some debug options
52 """
52 """
53 def _report_keys(self):
53 def _report_keys(self):
54 elems_cnt = '{}/{}'.format(len(list(self.keys())), self.size)
54 elems_cnt = '{}/{}'.format(len(list(self.keys())), self.size)
55 # trick for pformat print it more nicely
55 # trick for pformat print it more nicely
56 fmt = '\n'
56 fmt = '\n'
57 for cnt, elem in enumerate(self.keys()):
57 for cnt, elem in enumerate(self.keys()):
58 fmt += '{} - {}\n'.format(cnt+1, safe_str(elem))
58 fmt += '{} - {}\n'.format(cnt+1, safe_str(elem))
59 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
59 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
60
60
61 def __getitem__(self, key):
61 def __getitem__(self, key):
62 self._report_keys()
62 self._report_keys()
63 return self.get(key)
63 return self.get(key)
@@ -1,112 +1,112 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import threading
19 import threading
20
20
21 from dogpile.cache import register_backend
21 from dogpile.cache import register_backend
22
22
23 from . import region_meta
23 from . import region_meta
24 from .utils import (
24 from .utils import (
25 backend_key_generator,
25 backend_key_generator,
26 clear_cache_namespace,
26 clear_cache_namespace,
27 get_default_cache_settings,
27 get_default_cache_settings,
28 get_or_create_region,
28 get_or_create_region,
29 make_region,
29 make_region,
30 str2bool,
30 str2bool,
31 )
31 )
32
32
33 module_name = 'vcsserver'
33 module_name = 'vcsserver'
34
34
35 register_backend(
35 register_backend(
36 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
36 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
37 "LRUMemoryBackend")
37 "LRUMemoryBackend")
38
38
39 register_backend(
39 register_backend(
40 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
40 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
41 "FileNamespaceBackend")
41 "FileNamespaceBackend")
42
42
43 register_backend(
43 register_backend(
44 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
44 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
45 "RedisPickleBackend")
45 "RedisPickleBackend")
46
46
47 register_backend(
47 register_backend(
48 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
48 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
49 "RedisMsgPackBackend")
49 "RedisMsgPackBackend")
50
50
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 CLEAR_DELETE = 'delete'
55 CLEAR_DELETE = 'delete'
56 CLEAR_INVALIDATE = 'invalidate'
56 CLEAR_INVALIDATE = 'invalidate'
57
57
58
58
59 def async_creation_runner(cache, somekey, creator, mutex):
59 def async_creation_runner(cache, somekey, creator, mutex):
60
60
61 def runner():
61 def runner():
62 try:
62 try:
63 value = creator()
63 value = creator()
64 cache.set(somekey, value)
64 cache.set(somekey, value)
65 finally:
65 finally:
66 mutex.release()
66 mutex.release()
67
67
68 thread = threading.Thread(target=runner)
68 thread = threading.Thread(target=runner)
69 thread.start()
69 thread.start()
70
70
71
71
72 def configure_dogpile_cache(settings):
72 def configure_dogpile_cache(settings):
73 cache_dir = settings.get('cache_dir')
73 cache_dir = settings.get('cache_dir')
74 if cache_dir:
74 if cache_dir:
75 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
75 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
76
76
77 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
77 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
78
78
79 # inspect available namespaces
79 # inspect available namespaces
80 avail_regions = set()
80 avail_regions = set()
81 for key in rc_cache_data.keys():
81 for key in rc_cache_data.keys():
82 namespace_name = key.split('.', 1)[0]
82 namespace_name = key.split('.', 1)[0]
83 if namespace_name in avail_regions:
83 if namespace_name in avail_regions:
84 continue
84 continue
85
85
86 avail_regions.add(namespace_name)
86 avail_regions.add(namespace_name)
87 log.debug('dogpile: found following cache regions: %s', namespace_name)
87 log.debug('dogpile: found following cache regions: %s', namespace_name)
88
88
89 new_region = make_region(
89 new_region = make_region(
90 name=namespace_name,
90 name=namespace_name,
91 function_key_generator=None,
91 function_key_generator=None,
92 async_creation_runner=None
92 async_creation_runner=None
93 )
93 )
94
94
95 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
95 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
96 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
96 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
97
97
98 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
98 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
99 if async_creator:
99 if async_creator:
100 log.debug('configuring region %s with async creator', new_region)
100 log.debug('configuring region %s with async creator', new_region)
101 new_region.async_creation_runner = async_creation_runner
101 new_region.async_creation_runner = async_creation_runner
102
102
103 if log.isEnabledFor(logging.DEBUG):
103 if log.isEnabledFor(logging.DEBUG):
104 region_args = dict(backend=new_region.actual_backend,
104 region_args = dict(backend=new_region.actual_backend,
105 region_invalidator=new_region.region_invalidator.__class__)
105 region_invalidator=new_region.region_invalidator.__class__)
106 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
106 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
107
107
108 region_meta.dogpile_cache_regions[namespace_name] = new_region
108 region_meta.dogpile_cache_regions[namespace_name] = new_region
109
109
110
110
111 def includeme(config):
111 def includeme(config):
112 configure_dogpile_cache(config.registry.settings)
112 configure_dogpile_cache(config.registry.settings)
@@ -1,72 +1,72 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import diskcache
20 import diskcache
21
21
22 log = logging.getLogger(__name__)
22 log = logging.getLogger(__name__)
23
23
24 cache_meta = None
24 cache_meta = None
25
25
26
26
27 def get_archival_config(config):
27 def get_archival_config(config):
28 final_config = {
28 final_config = {
29 'archive_cache.eviction_policy': 'least-frequently-used'
29 'archive_cache.eviction_policy': 'least-frequently-used'
30 }
30 }
31
31
32 for k, v in config.items():
32 for k, v in config.items():
33 if k.startswith('archive_cache'):
33 if k.startswith('archive_cache'):
34 final_config[k] = v
34 final_config[k] = v
35
35
36 return final_config
36 return final_config
37
37
38
38
39 def get_archival_cache_store(config):
39 def get_archival_cache_store(config):
40
40
41 global cache_meta
41 global cache_meta
42 if cache_meta is not None:
42 if cache_meta is not None:
43 return cache_meta
43 return cache_meta
44
44
45 config = get_archival_config(config)
45 config = get_archival_config(config)
46
46
47 archive_cache_dir = config['archive_cache.store_dir']
47 archive_cache_dir = config['archive_cache.store_dir']
48 archive_cache_size_gb = config['archive_cache.cache_size_gb']
48 archive_cache_size_gb = config['archive_cache.cache_size_gb']
49 archive_cache_shards = config['archive_cache.cache_shards']
49 archive_cache_shards = config['archive_cache.cache_shards']
50 archive_cache_eviction_policy = config['archive_cache.eviction_policy']
50 archive_cache_eviction_policy = config['archive_cache.eviction_policy']
51
51
52 log.debug('Initializing archival cache instance under %s', archive_cache_dir)
52 log.debug('Initializing archival cache instance under %s', archive_cache_dir)
53
53
54 # check if it's ok to write, and re-create the archive cache
54 # check if it's ok to write, and re-create the archive cache
55 if not os.path.isdir(archive_cache_dir):
55 if not os.path.isdir(archive_cache_dir):
56 os.makedirs(archive_cache_dir, exist_ok=True)
56 os.makedirs(archive_cache_dir, exist_ok=True)
57
57
58 d_cache = diskcache.FanoutCache(
58 d_cache = diskcache.FanoutCache(
59 archive_cache_dir, shards=archive_cache_shards,
59 archive_cache_dir, shards=archive_cache_shards,
60 cull_limit=0, # manual eviction required
60 cull_limit=0, # manual eviction required
61 size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
61 size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
62 eviction_policy=archive_cache_eviction_policy,
62 eviction_policy=archive_cache_eviction_policy,
63 timeout=30
63 timeout=30
64 )
64 )
65 cache_meta = d_cache
65 cache_meta = d_cache
66 return cache_meta
66 return cache_meta
67
67
68
68
69 def includeme(config):
69 def includeme(config):
70 # init our cache at start, for vcsserver we don't init at runtime
70 # init our cache at start, for vcsserver we don't init at runtime
71 # because our cache config is sent via wire on make archive call, this call just lazy-enables the client
71 # because our cache config is sent via wire on make archive call, this call just lazy-enables the client
72 return
72 return
@@ -1,267 +1,267 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import errno
18 import errno
19 import fcntl
19 import fcntl
20 import functools
20 import functools
21 import logging
21 import logging
22 import os
22 import os
23 import pickle
23 import pickle
24 #import time
24 #import time
25
25
26 #import gevent
26 #import gevent
27 import msgpack
27 import msgpack
28 import redis
28 import redis
29
29
30 flock_org = fcntl.flock
30 flock_org = fcntl.flock
31 from typing import Union
31 from typing import Union
32
32
33 from dogpile.cache.api import Deserializer, Serializer
33 from dogpile.cache.api import Deserializer, Serializer
34 from dogpile.cache.backends import file as file_backend
34 from dogpile.cache.backends import file as file_backend
35 from dogpile.cache.backends import memory as memory_backend
35 from dogpile.cache.backends import memory as memory_backend
36 from dogpile.cache.backends import redis as redis_backend
36 from dogpile.cache.backends import redis as redis_backend
37 from dogpile.cache.backends.file import FileLock
37 from dogpile.cache.backends.file import FileLock
38 from dogpile.cache.util import memoized_property
38 from dogpile.cache.util import memoized_property
39
39
40 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
40 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
41 from vcsserver.str_utils import safe_bytes, safe_str
41 from vcsserver.str_utils import safe_bytes, safe_str
42 from vcsserver.type_utils import str2bool
42 from vcsserver.type_utils import str2bool
43
43
44 _default_max_size = 1024
44 _default_max_size = 1024
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 class LRUMemoryBackend(memory_backend.MemoryBackend):
49 class LRUMemoryBackend(memory_backend.MemoryBackend):
50 key_prefix = 'lru_mem_backend'
50 key_prefix = 'lru_mem_backend'
51 pickle_values = False
51 pickle_values = False
52
52
53 def __init__(self, arguments):
53 def __init__(self, arguments):
54 self.max_size = arguments.pop('max_size', _default_max_size)
54 self.max_size = arguments.pop('max_size', _default_max_size)
55
55
56 LRUDictClass = LRUDict
56 LRUDictClass = LRUDict
57 if arguments.pop('log_key_count', None):
57 if arguments.pop('log_key_count', None):
58 LRUDictClass = LRUDictDebug
58 LRUDictClass = LRUDictDebug
59
59
60 arguments['cache_dict'] = LRUDictClass(self.max_size)
60 arguments['cache_dict'] = LRUDictClass(self.max_size)
61 super().__init__(arguments)
61 super().__init__(arguments)
62
62
63 def __repr__(self):
63 def __repr__(self):
64 return f'{self.__class__}(maxsize=`{self.max_size}`)'
64 return f'{self.__class__}(maxsize=`{self.max_size}`)'
65
65
66 def __str__(self):
66 def __str__(self):
67 return self.__repr__()
67 return self.__repr__()
68
68
69 def delete(self, key):
69 def delete(self, key):
70 try:
70 try:
71 del self._cache[key]
71 del self._cache[key]
72 except KeyError:
72 except KeyError:
73 # we don't care if key isn't there at deletion
73 # we don't care if key isn't there at deletion
74 pass
74 pass
75
75
76 def delete_multi(self, keys):
76 def delete_multi(self, keys):
77 for key in keys:
77 for key in keys:
78 self.delete(key)
78 self.delete(key)
79
79
80
80
81 class PickleSerializer:
81 class PickleSerializer:
82 serializer: None | Serializer = staticmethod( # type: ignore
82 serializer: None | Serializer = staticmethod( # type: ignore
83 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
83 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
84 )
84 )
85 deserializer: None | Deserializer = staticmethod( # type: ignore
85 deserializer: None | Deserializer = staticmethod( # type: ignore
86 functools.partial(pickle.loads)
86 functools.partial(pickle.loads)
87 )
87 )
88
88
89
89
90 class MsgPackSerializer(object):
90 class MsgPackSerializer(object):
91 serializer: None | Serializer = staticmethod( # type: ignore
91 serializer: None | Serializer = staticmethod( # type: ignore
92 msgpack.packb
92 msgpack.packb
93 )
93 )
94 deserializer: None | Deserializer = staticmethod( # type: ignore
94 deserializer: None | Deserializer = staticmethod( # type: ignore
95 functools.partial(msgpack.unpackb, use_list=False)
95 functools.partial(msgpack.unpackb, use_list=False)
96 )
96 )
97
97
98
98
99 class CustomLockFactory(FileLock):
99 class CustomLockFactory(FileLock):
100
100
101 pass
101 pass
102
102
103
103
104 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
104 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
105 key_prefix = 'file_backend'
105 key_prefix = 'file_backend'
106
106
107 def __init__(self, arguments):
107 def __init__(self, arguments):
108 arguments['lock_factory'] = CustomLockFactory
108 arguments['lock_factory'] = CustomLockFactory
109 db_file = arguments.get('filename')
109 db_file = arguments.get('filename')
110
110
111 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
111 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
112 db_file_dir = os.path.dirname(db_file)
112 db_file_dir = os.path.dirname(db_file)
113 if not os.path.isdir(db_file_dir):
113 if not os.path.isdir(db_file_dir):
114 os.makedirs(db_file_dir)
114 os.makedirs(db_file_dir)
115
115
116 try:
116 try:
117 super().__init__(arguments)
117 super().__init__(arguments)
118 except Exception:
118 except Exception:
119 log.exception('Failed to initialize db at: %s', db_file)
119 log.exception('Failed to initialize db at: %s', db_file)
120 raise
120 raise
121
121
122 def __repr__(self):
122 def __repr__(self):
123 return f'{self.__class__}(file=`{self.filename}`)'
123 return f'{self.__class__}(file=`{self.filename}`)'
124
124
125 def __str__(self):
125 def __str__(self):
126 return self.__repr__()
126 return self.__repr__()
127
127
128 def _get_keys_pattern(self, prefix: bytes = b''):
128 def _get_keys_pattern(self, prefix: bytes = b''):
129 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
129 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
130
130
131 def list_keys(self, prefix: bytes = b''):
131 def list_keys(self, prefix: bytes = b''):
132 prefix = self._get_keys_pattern(prefix)
132 prefix = self._get_keys_pattern(prefix)
133
133
134 def cond(dbm_key: bytes):
134 def cond(dbm_key: bytes):
135 if not prefix:
135 if not prefix:
136 return True
136 return True
137
137
138 if dbm_key.startswith(prefix):
138 if dbm_key.startswith(prefix):
139 return True
139 return True
140 return False
140 return False
141
141
142 with self._dbm_file(True) as dbm:
142 with self._dbm_file(True) as dbm:
143 try:
143 try:
144 return list(filter(cond, dbm.keys()))
144 return list(filter(cond, dbm.keys()))
145 except Exception:
145 except Exception:
146 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
146 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
147 raise
147 raise
148
148
149 def get_store(self):
149 def get_store(self):
150 return self.filename
150 return self.filename
151
151
152
152
153 class BaseRedisBackend(redis_backend.RedisBackend):
153 class BaseRedisBackend(redis_backend.RedisBackend):
154 key_prefix = ''
154 key_prefix = ''
155
155
156 def __init__(self, arguments):
156 def __init__(self, arguments):
157 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
157 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
158 super().__init__(arguments)
158 super().__init__(arguments)
159
159
160 self._lock_timeout = self.lock_timeout
160 self._lock_timeout = self.lock_timeout
161 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
161 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
162
162
163 if self._lock_auto_renewal and not self._lock_timeout:
163 if self._lock_auto_renewal and not self._lock_timeout:
164 # set default timeout for auto_renewal
164 # set default timeout for auto_renewal
165 self._lock_timeout = 30
165 self._lock_timeout = 30
166
166
167 def __repr__(self):
167 def __repr__(self):
168 return f'{self.__class__}(conn=`{self.db_conn}`)'
168 return f'{self.__class__}(conn=`{self.db_conn}`)'
169
169
170 def __str__(self):
170 def __str__(self):
171 return self.__repr__()
171 return self.__repr__()
172
172
173 def _create_client(self):
173 def _create_client(self):
174 args = {}
174 args = {}
175
175
176 if self.url is not None:
176 if self.url is not None:
177 args.update(url=self.url)
177 args.update(url=self.url)
178
178
179 else:
179 else:
180 args.update(
180 args.update(
181 host=self.host, password=self.password,
181 host=self.host, password=self.password,
182 port=self.port, db=self.db
182 port=self.port, db=self.db
183 )
183 )
184
184
185 connection_pool = redis.ConnectionPool(**args)
185 connection_pool = redis.ConnectionPool(**args)
186 self.writer_client = redis.StrictRedis(
186 self.writer_client = redis.StrictRedis(
187 connection_pool=connection_pool
187 connection_pool=connection_pool
188 )
188 )
189 self.reader_client = self.writer_client
189 self.reader_client = self.writer_client
190
190
191 def _get_keys_pattern(self, prefix: bytes = b''):
191 def _get_keys_pattern(self, prefix: bytes = b''):
192 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
192 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
193
193
194 def list_keys(self, prefix: bytes = b''):
194 def list_keys(self, prefix: bytes = b''):
195 prefix = self._get_keys_pattern(prefix)
195 prefix = self._get_keys_pattern(prefix)
196 return self.reader_client.keys(prefix)
196 return self.reader_client.keys(prefix)
197
197
198 def get_store(self):
198 def get_store(self):
199 return self.reader_client.connection_pool
199 return self.reader_client.connection_pool
200
200
201 def get_mutex(self, key):
201 def get_mutex(self, key):
202 if self.distributed_lock:
202 if self.distributed_lock:
203 lock_key = f'_lock_{safe_str(key)}'
203 lock_key = f'_lock_{safe_str(key)}'
204 return get_mutex_lock(
204 return get_mutex_lock(
205 self.writer_client, lock_key,
205 self.writer_client, lock_key,
206 self._lock_timeout,
206 self._lock_timeout,
207 auto_renewal=self._lock_auto_renewal
207 auto_renewal=self._lock_auto_renewal
208 )
208 )
209 else:
209 else:
210 return None
210 return None
211
211
212
212
213 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
213 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
214 key_prefix = 'redis_pickle_backend'
214 key_prefix = 'redis_pickle_backend'
215 pass
215 pass
216
216
217
217
218 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
218 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
219 key_prefix = 'redis_msgpack_backend'
219 key_prefix = 'redis_msgpack_backend'
220 pass
220 pass
221
221
222
222
223 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
223 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
224 from vcsserver.lib._vendor import redis_lock
224 from vcsserver.lib._vendor import redis_lock
225
225
226 class _RedisLockWrapper(object):
226 class _RedisLockWrapper(object):
227 """LockWrapper for redis_lock"""
227 """LockWrapper for redis_lock"""
228
228
229 @classmethod
229 @classmethod
230 def get_lock(cls):
230 def get_lock(cls):
231 return redis_lock.Lock(
231 return redis_lock.Lock(
232 redis_client=client,
232 redis_client=client,
233 name=lock_key,
233 name=lock_key,
234 expire=lock_timeout,
234 expire=lock_timeout,
235 auto_renewal=auto_renewal,
235 auto_renewal=auto_renewal,
236 strict=True,
236 strict=True,
237 )
237 )
238
238
239 def __repr__(self):
239 def __repr__(self):
240 return f"{self.__class__.__name__}:{lock_key}"
240 return f"{self.__class__.__name__}:{lock_key}"
241
241
242 def __str__(self):
242 def __str__(self):
243 return f"{self.__class__.__name__}:{lock_key}"
243 return f"{self.__class__.__name__}:{lock_key}"
244
244
245 def __init__(self):
245 def __init__(self):
246 self.lock = self.get_lock()
246 self.lock = self.get_lock()
247 self.lock_key = lock_key
247 self.lock_key = lock_key
248
248
249 def acquire(self, wait=True):
249 def acquire(self, wait=True):
250 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
250 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
251 try:
251 try:
252 acquired = self.lock.acquire(wait)
252 acquired = self.lock.acquire(wait)
253 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
253 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
254 return acquired
254 return acquired
255 except redis_lock.AlreadyAcquired:
255 except redis_lock.AlreadyAcquired:
256 return False
256 return False
257 except redis_lock.AlreadyStarted:
257 except redis_lock.AlreadyStarted:
258 # refresh thread exists, but it also means we acquired the lock
258 # refresh thread exists, but it also means we acquired the lock
259 return True
259 return True
260
260
261 def release(self):
261 def release(self):
262 try:
262 try:
263 self.lock.release()
263 self.lock.release()
264 except redis_lock.NotAcquired:
264 except redis_lock.NotAcquired:
265 pass
265 pass
266
266
267 return _RedisLockWrapper()
267 return _RedisLockWrapper()
@@ -1,26 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import tempfile
19 import tempfile
20
20
21 dogpile_config_defaults = {
21 dogpile_config_defaults = {
22 'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
22 'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
23 }
23 }
24
24
25 # GLOBAL TO STORE ALL REGISTERED REGIONS
25 # GLOBAL TO STORE ALL REGISTERED REGIONS
26 dogpile_cache_regions = {}
26 dogpile_cache_regions = {}
@@ -1,242 +1,242 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import functools
18 import functools
19 import logging
19 import logging
20 import os
20 import os
21 import threading
21 import threading
22 import time
22 import time
23
23
24 import decorator
24 import decorator
25 from dogpile.cache import CacheRegion
25 from dogpile.cache import CacheRegion
26
26
27 from vcsserver.lib.rc_cache import region_meta
27 from vcsserver.lib.rc_cache import region_meta
28 from vcsserver.str_utils import safe_bytes
28 from vcsserver.str_utils import safe_bytes
29 from vcsserver.type_utils import str2bool
29 from vcsserver.type_utils import str2bool
30 from vcsserver.utils import sha1
30 from vcsserver.utils import sha1
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 class RhodeCodeCacheRegion(CacheRegion):
35 class RhodeCodeCacheRegion(CacheRegion):
36
36
37 def __repr__(self):
37 def __repr__(self):
38 return f'{self.__class__}(name={self.name})'
38 return f'{self.__class__}(name={self.name})'
39
39
40 def conditional_cache_on_arguments(
40 def conditional_cache_on_arguments(
41 self, namespace=None,
41 self, namespace=None,
42 expiration_time=None,
42 expiration_time=None,
43 should_cache_fn=None,
43 should_cache_fn=None,
44 to_str=str,
44 to_str=str,
45 function_key_generator=None,
45 function_key_generator=None,
46 condition=True):
46 condition=True):
47 """
47 """
48 Custom conditional decorator, that will not touch any dogpile internals if
48 Custom conditional decorator, that will not touch any dogpile internals if
49 condition isn't meet. This works a bit different from should_cache_fn
49 condition isn't meet. This works a bit different from should_cache_fn
50 And it's faster in cases we don't ever want to compute cached values
50 And it's faster in cases we don't ever want to compute cached values
51 """
51 """
52 expiration_time_is_callable = callable(expiration_time)
52 expiration_time_is_callable = callable(expiration_time)
53 if not namespace:
53 if not namespace:
54 namespace = getattr(self, '_default_namespace', None)
54 namespace = getattr(self, '_default_namespace', None)
55
55
56 if function_key_generator is None:
56 if function_key_generator is None:
57 function_key_generator = self.function_key_generator
57 function_key_generator = self.function_key_generator
58
58
59 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
59 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
60
60
61 if not condition:
61 if not condition:
62 log.debug('Calling un-cached method:%s', user_func.__name__)
62 log.debug('Calling un-cached method:%s', user_func.__name__)
63 start = time.time()
63 start = time.time()
64 result = user_func(*arg, **kw)
64 result = user_func(*arg, **kw)
65 total = time.time() - start
65 total = time.time() - start
66 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
66 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
67 return result
67 return result
68
68
69 key = func_key_generator(*arg, **kw)
69 key = func_key_generator(*arg, **kw)
70
70
71 timeout = expiration_time() if expiration_time_is_callable \
71 timeout = expiration_time() if expiration_time_is_callable \
72 else expiration_time
72 else expiration_time
73
73
74 log.debug('Calling cached method:`%s`', user_func.__name__)
74 log.debug('Calling cached method:`%s`', user_func.__name__)
75 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
75 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
76
76
77 def cache_decorator(user_func):
77 def cache_decorator(user_func):
78 if to_str is str:
78 if to_str is str:
79 # backwards compatible
79 # backwards compatible
80 key_generator = function_key_generator(namespace, user_func)
80 key_generator = function_key_generator(namespace, user_func)
81 else:
81 else:
82 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
82 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
83
83
84 def refresh(*arg, **kw):
84 def refresh(*arg, **kw):
85 """
85 """
86 Like invalidate, but regenerates the value instead
86 Like invalidate, but regenerates the value instead
87 """
87 """
88 key = key_generator(*arg, **kw)
88 key = key_generator(*arg, **kw)
89 value = user_func(*arg, **kw)
89 value = user_func(*arg, **kw)
90 self.set(key, value)
90 self.set(key, value)
91 return value
91 return value
92
92
93 def invalidate(*arg, **kw):
93 def invalidate(*arg, **kw):
94 key = key_generator(*arg, **kw)
94 key = key_generator(*arg, **kw)
95 self.delete(key)
95 self.delete(key)
96
96
97 def set_(value, *arg, **kw):
97 def set_(value, *arg, **kw):
98 key = key_generator(*arg, **kw)
98 key = key_generator(*arg, **kw)
99 self.set(key, value)
99 self.set(key, value)
100
100
101 def get(*arg, **kw):
101 def get(*arg, **kw):
102 key = key_generator(*arg, **kw)
102 key = key_generator(*arg, **kw)
103 return self.get(key)
103 return self.get(key)
104
104
105 user_func.set = set_
105 user_func.set = set_
106 user_func.invalidate = invalidate
106 user_func.invalidate = invalidate
107 user_func.get = get
107 user_func.get = get
108 user_func.refresh = refresh
108 user_func.refresh = refresh
109 user_func.key_generator = key_generator
109 user_func.key_generator = key_generator
110 user_func.original = user_func
110 user_func.original = user_func
111
111
112 # Use `decorate` to preserve the signature of :param:`user_func`.
112 # Use `decorate` to preserve the signature of :param:`user_func`.
113 return decorator.decorate(user_func, functools.partial(
113 return decorator.decorate(user_func, functools.partial(
114 get_or_create_for_user_func, key_generator))
114 get_or_create_for_user_func, key_generator))
115
115
116 return cache_decorator
116 return cache_decorator
117
117
118
118
119 def make_region(*arg, **kw):
119 def make_region(*arg, **kw):
120 return RhodeCodeCacheRegion(*arg, **kw)
120 return RhodeCodeCacheRegion(*arg, **kw)
121
121
122
122
123 def get_default_cache_settings(settings, prefixes=None):
123 def get_default_cache_settings(settings, prefixes=None):
124 prefixes = prefixes or []
124 prefixes = prefixes or []
125 cache_settings = {}
125 cache_settings = {}
126 for key in settings.keys():
126 for key in settings.keys():
127 for prefix in prefixes:
127 for prefix in prefixes:
128 if key.startswith(prefix):
128 if key.startswith(prefix):
129 name = key.split(prefix)[1].strip()
129 name = key.split(prefix)[1].strip()
130 val = settings[key]
130 val = settings[key]
131 if isinstance(val, str):
131 if isinstance(val, str):
132 val = val.strip()
132 val = val.strip()
133 cache_settings[name] = val
133 cache_settings[name] = val
134 return cache_settings
134 return cache_settings
135
135
136
136
137 def compute_key_from_params(*args):
137 def compute_key_from_params(*args):
138 """
138 """
139 Helper to compute key from given params to be used in cache manager
139 Helper to compute key from given params to be used in cache manager
140 """
140 """
141 return sha1(safe_bytes("_".join(map(str, args))))
141 return sha1(safe_bytes("_".join(map(str, args))))
142
142
143
143
144 def custom_key_generator(backend, namespace, fn):
144 def custom_key_generator(backend, namespace, fn):
145 func_name = fn.__name__
145 func_name = fn.__name__
146
146
147 def generate_key(*args):
147 def generate_key(*args):
148 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
148 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
149 namespace_pref = namespace or 'default_namespace'
149 namespace_pref = namespace or 'default_namespace'
150 arg_key = compute_key_from_params(*args)
150 arg_key = compute_key_from_params(*args)
151 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
151 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
152
152
153 return final_key
153 return final_key
154
154
155 return generate_key
155 return generate_key
156
156
157
157
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    def bound_generator(namespace, fn):
        # close over ``backend`` so dogpile's (namespace, fn) signature still fits
        return custom_key_generator(backend, namespace, fn)

    return bound_generator
165
165
166
166
def get_or_create_region(region_name, region_namespace: str = None):
    """
    Return a configured dogpile cache region, creating a per-namespace
    FileNamespaceBackend region on demand.

    :param region_name: key into the pre-configured ``region_meta.dogpile_cache_regions``
    :param region_namespace: required when the region uses FileNamespaceBackend;
        each namespace gets its own on-disk ``.cache_db`` file.
    :raises EnvironmentError: if ``region_name`` was never configured
    :raises ValueError: if a file-backed region is requested without a namespace
    """
    from vcsserver.lib.rc_cache.backends import FileNamespaceBackend

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise EnvironmentError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        # NOTE(review): namespaced regions are cached under ``region_namespace``
        # alone (not ``region_uid_name``), so lookups below must match that key.
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        # inherit the expiration policy of the parent (template) region
        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        if not os.path.isdir(namespace_cache_dir):
            os.makedirs(namespace_cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    # remember which namespace this region serves; consumers read it back later
    region_obj._default_namespace = region_namespace
    return region_obj
219
219
220
220
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
    """
    Clear a cache namespace either by soft invalidation or hard key deletion.

    Returns the number of deleted keys for CLEAR_DELETE, otherwise None.
    """
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s with method=%s', cache_region, method)

    affected = None

    if method == CLEAR_INVALIDATE:
        # NOTE: CacheRegion.invalidate() only sets a timestamp local to this
        # CacheRegion in this Python process; it does not impact other
        # processes or regions, as the timestamp lives in local memory only.
        cache_region.invalidate(hard=True)

    if method == CLEAR_DELETE:
        keys_to_drop = cache_region.backend.list_keys(prefix=cache_namespace_uid)
        affected = len(keys_to_drop)
        if affected:
            cache_region.delete_multi(keys_to_drop)

    return affected
@@ -1,25 +1,25 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
# process-local monotonically increasing request counter
counter = 0


def get_request_counter(request):
    """Return the next value of the module-level request counter."""
    global counter
    counter = counter + 1
    return counter
@@ -1,70 +1,70 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib._vendor.statsd import client_from_config
18 from vcsserver.lib._vendor.statsd import client_from_config
19
19
20
20
class StatsdClientNotInitialised(Exception):
    """Raised when a ``statsd*`` attribute is accessed in strict mode before setup()."""
23
23
24
24
25 class _Singleton(type):
25 class _Singleton(type):
26 """A metaclass that creates a Singleton base class when called."""
26 """A metaclass that creates a Singleton base class when called."""
27
27
28 _instances = {}
28 _instances = {}
29
29
30 def __call__(cls, *args, **kwargs):
30 def __call__(cls, *args, **kwargs):
31 if cls not in cls._instances:
31 if cls not in cls._instances:
32 cls._instances[cls] = super().__call__(*args, **kwargs)
32 cls._instances[cls] = super().__call__(*args, **kwargs)
33 return cls._instances[cls]
33 return cls._instances[cls]
34
34
35
35
# Concrete, inheritable singleton base class; subclasses share one instance
# per class via the _Singleton metaclass above.
class Singleton(_Singleton("SingletonMeta", (object,), {})):
    pass
38
38
39
39
class StatsdClientClass(Singleton):
    """
    Lazily-initialised holder for the process-wide statsd client.

    Any attribute whose name starts with ``statsd`` returns None (or raises
    StatsdClientNotInitialised in strict mode) until setup() has run.
    """
    setup_run = False          # flipped to True once setup() succeeds
    statsd_client = None       # alias of ``statsd`` kept for compatibility
    statsd = None              # the actual client built by client_from_config()
    strict_mode_init = False   # when True, pre-setup access raises instead of returning None

    def __getattribute__(self, name):
        # Gate every ``statsd*`` attribute behind setup() having been called.
        if name.startswith("statsd"):
            if self.setup_run:
                return super().__getattribute__(name)
            else:
                if self.strict_mode_init:
                    raise StatsdClientNotInitialised(f"requested key was {name}")
                return None

        return super().__getattribute__(name)

    def setup(self, settings):
        """
        Initialize the client from a settings dict.

        :param settings: mutable mapping; ``statsd_strict_init`` is popped from
            it, the remainder is handed to ``client_from_config``.
        """
        strict_init_mode = settings.pop('statsd_strict_init', False)
        # Fix: previously the popped flag was discarded, so strict mode could
        # never actually be enabled; persist it for __getattribute__ above.
        self.strict_mode_init = strict_init_mode

        statsd = client_from_config(settings)
        self.statsd = statsd
        self.statsd_client = statsd
        self.setup_run = True
68
68
69
69
# Module-level singleton handle; call StatsdClient.setup(settings) once at startup.
StatsdClient = StatsdClientClass()
@@ -1,160 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import tempfile
19 import tempfile
20
20
21 from svn import client
21 from svn import client
22 from svn import core
22 from svn import core
23 from svn import ra
23 from svn import ra
24
24
25 from mercurial import error
25 from mercurial import error
26
26
27 from vcsserver.str_utils import safe_bytes
27 from vcsserver.str_utils import safe_bytes
28
28
# Ensure the user-level svn configuration area exists, then load it once at
# import time; ``svn_config`` is reused by every RA session opened below.
core.svn_config_ensure(None)
svn_config = core.svn_config_get_config(None)
31
31
32
32
class RaCallbacks(ra.Callbacks):
    """RA-layer callbacks: temp-file creation and client identification."""

    @staticmethod
    def open_tmp_file(pool):  # pragma: no cover
        """Return the path of a fresh temporary file with its descriptor closed."""
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        return filename

    @staticmethod
    def get_client_string(pool):
        """Identify this client to the remote subversion server."""
        return b'RhodeCode-subversion-url-checker'
43
43
44
44
class SubversionException(Exception):
    """Base class for Subversion-related errors raised by this module."""
47
47
48
48
class SubversionConnectionException(SubversionException):
    """Raised when a generic error occurs while connecting to a repository."""
51
51
52
52
def normalize_url(url):
    """Strip the ``svn+`` scheme prefix from http(s) URLs and trailing slashes."""
    if not url:
        return url
    if url.startswith((b'svn+http://', b'svn+https://')):
        # drop the 4-byte b'svn+' prefix, leaving a plain http(s) URL
        url = url[4:]
    return url.rstrip(b'/')
60
60
61
61
def _create_auth_baton(pool):
    """Create a Subversion authentication baton. """
    # Give the client context baton a suite of authentication providers.
    # Platform-specific providers are not compiled into every SWIG build,
    # hence the getattr lookup plus the RuntimeError guard below.
    platform_provider_names = (
        'svn_auth_get_gnome_keyring_simple_provider',
        'svn_auth_get_gnome_keyring_ssl_client_cert_pw_provider',
        'svn_auth_get_keychain_simple_provider',
        'svn_auth_get_keychain_ssl_client_cert_pw_provider',
        'svn_auth_get_kwallet_simple_provider',
        'svn_auth_get_kwallet_ssl_client_cert_pw_provider',
        'svn_auth_get_ssl_client_cert_file_provider',
        'svn_auth_get_windows_simple_provider',
        'svn_auth_get_windows_ssl_server_trust_provider',
    )

    providers = []
    for provider_name in platform_provider_names:
        factory = getattr(core, provider_name, None)
        if factory is None:
            continue
        try:
            providers.append(factory())
        except RuntimeError:
            # provider exists in the bindings but cannot be initialised here
            pass

    providers.extend([
        client.get_simple_provider(),
        client.get_username_provider(),
        client.get_ssl_client_cert_file_provider(),
        client.get_ssl_client_cert_pw_file_provider(),
        client.get_ssl_server_trust_file_provider(),
    ])

    return core.svn_auth_open(providers, pool)
96
96
97
97
class SubversionRepo(object):
    """Wrapper for a Subversion repository.

    It uses the SWIG Python bindings, see above for requirements.
    """
    def __init__(self, svn_url: bytes = b'', username: bytes = b'', password: bytes = b''):

        self.username = username
        self.password = password
        # canonical form of the URL, as required by the svn core APIs
        self.svn_url = core.svn_path_canonicalize(svn_url)

        # the auth baton needs its own pool that outlives individual sessions
        self.auth_baton_pool = core.Pool()
        self.auth_baton = _create_auth_baton(self.auth_baton_pool)
        # self.init_ra_and_client() assumes that a pool already exists
        self.pool = core.Pool()

        self.ra = self.init_ra_and_client()
        self.uuid = ra.get_uuid(self.ra, self.pool)

    def init_ra_and_client(self):
        """Initializes the RA and client layers, because sometimes getting
        unified diffs runs the remote server out of open files.
        """

        # seed default credentials into the auth baton, when provided
        if self.username:
            core.svn_auth_set_parameter(self.auth_baton,
                                        core.SVN_AUTH_PARAM_DEFAULT_USERNAME,
                                        self.username)
        if self.password:
            core.svn_auth_set_parameter(self.auth_baton,
                                        core.SVN_AUTH_PARAM_DEFAULT_PASSWORD,
                                        self.password)

        callbacks = RaCallbacks()
        callbacks.auth_baton = self.auth_baton

        # NOTE(review): this catches the module-local SubversionException;
        # presumably ra.open2 raises the bindings' own exception type —
        # confirm these match, otherwise this handler never fires.
        try:
            return ra.open2(self.svn_url, callbacks, svn_config, self.pool)
        except SubversionException as e:
            # e.child contains a detailed error messages
            msglist = []
            svn_exc = e
            while svn_exc:
                if svn_exc.args[0]:
                    msglist.append(svn_exc.args[0])
                svn_exc = svn_exc.child
            msg = '\n'.join(msglist)
            raise SubversionConnectionException(msg)
146
146
147
147
class svnremoterepo(object):
    """ the dumb wrapper for actual Subversion repositories """

    def __init__(self, username: bytes = b'', password: bytes = b'', svn_url: bytes = b''):
        self.username = username if username else b''
        self.password = password if password else b''
        self.path = normalize_url(svn_url)

    def svn(self):
        """Open the repository, translating connection failures into Abort."""
        try:
            return SubversionRepo(self.path, self.username, self.password)
        except SubversionConnectionException as e:
            raise error.Abort(safe_bytes(e))
@@ -1,417 +1,417 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import dulwich.protocol
24 import dulwich.protocol
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver.lib.rc_json import json
28 from vcsserver.lib.rc_json import json
29 from vcsserver import hooks, subprocessio
29 from vcsserver import hooks, subprocessio
30 from vcsserver.str_utils import ascii_bytes
30 from vcsserver.str_utils import ascii_bytes
31
31
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
class FileWrapper(object):
    """File wrapper that ensures how much data is read from it."""

    def __init__(self, fd, content_length):
        self.fd = fd
        self.content_length = content_length
        # bytes still allowed to be read from the underlying fd
        self.remain = content_length

    def read(self, size):
        if size <= self.remain:
            try:
                chunk = self.fd.read(size)
            except socket.error:
                raise IOError(self)
            self.remain -= size
            return chunk
        if self.remain:
            # request exceeds budget: hand back only what is left
            chunk = self.fd.read(self.remain)
            self.remain = 0
            return chunk
        return None

    def __repr__(self):
        consumed = self.content_length - self.remain
        return f'<FileWrapper {self.fd} len: {self.content_length}, read: {consumed}>'
62
62
63
63
class GitRepository(object):
    """WSGI app for handling Git smart protocol endpoints."""

    # entries every bare git repository must contain (compared lower-cased)
    git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
    # the two smart-protocol services this app is willing to run
    commands = frozenset(('git-upload-pack', 'git-receive-pack'))
    valid_accepts = frozenset(f'application/x-{c}-result' for c in commands)

    # The last bytes are the SHA1 of the first 12 bytes.
    EMPTY_PACK = (
        b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
        b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
    )
    # pkt-line flush packet per the git smart HTTP protocol
    FLUSH_PACKET = b"0000"

    SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
79
79
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
81 files = frozenset(f.lower() for f in os.listdir(content_path))
81 files = frozenset(f.lower() for f in os.listdir(content_path))
82 valid_dir_signature = self.git_folder_signature.issubset(files)
82 valid_dir_signature = self.git_folder_signature.issubset(files)
83
83
84 if not valid_dir_signature:
84 if not valid_dir_signature:
85 raise OSError(f'{content_path} missing git signature')
85 raise OSError(f'{content_path} missing git signature')
86
86
87 self.content_path = content_path
87 self.content_path = content_path
88 self.repo_name = repo_name
88 self.repo_name = repo_name
89 self.extras = extras
89 self.extras = extras
90 self.git_path = git_path
90 self.git_path = git_path
91 self.update_server_info = update_server_info
91 self.update_server_info = update_server_info
92
92
93 def _get_fixedpath(self, path):
93 def _get_fixedpath(self, path):
94 """
94 """
95 Small fix for repo_path
95 Small fix for repo_path
96
96
97 :param path:
97 :param path:
98 """
98 """
99 path = path.split(self.repo_name, 1)[-1]
99 path = path.split(self.repo_name, 1)[-1]
100 if path.startswith('.git'):
100 if path.startswith('.git'):
101 # for bare repos we still get the .git prefix inside, we skip it
101 # for bare repos we still get the .git prefix inside, we skip it
102 # here, and remove from the service command
102 # here, and remove from the service command
103 path = path[4:]
103 path = path[4:]
104
104
105 return path.strip('/')
105 return path.strip('/')
106
106
    def inforefs(self, request, unused_environ):
        """
        WSGI Response producer for HTTP GET Git Smart
        HTTP /info/refs request.
        """

        git_command = request.GET.get('service')
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        # please, resist the urge to add '\n' to git capture and increment
        # line count by 1.
        # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
        # a part of protocol.
        # The code in Git client not only does NOT need '\n', but actually
        # blows up if you sprinkle "flush" (0000) as "0001\n".
        # It reads binary, per number of bytes specified.
        # if you do add '\n' as part of data, count it.
        server_advert = '# service=%s\n' % git_command
        # pkt-line length prefix: payload length + 4, as 4 lowercase hex digits
        packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
        try:
            gitenv = dict(os.environ)
            # forget all configs
            gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
            # e.g. ``git upload-pack --stateless-rpc --advertise-refs <path>``
            command = [self.git_path, git_command[4:], '--stateless-rpc',
                       '--advertise-refs', self.content_path]
            out = subprocessio.SubprocessIOChunker(
                command,
                env=gitenv,
                # prepend the service advertisement + flush before git's output
                starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
                shell=False
            )
        except OSError:
            log.exception('Error processing command')
            raise exc.HTTPExpectationFailed()

        resp = Response()
        resp.content_type = f'application/x-{git_command}-advertisement'
        # charset must stay unset: the body is binary pkt-line data
        resp.charset = None
        resp.app_iter = out

        return resp
150
150
151 def _get_want_capabilities(self, request):
151 def _get_want_capabilities(self, request):
152 """Read the capabilities found in the first want line of the request."""
152 """Read the capabilities found in the first want line of the request."""
153 pos = request.body_file_seekable.tell()
153 pos = request.body_file_seekable.tell()
154 first_line = request.body_file_seekable.readline()
154 first_line = request.body_file_seekable.readline()
155 request.body_file_seekable.seek(pos)
155 request.body_file_seekable.seek(pos)
156
156
157 return frozenset(
157 return frozenset(
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
159
159
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
161 """
161 """
162 Construct a response with an empty PACK file.
162 Construct a response with an empty PACK file.
163
163
164 We use an empty PACK file, as that would trigger the failure of the pull
164 We use an empty PACK file, as that would trigger the failure of the pull
165 or clone command.
165 or clone command.
166
166
167 We also print in the error output a message explaining why the command
167 We also print in the error output a message explaining why the command
168 was aborted.
168 was aborted.
169
169
170 If additionally, the user is accepting messages we send them the output
170 If additionally, the user is accepting messages we send them the output
171 of the pre-pull hook.
171 of the pre-pull hook.
172
172
173 Note that for clients not supporting side-band we just send them the
173 Note that for clients not supporting side-band we just send them the
174 emtpy PACK file.
174 emtpy PACK file.
175 """
175 """
176
176
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
178 response = []
178 response = []
179 proto = dulwich.protocol.Protocol(None, response.append)
179 proto = dulwich.protocol.Protocol(None, response.append)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
181
181
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
184 # produces a fatal error in the client:
184 # produces a fatal error in the client:
185 # fatal: error in sideband demultiplexer
185 # fatal: error in sideband demultiplexer
186 proto.write_sideband(
186 proto.write_sideband(
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
189 proto.write_sideband(
189 proto.write_sideband(
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
192
192
193 # writes b"0000" as default
193 # writes b"0000" as default
194 proto.write_pkt_line(None)
194 proto.write_pkt_line(None)
195
195
196 return response
196 return response
197 else:
197 else:
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
199
199
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
201 """
201 """
202 Given a list response we inject the post-pull messages.
202 Given a list response we inject the post-pull messages.
203
203
204 We only inject the messages if the client supports sideband, and the
204 We only inject the messages if the client supports sideband, and the
205 response has the format:
205 response has the format:
206 0008NAK\n...0000
206 0008NAK\n...0000
207
207
208 Note that we do not check the no-progress capability as by default, git
208 Note that we do not check the no-progress capability as by default, git
209 sends it, which effectively would block all messages.
209 sends it, which effectively would block all messages.
210 """
210 """
211
211
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
213 return response
213 return response
214
214
215 if not start_message and not end_message:
215 if not start_message and not end_message:
216 return response
216 return response
217
217
218 try:
218 try:
219 iter(response)
219 iter(response)
220 # iterator probably will work, we continue
220 # iterator probably will work, we continue
221 except TypeError:
221 except TypeError:
222 raise TypeError(f'response must be an iterator: got {type(response)}')
222 raise TypeError(f'response must be an iterator: got {type(response)}')
223 if isinstance(response, (list, tuple)):
223 if isinstance(response, (list, tuple)):
224 raise TypeError(f'response must be an iterator: got {type(response)}')
224 raise TypeError(f'response must be an iterator: got {type(response)}')
225
225
226 def injected_response():
226 def injected_response():
227
227
228 do_loop = 1
228 do_loop = 1
229 header_injected = 0
229 header_injected = 0
230 next_item = None
230 next_item = None
231 has_item = False
231 has_item = False
232 item = b''
232 item = b''
233
233
234 while do_loop:
234 while do_loop:
235
235
236 try:
236 try:
237 next_item = next(response)
237 next_item = next(response)
238 except StopIteration:
238 except StopIteration:
239 do_loop = 0
239 do_loop = 0
240
240
241 if has_item:
241 if has_item:
242 # last item ! alter it now
242 # last item ! alter it now
243 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
243 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
244 new_response = [item[:-4]]
244 new_response = [item[:-4]]
245 new_response.extend(self._get_messages(end_message, capabilities))
245 new_response.extend(self._get_messages(end_message, capabilities))
246 new_response.append(self.FLUSH_PACKET)
246 new_response.append(self.FLUSH_PACKET)
247 item = b''.join(new_response)
247 item = b''.join(new_response)
248
248
249 yield item
249 yield item
250
250
251 has_item = True
251 has_item = True
252 item = next_item
252 item = next_item
253
253
254 # alter item if it's the initial chunk
254 # alter item if it's the initial chunk
255 if not header_injected and item.startswith(b'0008NAK\n'):
255 if not header_injected and item.startswith(b'0008NAK\n'):
256 new_response = [b'0008NAK\n']
256 new_response = [b'0008NAK\n']
257 new_response.extend(self._get_messages(start_message, capabilities))
257 new_response.extend(self._get_messages(start_message, capabilities))
258 new_response.append(item[8:])
258 new_response.append(item[8:])
259 item = b''.join(new_response)
259 item = b''.join(new_response)
260 header_injected = 1
260 header_injected = 1
261
261
262 return injected_response()
262 return injected_response()
263
263
264 def _write_sideband_to_proto(self, proto, data, capabilities):
264 def _write_sideband_to_proto(self, proto, data, capabilities):
265 """
265 """
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
267
267
268 We do not use dulwich's write_sideband directly as it only supports
268 We do not use dulwich's write_sideband directly as it only supports
269 side-band-64k.
269 side-band-64k.
270 """
270 """
271 if not data:
271 if not data:
272 return
272 return
273
273
274 # N.B.(skreft): The values below are explained in the pack protocol
274 # N.B.(skreft): The values below are explained in the pack protocol
275 # documentation, section Packfile Data.
275 # documentation, section Packfile Data.
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
278 chunk_size = 65515
278 chunk_size = 65515
279 elif CAPABILITY_SIDE_BAND in capabilities:
279 elif CAPABILITY_SIDE_BAND in capabilities:
280 chunk_size = 995
280 chunk_size = 995
281 else:
281 else:
282 return
282 return
283
283
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
285
285
286 for chunk in chunker:
286 for chunk in chunker:
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
288
288
289 def _get_messages(self, data, capabilities):
289 def _get_messages(self, data, capabilities):
290 """Return a list with packets for sending data in sideband number 2."""
290 """Return a list with packets for sending data in sideband number 2."""
291 response = []
291 response = []
292 proto = dulwich.protocol.Protocol(None, response.append)
292 proto = dulwich.protocol.Protocol(None, response.append)
293
293
294 self._write_sideband_to_proto(proto, data, capabilities)
294 self._write_sideband_to_proto(proto, data, capabilities)
295
295
296 return response
296 return response
297
297
298 def backend(self, request, environ):
298 def backend(self, request, environ):
299 """
299 """
300 WSGI Response producer for HTTP POST Git Smart HTTP requests.
300 WSGI Response producer for HTTP POST Git Smart HTTP requests.
301 Reads commands and data from HTTP POST's body.
301 Reads commands and data from HTTP POST's body.
302 returns an iterator obj with contents of git command's
302 returns an iterator obj with contents of git command's
303 response to stdout
303 response to stdout
304 """
304 """
305 # TODO(skreft): think how we could detect an HTTPLockedException, as
305 # TODO(skreft): think how we could detect an HTTPLockedException, as
306 # we probably want to have the same mechanism used by mercurial and
306 # we probably want to have the same mechanism used by mercurial and
307 # simplevcs.
307 # simplevcs.
308 # For that we would need to parse the output of the command looking for
308 # For that we would need to parse the output of the command looking for
309 # some signs of the HTTPLockedError, parse the data and reraise it in
309 # some signs of the HTTPLockedError, parse the data and reraise it in
310 # pygrack. However, that would interfere with the streaming.
310 # pygrack. However, that would interfere with the streaming.
311 #
311 #
312 # Now the output of a blocked push is:
312 # Now the output of a blocked push is:
313 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
313 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
314 # POST git-receive-pack (1047 bytes)
314 # POST git-receive-pack (1047 bytes)
315 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
315 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
316 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
316 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
317 # ! [remote rejected] master -> master (pre-receive hook declined)
317 # ! [remote rejected] master -> master (pre-receive hook declined)
318 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
318 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
319
319
320 git_command = self._get_fixedpath(request.path_info)
320 git_command = self._get_fixedpath(request.path_info)
321 if git_command not in self.commands:
321 if git_command not in self.commands:
322 log.debug('command %s not allowed', git_command)
322 log.debug('command %s not allowed', git_command)
323 return exc.HTTPForbidden()
323 return exc.HTTPForbidden()
324
324
325 capabilities = None
325 capabilities = None
326 if git_command == 'git-upload-pack':
326 if git_command == 'git-upload-pack':
327 capabilities = self._get_want_capabilities(request)
327 capabilities = self._get_want_capabilities(request)
328
328
329 if 'CONTENT_LENGTH' in environ:
329 if 'CONTENT_LENGTH' in environ:
330 inputstream = FileWrapper(request.body_file_seekable,
330 inputstream = FileWrapper(request.body_file_seekable,
331 request.content_length)
331 request.content_length)
332 else:
332 else:
333 inputstream = request.body_file_seekable
333 inputstream = request.body_file_seekable
334
334
335 resp = Response()
335 resp = Response()
336 resp.content_type = f'application/x-{git_command}-result'
336 resp.content_type = f'application/x-{git_command}-result'
337 resp.charset = None
337 resp.charset = None
338
338
339 pre_pull_messages = ''
339 pre_pull_messages = ''
340 # Upload-pack == clone
340 # Upload-pack == clone
341 if git_command == 'git-upload-pack':
341 if git_command == 'git-upload-pack':
342 hook_response = hooks.git_pre_pull(self.extras)
342 hook_response = hooks.git_pre_pull(self.extras)
343 if hook_response.status != 0:
343 if hook_response.status != 0:
344 pre_pull_messages = hook_response.output
344 pre_pull_messages = hook_response.output
345 resp.app_iter = self._build_failed_pre_pull_response(
345 resp.app_iter = self._build_failed_pre_pull_response(
346 capabilities, pre_pull_messages)
346 capabilities, pre_pull_messages)
347 return resp
347 return resp
348
348
349 gitenv = dict(os.environ)
349 gitenv = dict(os.environ)
350 # forget all configs
350 # forget all configs
351 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
351 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
352 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
352 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
353 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
353 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
354 self.content_path]
354 self.content_path]
355 log.debug('handling cmd %s', cmd)
355 log.debug('handling cmd %s', cmd)
356
356
357 out = subprocessio.SubprocessIOChunker(
357 out = subprocessio.SubprocessIOChunker(
358 cmd,
358 cmd,
359 input_stream=inputstream,
359 input_stream=inputstream,
360 env=gitenv,
360 env=gitenv,
361 cwd=self.content_path,
361 cwd=self.content_path,
362 shell=False,
362 shell=False,
363 fail_on_stderr=False,
363 fail_on_stderr=False,
364 fail_on_return_code=False
364 fail_on_return_code=False
365 )
365 )
366
366
367 if self.update_server_info and git_command == 'git-receive-pack':
367 if self.update_server_info and git_command == 'git-receive-pack':
368 # We need to fully consume the iterator here, as the
368 # We need to fully consume the iterator here, as the
369 # update-server-info command needs to be run after the push.
369 # update-server-info command needs to be run after the push.
370 out = list(out)
370 out = list(out)
371
371
372 # Updating refs manually after each push.
372 # Updating refs manually after each push.
373 # This is required as some clients are exposing Git repos internally
373 # This is required as some clients are exposing Git repos internally
374 # with the dumb protocol.
374 # with the dumb protocol.
375 cmd = [self.git_path, 'update-server-info']
375 cmd = [self.git_path, 'update-server-info']
376 log.debug('handling cmd %s', cmd)
376 log.debug('handling cmd %s', cmd)
377 output = subprocessio.SubprocessIOChunker(
377 output = subprocessio.SubprocessIOChunker(
378 cmd,
378 cmd,
379 input_stream=inputstream,
379 input_stream=inputstream,
380 env=gitenv,
380 env=gitenv,
381 cwd=self.content_path,
381 cwd=self.content_path,
382 shell=False,
382 shell=False,
383 fail_on_stderr=False,
383 fail_on_stderr=False,
384 fail_on_return_code=False
384 fail_on_return_code=False
385 )
385 )
386 # Consume all the output so the subprocess finishes
386 # Consume all the output so the subprocess finishes
387 for _ in output:
387 for _ in output:
388 pass
388 pass
389
389
390 # Upload-pack == clone
390 # Upload-pack == clone
391 if git_command == 'git-upload-pack':
391 if git_command == 'git-upload-pack':
392 hook_response = hooks.git_post_pull(self.extras)
392 hook_response = hooks.git_post_pull(self.extras)
393 post_pull_messages = hook_response.output
393 post_pull_messages = hook_response.output
394 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
394 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
395 else:
395 else:
396 resp.app_iter = out
396 resp.app_iter = out
397
397
398 return resp
398 return resp
399
399
400 def __call__(self, environ, start_response):
400 def __call__(self, environ, start_response):
401 request = Request(environ)
401 request = Request(environ)
402 _path = self._get_fixedpath(request.path_info)
402 _path = self._get_fixedpath(request.path_info)
403 if _path.startswith('info/refs'):
403 if _path.startswith('info/refs'):
404 app = self.inforefs
404 app = self.inforefs
405 else:
405 else:
406 app = self.backend
406 app = self.backend
407
407
408 try:
408 try:
409 resp = app(request, environ)
409 resp = app(request, environ)
410 except exc.HTTPException as error:
410 except exc.HTTPException as error:
411 log.exception('HTTP Error')
411 log.exception('HTTP Error')
412 resp = error
412 resp = error
413 except Exception:
413 except Exception:
414 log.exception('Unknown error')
414 log.exception('Unknown error')
415 resp = exc.HTTPInternalServerError()
415 resp = exc.HTTPInternalServerError()
416
416
417 return resp(environ, start_response)
417 return resp(environ, start_response)
@@ -1,17 +1,17 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
@@ -1,1463 +1,1463 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib.request
24 import urllib.request
25 import urllib.parse
25 import urllib.parse
26 import urllib.error
26 import urllib.error
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = b'^{}'
53 PEELED_REF_MARKER = b'^{}'
54 HEAD_MARKER = b'HEAD'
54 HEAD_MARKER = b'HEAD'
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def reraise_safe_exceptions(func):
59 def reraise_safe_exceptions(func):
60 """Converts Dulwich exceptions to something neutral."""
60 """Converts Dulwich exceptions to something neutral."""
61
61
62 @wraps(func)
62 @wraps(func)
63 def wrapper(*args, **kwargs):
63 def wrapper(*args, **kwargs):
64 try:
64 try:
65 return func(*args, **kwargs)
65 return func(*args, **kwargs)
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 exc = exceptions.LookupException(org_exc=e)
67 exc = exceptions.LookupException(org_exc=e)
68 raise exc(safe_str(e))
68 raise exc(safe_str(e))
69 except (HangupException, UnexpectedCommandError) as e:
69 except (HangupException, UnexpectedCommandError) as e:
70 exc = exceptions.VcsException(org_exc=e)
70 exc = exceptions.VcsException(org_exc=e)
71 raise exc(safe_str(e))
71 raise exc(safe_str(e))
72 except Exception:
72 except Exception:
73 # NOTE(marcink): because of how dulwich handles some exceptions
73 # NOTE(marcink): because of how dulwich handles some exceptions
74 # (KeyError on empty repos), we cannot track this and catch all
74 # (KeyError on empty repos), we cannot track this and catch all
75 # exceptions, it's an exceptions from other handlers
75 # exceptions, it's an exceptions from other handlers
76 #if not hasattr(e, '_vcs_kind'):
76 #if not hasattr(e, '_vcs_kind'):
77 #log.exception("Unhandled exception in git remote call")
77 #log.exception("Unhandled exception in git remote call")
78 #raise_from_original(exceptions.UnhandledException)
78 #raise_from_original(exceptions.UnhandledException)
79 raise
79 raise
80 return wrapper
80 return wrapper
81
81
82
82
83 class Repo(DulwichRepo):
83 class Repo(DulwichRepo):
84 """
84 """
85 A wrapper for dulwich Repo class.
85 A wrapper for dulwich Repo class.
86
86
87 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
87 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
88 "Too many open files" error. We need to close all opened file descriptors
88 "Too many open files" error. We need to close all opened file descriptors
89 once the repo object is destroyed.
89 once the repo object is destroyed.
90 """
90 """
91 def __del__(self):
91 def __del__(self):
92 if hasattr(self, 'object_store'):
92 if hasattr(self, 'object_store'):
93 self.close()
93 self.close()
94
94
95
95
96 class Repository(LibGit2Repo):
96 class Repository(LibGit2Repo):
97
97
98 def __enter__(self):
98 def __enter__(self):
99 return self
99 return self
100
100
101 def __exit__(self, exc_type, exc_val, exc_tb):
101 def __exit__(self, exc_type, exc_val, exc_tb):
102 self.free()
102 self.free()
103
103
104
104
105 class GitFactory(RepoFactory):
105 class GitFactory(RepoFactory):
106 repo_type = 'git'
106 repo_type = 'git'
107
107
108 def _create_repo(self, wire, create, use_libgit2=False):
108 def _create_repo(self, wire, create, use_libgit2=False):
109 if use_libgit2:
109 if use_libgit2:
110 repo = Repository(safe_bytes(wire['path']))
110 repo = Repository(safe_bytes(wire['path']))
111 else:
111 else:
112 # dulwich mode
112 # dulwich mode
113 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
113 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
114 repo = Repo(repo_path)
114 repo = Repo(repo_path)
115
115
116 log.debug('repository created: got GIT object: %s', repo)
116 log.debug('repository created: got GIT object: %s', repo)
117 return repo
117 return repo
118
118
119 def repo(self, wire, create=False, use_libgit2=False):
119 def repo(self, wire, create=False, use_libgit2=False):
120 """
120 """
121 Get a repository instance for the given path.
121 Get a repository instance for the given path.
122 """
122 """
123 return self._create_repo(wire, create, use_libgit2)
123 return self._create_repo(wire, create, use_libgit2)
124
124
125 def repo_libgit2(self, wire):
125 def repo_libgit2(self, wire):
126 return self.repo(wire, use_libgit2=True)
126 return self.repo(wire, use_libgit2=True)
127
127
128
128
129 def create_signature_from_string(author_str, **kwargs):
129 def create_signature_from_string(author_str, **kwargs):
130 """
130 """
131 Creates a pygit2.Signature object from a string of the format 'Name <email>'.
131 Creates a pygit2.Signature object from a string of the format 'Name <email>'.
132
132
133 :param author_str: String of the format 'Name <email>'
133 :param author_str: String of the format 'Name <email>'
134 :return: pygit2.Signature object
134 :return: pygit2.Signature object
135 """
135 """
136 match = re.match(r'^(.+) <(.+)>$', author_str)
136 match = re.match(r'^(.+) <(.+)>$', author_str)
137 if match is None:
137 if match is None:
138 raise ValueError(f"Invalid format: {author_str}")
138 raise ValueError(f"Invalid format: {author_str}")
139
139
140 name, email = match.groups()
140 name, email = match.groups()
141 return pygit2.Signature(name, email, **kwargs)
141 return pygit2.Signature(name, email, **kwargs)
142
142
143
143
144 def get_obfuscated_url(url_obj):
144 def get_obfuscated_url(url_obj):
145 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
145 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
146 url_obj.query = obfuscate_qs(url_obj.query)
146 url_obj.query = obfuscate_qs(url_obj.query)
147 obfuscated_uri = str(url_obj)
147 obfuscated_uri = str(url_obj)
148 return obfuscated_uri
148 return obfuscated_uri
149
149
150
150
151 class GitRemote(RemoteBase):
151 class GitRemote(RemoteBase):
152
152
153 def __init__(self, factory):
153 def __init__(self, factory):
154 self._factory = factory
154 self._factory = factory
155 self._bulk_methods = {
155 self._bulk_methods = {
156 "date": self.date,
156 "date": self.date,
157 "author": self.author,
157 "author": self.author,
158 "branch": self.branch,
158 "branch": self.branch,
159 "message": self.message,
159 "message": self.message,
160 "parents": self.parents,
160 "parents": self.parents,
161 "_commit": self.revision,
161 "_commit": self.revision,
162 }
162 }
163 self._bulk_file_methods = {
163 self._bulk_file_methods = {
164 "size": self.get_node_size,
164 "size": self.get_node_size,
165 "data": self.get_node_data,
165 "data": self.get_node_data,
166 "flags": self.get_node_flags,
166 "flags": self.get_node_flags,
167 "is_binary": self.get_node_is_binary,
167 "is_binary": self.get_node_is_binary,
168 "md5": self.md5_hash
168 "md5": self.md5_hash
169 }
169 }
170
170
171 def _wire_to_config(self, wire):
171 def _wire_to_config(self, wire):
172 if 'config' in wire:
172 if 'config' in wire:
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
174 return {}
174 return {}
175
175
176 def _remote_conf(self, config):
176 def _remote_conf(self, config):
177 params = [
177 params = [
178 '-c', 'core.askpass=""',
178 '-c', 'core.askpass=""',
179 ]
179 ]
180 ssl_cert_dir = config.get('vcs_ssl_dir')
180 ssl_cert_dir = config.get('vcs_ssl_dir')
181 if ssl_cert_dir:
181 if ssl_cert_dir:
182 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
182 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
183 return params
183 return params
184
184
185 @reraise_safe_exceptions
185 @reraise_safe_exceptions
186 def discover_git_version(self):
186 def discover_git_version(self):
187 stdout, _ = self.run_git_command(
187 stdout, _ = self.run_git_command(
188 {}, ['--version'], _bare=True, _safe=True)
188 {}, ['--version'], _bare=True, _safe=True)
189 prefix = b'git version'
189 prefix = b'git version'
190 if stdout.startswith(prefix):
190 if stdout.startswith(prefix):
191 stdout = stdout[len(prefix):]
191 stdout = stdout[len(prefix):]
192 return safe_str(stdout.strip())
192 return safe_str(stdout.strip())
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def is_empty(self, wire):
195 def is_empty(self, wire):
196 repo_init = self._factory.repo_libgit2(wire)
196 repo_init = self._factory.repo_libgit2(wire)
197 with repo_init as repo:
197 with repo_init as repo:
198
198
199 try:
199 try:
200 has_head = repo.head.name
200 has_head = repo.head.name
201 if has_head:
201 if has_head:
202 return False
202 return False
203
203
204 # NOTE(marcink): check again using more expensive method
204 # NOTE(marcink): check again using more expensive method
205 return repo.is_empty
205 return repo.is_empty
206 except Exception:
206 except Exception:
207 pass
207 pass
208
208
209 return True
209 return True
210
210
211 @reraise_safe_exceptions
211 @reraise_safe_exceptions
212 def assert_correct_path(self, wire):
212 def assert_correct_path(self, wire):
213 cache_on, context_uid, repo_id = self._cache_on(wire)
213 cache_on, context_uid, repo_id = self._cache_on(wire)
214 region = self._region(wire)
214 region = self._region(wire)
215
215
216 @region.conditional_cache_on_arguments(condition=cache_on)
216 @region.conditional_cache_on_arguments(condition=cache_on)
217 def _assert_correct_path(_context_uid, _repo_id, fast_check):
217 def _assert_correct_path(_context_uid, _repo_id, fast_check):
218 if fast_check:
218 if fast_check:
219 path = safe_str(wire['path'])
219 path = safe_str(wire['path'])
220 if pygit2.discover_repository(path):
220 if pygit2.discover_repository(path):
221 return True
221 return True
222 return False
222 return False
223 else:
223 else:
224 try:
224 try:
225 repo_init = self._factory.repo_libgit2(wire)
225 repo_init = self._factory.repo_libgit2(wire)
226 with repo_init:
226 with repo_init:
227 pass
227 pass
228 except pygit2.GitError:
228 except pygit2.GitError:
229 path = wire.get('path')
229 path = wire.get('path')
230 tb = traceback.format_exc()
230 tb = traceback.format_exc()
231 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
231 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
232 return False
232 return False
233 return True
233 return True
234
234
235 return _assert_correct_path(context_uid, repo_id, True)
235 return _assert_correct_path(context_uid, repo_id, True)
236
236
237 @reraise_safe_exceptions
237 @reraise_safe_exceptions
238 def bare(self, wire):
238 def bare(self, wire):
239 repo_init = self._factory.repo_libgit2(wire)
239 repo_init = self._factory.repo_libgit2(wire)
240 with repo_init as repo:
240 with repo_init as repo:
241 return repo.is_bare
241 return repo.is_bare
242
242
243 @reraise_safe_exceptions
243 @reraise_safe_exceptions
244 def get_node_data(self, wire, commit_id, path):
244 def get_node_data(self, wire, commit_id, path):
245 repo_init = self._factory.repo_libgit2(wire)
245 repo_init = self._factory.repo_libgit2(wire)
246 with repo_init as repo:
246 with repo_init as repo:
247 commit = repo[commit_id]
247 commit = repo[commit_id]
248 blob_obj = commit.tree[path]
248 blob_obj = commit.tree[path]
249
249
250 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
250 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
251 raise exceptions.LookupException()(
251 raise exceptions.LookupException()(
252 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
252 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
253
253
254 return BytesEnvelope(blob_obj.data)
254 return BytesEnvelope(blob_obj.data)
255
255
256 @reraise_safe_exceptions
256 @reraise_safe_exceptions
257 def get_node_size(self, wire, commit_id, path):
257 def get_node_size(self, wire, commit_id, path):
258 repo_init = self._factory.repo_libgit2(wire)
258 repo_init = self._factory.repo_libgit2(wire)
259 with repo_init as repo:
259 with repo_init as repo:
260 commit = repo[commit_id]
260 commit = repo[commit_id]
261 blob_obj = commit.tree[path]
261 blob_obj = commit.tree[path]
262
262
263 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
263 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
264 raise exceptions.LookupException()(
264 raise exceptions.LookupException()(
265 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
265 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
266
266
267 return blob_obj.size
267 return blob_obj.size
268
268
269 @reraise_safe_exceptions
269 @reraise_safe_exceptions
270 def get_node_flags(self, wire, commit_id, path):
270 def get_node_flags(self, wire, commit_id, path):
271 repo_init = self._factory.repo_libgit2(wire)
271 repo_init = self._factory.repo_libgit2(wire)
272 with repo_init as repo:
272 with repo_init as repo:
273 commit = repo[commit_id]
273 commit = repo[commit_id]
274 blob_obj = commit.tree[path]
274 blob_obj = commit.tree[path]
275
275
276 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
276 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
277 raise exceptions.LookupException()(
277 raise exceptions.LookupException()(
278 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
278 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
279
279
280 return blob_obj.filemode
280 return blob_obj.filemode
281
281
282 @reraise_safe_exceptions
282 @reraise_safe_exceptions
283 def get_node_is_binary(self, wire, commit_id, path):
283 def get_node_is_binary(self, wire, commit_id, path):
284 repo_init = self._factory.repo_libgit2(wire)
284 repo_init = self._factory.repo_libgit2(wire)
285 with repo_init as repo:
285 with repo_init as repo:
286 commit = repo[commit_id]
286 commit = repo[commit_id]
287 blob_obj = commit.tree[path]
287 blob_obj = commit.tree[path]
288
288
289 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
289 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
290 raise exceptions.LookupException()(
290 raise exceptions.LookupException()(
291 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
291 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
292
292
293 return blob_obj.is_binary
293 return blob_obj.is_binary
294
294
295 @reraise_safe_exceptions
295 @reraise_safe_exceptions
296 def blob_as_pretty_string(self, wire, sha):
296 def blob_as_pretty_string(self, wire, sha):
297 repo_init = self._factory.repo_libgit2(wire)
297 repo_init = self._factory.repo_libgit2(wire)
298 with repo_init as repo:
298 with repo_init as repo:
299 blob_obj = repo[sha]
299 blob_obj = repo[sha]
300 return BytesEnvelope(blob_obj.data)
300 return BytesEnvelope(blob_obj.data)
301
301
302 @reraise_safe_exceptions
302 @reraise_safe_exceptions
303 def blob_raw_length(self, wire, sha):
303 def blob_raw_length(self, wire, sha):
304 cache_on, context_uid, repo_id = self._cache_on(wire)
304 cache_on, context_uid, repo_id = self._cache_on(wire)
305 region = self._region(wire)
305 region = self._region(wire)
306
306
307 @region.conditional_cache_on_arguments(condition=cache_on)
307 @region.conditional_cache_on_arguments(condition=cache_on)
308 def _blob_raw_length(_repo_id, _sha):
308 def _blob_raw_length(_repo_id, _sha):
309
309
310 repo_init = self._factory.repo_libgit2(wire)
310 repo_init = self._factory.repo_libgit2(wire)
311 with repo_init as repo:
311 with repo_init as repo:
312 blob = repo[sha]
312 blob = repo[sha]
313 return blob.size
313 return blob.size
314
314
315 return _blob_raw_length(repo_id, sha)
315 return _blob_raw_length(repo_id, sha)
316
316
317 def _parse_lfs_pointer(self, raw_content):
317 def _parse_lfs_pointer(self, raw_content):
318 spec_string = b'version https://git-lfs.github.com/spec'
318 spec_string = b'version https://git-lfs.github.com/spec'
319 if raw_content and raw_content.startswith(spec_string):
319 if raw_content and raw_content.startswith(spec_string):
320
320
321 pattern = re.compile(rb"""
321 pattern = re.compile(rb"""
322 (?:\n)?
322 (?:\n)?
323 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
323 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
324 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
324 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
325 ^size[ ](?P<oid_size>[0-9]+)\n
325 ^size[ ](?P<oid_size>[0-9]+)\n
326 (?:\n)?
326 (?:\n)?
327 """, re.VERBOSE | re.MULTILINE)
327 """, re.VERBOSE | re.MULTILINE)
328 match = pattern.match(raw_content)
328 match = pattern.match(raw_content)
329 if match:
329 if match:
330 return match.groupdict()
330 return match.groupdict()
331
331
332 return {}
332 return {}
333
333
334 @reraise_safe_exceptions
334 @reraise_safe_exceptions
335 def is_large_file(self, wire, commit_id):
335 def is_large_file(self, wire, commit_id):
336 cache_on, context_uid, repo_id = self._cache_on(wire)
336 cache_on, context_uid, repo_id = self._cache_on(wire)
337 region = self._region(wire)
337 region = self._region(wire)
338
338
339 @region.conditional_cache_on_arguments(condition=cache_on)
339 @region.conditional_cache_on_arguments(condition=cache_on)
340 def _is_large_file(_repo_id, _sha):
340 def _is_large_file(_repo_id, _sha):
341 repo_init = self._factory.repo_libgit2(wire)
341 repo_init = self._factory.repo_libgit2(wire)
342 with repo_init as repo:
342 with repo_init as repo:
343 blob = repo[commit_id]
343 blob = repo[commit_id]
344 if blob.is_binary:
344 if blob.is_binary:
345 return {}
345 return {}
346
346
347 return self._parse_lfs_pointer(blob.data)
347 return self._parse_lfs_pointer(blob.data)
348
348
349 return _is_large_file(repo_id, commit_id)
349 return _is_large_file(repo_id, commit_id)
350
350
351 @reraise_safe_exceptions
351 @reraise_safe_exceptions
352 def is_binary(self, wire, tree_id):
352 def is_binary(self, wire, tree_id):
353 cache_on, context_uid, repo_id = self._cache_on(wire)
353 cache_on, context_uid, repo_id = self._cache_on(wire)
354 region = self._region(wire)
354 region = self._region(wire)
355
355
356 @region.conditional_cache_on_arguments(condition=cache_on)
356 @region.conditional_cache_on_arguments(condition=cache_on)
357 def _is_binary(_repo_id, _tree_id):
357 def _is_binary(_repo_id, _tree_id):
358 repo_init = self._factory.repo_libgit2(wire)
358 repo_init = self._factory.repo_libgit2(wire)
359 with repo_init as repo:
359 with repo_init as repo:
360 blob_obj = repo[tree_id]
360 blob_obj = repo[tree_id]
361 return blob_obj.is_binary
361 return blob_obj.is_binary
362
362
363 return _is_binary(repo_id, tree_id)
363 return _is_binary(repo_id, tree_id)
364
364
365 @reraise_safe_exceptions
365 @reraise_safe_exceptions
366 def md5_hash(self, wire, commit_id, path):
366 def md5_hash(self, wire, commit_id, path):
367 cache_on, context_uid, repo_id = self._cache_on(wire)
367 cache_on, context_uid, repo_id = self._cache_on(wire)
368 region = self._region(wire)
368 region = self._region(wire)
369
369
370 @region.conditional_cache_on_arguments(condition=cache_on)
370 @region.conditional_cache_on_arguments(condition=cache_on)
371 def _md5_hash(_repo_id, _commit_id, _path):
371 def _md5_hash(_repo_id, _commit_id, _path):
372 repo_init = self._factory.repo_libgit2(wire)
372 repo_init = self._factory.repo_libgit2(wire)
373 with repo_init as repo:
373 with repo_init as repo:
374 commit = repo[_commit_id]
374 commit = repo[_commit_id]
375 blob_obj = commit.tree[_path]
375 blob_obj = commit.tree[_path]
376
376
377 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
377 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
378 raise exceptions.LookupException()(
378 raise exceptions.LookupException()(
379 f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')
379 f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')
380
380
381 return ''
381 return ''
382
382
383 return _md5_hash(repo_id, commit_id, path)
383 return _md5_hash(repo_id, commit_id, path)
384
384
385 @reraise_safe_exceptions
385 @reraise_safe_exceptions
386 def in_largefiles_store(self, wire, oid):
386 def in_largefiles_store(self, wire, oid):
387 conf = self._wire_to_config(wire)
387 conf = self._wire_to_config(wire)
388 repo_init = self._factory.repo_libgit2(wire)
388 repo_init = self._factory.repo_libgit2(wire)
389 with repo_init as repo:
389 with repo_init as repo:
390 repo_name = repo.path
390 repo_name = repo.path
391
391
392 store_location = conf.get('vcs_git_lfs_store_location')
392 store_location = conf.get('vcs_git_lfs_store_location')
393 if store_location:
393 if store_location:
394
394
395 store = LFSOidStore(
395 store = LFSOidStore(
396 oid=oid, repo=repo_name, store_location=store_location)
396 oid=oid, repo=repo_name, store_location=store_location)
397 return store.has_oid()
397 return store.has_oid()
398
398
399 return False
399 return False
400
400
401 @reraise_safe_exceptions
401 @reraise_safe_exceptions
402 def store_path(self, wire, oid):
402 def store_path(self, wire, oid):
403 conf = self._wire_to_config(wire)
403 conf = self._wire_to_config(wire)
404 repo_init = self._factory.repo_libgit2(wire)
404 repo_init = self._factory.repo_libgit2(wire)
405 with repo_init as repo:
405 with repo_init as repo:
406 repo_name = repo.path
406 repo_name = repo.path
407
407
408 store_location = conf.get('vcs_git_lfs_store_location')
408 store_location = conf.get('vcs_git_lfs_store_location')
409 if store_location:
409 if store_location:
410 store = LFSOidStore(
410 store = LFSOidStore(
411 oid=oid, repo=repo_name, store_location=store_location)
411 oid=oid, repo=repo_name, store_location=store_location)
412 return store.oid_path
412 return store.oid_path
413 raise ValueError(f'Unable to fetch oid with path {oid}')
413 raise ValueError(f'Unable to fetch oid with path {oid}')
414
414
415 @reraise_safe_exceptions
415 @reraise_safe_exceptions
416 def bulk_request(self, wire, rev, pre_load):
416 def bulk_request(self, wire, rev, pre_load):
417 cache_on, context_uid, repo_id = self._cache_on(wire)
417 cache_on, context_uid, repo_id = self._cache_on(wire)
418 region = self._region(wire)
418 region = self._region(wire)
419
419
420 @region.conditional_cache_on_arguments(condition=cache_on)
420 @region.conditional_cache_on_arguments(condition=cache_on)
421 def _bulk_request(_repo_id, _rev, _pre_load):
421 def _bulk_request(_repo_id, _rev, _pre_load):
422 result = {}
422 result = {}
423 for attr in pre_load:
423 for attr in pre_load:
424 try:
424 try:
425 method = self._bulk_methods[attr]
425 method = self._bulk_methods[attr]
426 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
426 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
427 args = [wire, rev]
427 args = [wire, rev]
428 result[attr] = method(*args)
428 result[attr] = method(*args)
429 except KeyError as e:
429 except KeyError as e:
430 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
430 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
431 return result
431 return result
432
432
433 return _bulk_request(repo_id, rev, sorted(pre_load))
433 return _bulk_request(repo_id, rev, sorted(pre_load))
434
434
435 @reraise_safe_exceptions
435 @reraise_safe_exceptions
436 def bulk_file_request(self, wire, commit_id, path, pre_load):
436 def bulk_file_request(self, wire, commit_id, path, pre_load):
437 cache_on, context_uid, repo_id = self._cache_on(wire)
437 cache_on, context_uid, repo_id = self._cache_on(wire)
438 region = self._region(wire)
438 region = self._region(wire)
439
439
440 @region.conditional_cache_on_arguments(condition=cache_on)
440 @region.conditional_cache_on_arguments(condition=cache_on)
441 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
441 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
442 result = {}
442 result = {}
443 for attr in pre_load:
443 for attr in pre_load:
444 try:
444 try:
445 method = self._bulk_file_methods[attr]
445 method = self._bulk_file_methods[attr]
446 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
446 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
447 result[attr] = method(wire, _commit_id, _path)
447 result[attr] = method(wire, _commit_id, _path)
448 except KeyError as e:
448 except KeyError as e:
449 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
449 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
450 return BinaryEnvelope(result)
450 return BinaryEnvelope(result)
451
451
452 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
452 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
453
453
454 def _build_opener(self, url: str):
454 def _build_opener(self, url: str):
455 handlers = []
455 handlers = []
456 url_obj = url_parser(safe_bytes(url))
456 url_obj = url_parser(safe_bytes(url))
457 authinfo = url_obj.authinfo()[1]
457 authinfo = url_obj.authinfo()[1]
458
458
459 if authinfo:
459 if authinfo:
460 # create a password manager
460 # create a password manager
461 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
461 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
462 passmgr.add_password(*authinfo)
462 passmgr.add_password(*authinfo)
463
463
464 handlers.extend((httpbasicauthhandler(passmgr),
464 handlers.extend((httpbasicauthhandler(passmgr),
465 httpdigestauthhandler(passmgr)))
465 httpdigestauthhandler(passmgr)))
466
466
467 return urllib.request.build_opener(*handlers)
467 return urllib.request.build_opener(*handlers)
468
468
469 @reraise_safe_exceptions
469 @reraise_safe_exceptions
470 def check_url(self, url, config):
470 def check_url(self, url, config):
471 url_obj = url_parser(safe_bytes(url))
471 url_obj = url_parser(safe_bytes(url))
472
472
473 test_uri = safe_str(url_obj.authinfo()[0])
473 test_uri = safe_str(url_obj.authinfo()[0])
474 obfuscated_uri = get_obfuscated_url(url_obj)
474 obfuscated_uri = get_obfuscated_url(url_obj)
475
475
476 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
476 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
477
477
478 if not test_uri.endswith('info/refs'):
478 if not test_uri.endswith('info/refs'):
479 test_uri = test_uri.rstrip('/') + '/info/refs'
479 test_uri = test_uri.rstrip('/') + '/info/refs'
480
480
481 o = self._build_opener(test_uri)
481 o = self._build_opener(test_uri)
482 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
482 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
483
483
484 q = {"service": 'git-upload-pack'}
484 q = {"service": 'git-upload-pack'}
485 qs = '?%s' % urllib.parse.urlencode(q)
485 qs = '?%s' % urllib.parse.urlencode(q)
486 cu = "{}{}".format(test_uri, qs)
486 cu = "{}{}".format(test_uri, qs)
487 req = urllib.request.Request(cu, None, {})
487 req = urllib.request.Request(cu, None, {})
488
488
489 try:
489 try:
490 log.debug("Trying to open URL %s", obfuscated_uri)
490 log.debug("Trying to open URL %s", obfuscated_uri)
491 resp = o.open(req)
491 resp = o.open(req)
492 if resp.code != 200:
492 if resp.code != 200:
493 raise exceptions.URLError()('Return Code is not 200')
493 raise exceptions.URLError()('Return Code is not 200')
494 except Exception as e:
494 except Exception as e:
495 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
495 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
496 # means it cannot be cloned
496 # means it cannot be cloned
497 raise exceptions.URLError(e)("[{}] org_exc: {}".format(obfuscated_uri, e))
497 raise exceptions.URLError(e)("[{}] org_exc: {}".format(obfuscated_uri, e))
498
498
499 # now detect if it's proper git repo
499 # now detect if it's proper git repo
500 gitdata: bytes = resp.read()
500 gitdata: bytes = resp.read()
501
501
502 if b'service=git-upload-pack' in gitdata:
502 if b'service=git-upload-pack' in gitdata:
503 pass
503 pass
504 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
504 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
505 # old style git can return some other format !
505 # old style git can return some other format !
506 pass
506 pass
507 else:
507 else:
508 e = None
508 e = None
509 raise exceptions.URLError(e)(
509 raise exceptions.URLError(e)(
510 "url [%s] does not look like an hg repo org_exc: %s"
510 "url [%s] does not look like an hg repo org_exc: %s"
511 % (obfuscated_uri, e))
511 % (obfuscated_uri, e))
512
512
513 return True
513 return True
514
514
515 @reraise_safe_exceptions
515 @reraise_safe_exceptions
516 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
516 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
517 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
517 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
518 remote_refs = self.pull(wire, url, apply_refs=False)
518 remote_refs = self.pull(wire, url, apply_refs=False)
519 repo = self._factory.repo(wire)
519 repo = self._factory.repo(wire)
520 if isinstance(valid_refs, list):
520 if isinstance(valid_refs, list):
521 valid_refs = tuple(valid_refs)
521 valid_refs = tuple(valid_refs)
522
522
523 for k in remote_refs:
523 for k in remote_refs:
524 # only parse heads/tags and skip so called deferred tags
524 # only parse heads/tags and skip so called deferred tags
525 if k.startswith(valid_refs) and not k.endswith(deferred):
525 if k.startswith(valid_refs) and not k.endswith(deferred):
526 repo[k] = remote_refs[k]
526 repo[k] = remote_refs[k]
527
527
528 if update_after_clone:
528 if update_after_clone:
529 # we want to checkout HEAD
529 # we want to checkout HEAD
530 repo["HEAD"] = remote_refs["HEAD"]
530 repo["HEAD"] = remote_refs["HEAD"]
531 index.build_index_from_tree(repo.path, repo.index_path(),
531 index.build_index_from_tree(repo.path, repo.index_path(),
532 repo.object_store, repo["HEAD"].tree)
532 repo.object_store, repo["HEAD"].tree)
533
533
534 @reraise_safe_exceptions
534 @reraise_safe_exceptions
535 def branch(self, wire, commit_id):
535 def branch(self, wire, commit_id):
536 cache_on, context_uid, repo_id = self._cache_on(wire)
536 cache_on, context_uid, repo_id = self._cache_on(wire)
537 region = self._region(wire)
537 region = self._region(wire)
538
538
539 @region.conditional_cache_on_arguments(condition=cache_on)
539 @region.conditional_cache_on_arguments(condition=cache_on)
540 def _branch(_context_uid, _repo_id, _commit_id):
540 def _branch(_context_uid, _repo_id, _commit_id):
541 regex = re.compile('^refs/heads')
541 regex = re.compile('^refs/heads')
542
542
543 def filter_with(ref):
543 def filter_with(ref):
544 return regex.match(ref[0]) and ref[1] == _commit_id
544 return regex.match(ref[0]) and ref[1] == _commit_id
545
545
546 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
546 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
547 return [x[0].split('refs/heads/')[-1] for x in branches]
547 return [x[0].split('refs/heads/')[-1] for x in branches]
548
548
549 return _branch(context_uid, repo_id, commit_id)
549 return _branch(context_uid, repo_id, commit_id)
550
550
551 @reraise_safe_exceptions
551 @reraise_safe_exceptions
552 def commit_branches(self, wire, commit_id):
552 def commit_branches(self, wire, commit_id):
553 cache_on, context_uid, repo_id = self._cache_on(wire)
553 cache_on, context_uid, repo_id = self._cache_on(wire)
554 region = self._region(wire)
554 region = self._region(wire)
555
555
556 @region.conditional_cache_on_arguments(condition=cache_on)
556 @region.conditional_cache_on_arguments(condition=cache_on)
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
558 repo_init = self._factory.repo_libgit2(wire)
558 repo_init = self._factory.repo_libgit2(wire)
559 with repo_init as repo:
559 with repo_init as repo:
560 branches = [x for x in repo.branches.with_commit(_commit_id)]
560 branches = [x for x in repo.branches.with_commit(_commit_id)]
561 return branches
561 return branches
562
562
563 return _commit_branches(context_uid, repo_id, commit_id)
563 return _commit_branches(context_uid, repo_id, commit_id)
564
564
565 @reraise_safe_exceptions
565 @reraise_safe_exceptions
566 def add_object(self, wire, content):
566 def add_object(self, wire, content):
567 repo_init = self._factory.repo_libgit2(wire)
567 repo_init = self._factory.repo_libgit2(wire)
568 with repo_init as repo:
568 with repo_init as repo:
569 blob = objects.Blob()
569 blob = objects.Blob()
570 blob.set_raw_string(content)
570 blob.set_raw_string(content)
571 repo.object_store.add_object(blob)
571 repo.object_store.add_object(blob)
572 return blob.id
572 return blob.id
573
573
574 @reraise_safe_exceptions
574 @reraise_safe_exceptions
575 def create_commit(self, wire, author, committer, message, branch, new_tree_id, date_args: list[int, int] = None):
575 def create_commit(self, wire, author, committer, message, branch, new_tree_id, date_args: list[int, int] = None):
576 repo_init = self._factory.repo_libgit2(wire)
576 repo_init = self._factory.repo_libgit2(wire)
577 with repo_init as repo:
577 with repo_init as repo:
578
578
579 if date_args:
579 if date_args:
580 current_time, offset = date_args
580 current_time, offset = date_args
581
581
582 kw = {
582 kw = {
583 'time': current_time,
583 'time': current_time,
584 'offset': offset
584 'offset': offset
585 }
585 }
586 author = create_signature_from_string(author, **kw)
586 author = create_signature_from_string(author, **kw)
587 committer = create_signature_from_string(committer, **kw)
587 committer = create_signature_from_string(committer, **kw)
588
588
589 tree = new_tree_id
589 tree = new_tree_id
590 if isinstance(tree, (bytes, str)):
590 if isinstance(tree, (bytes, str)):
591 # validate this tree is in the repo...
591 # validate this tree is in the repo...
592 tree = repo[safe_str(tree)].id
592 tree = repo[safe_str(tree)].id
593
593
594 parents = []
594 parents = []
595 # ensure we COMMIT on top of given branch head
595 # ensure we COMMIT on top of given branch head
596 # check if this repo has ANY branches, otherwise it's a new branch case we need to make
596 # check if this repo has ANY branches, otherwise it's a new branch case we need to make
597 if branch in repo.branches.local:
597 if branch in repo.branches.local:
598 parents += [repo.branches[branch].target]
598 parents += [repo.branches[branch].target]
599 elif [x for x in repo.branches.local]:
599 elif [x for x in repo.branches.local]:
600 parents += [repo.head.target]
600 parents += [repo.head.target]
601 #else:
601 #else:
602 # in case we want to commit on new branch we create it on top of HEAD
602 # in case we want to commit on new branch we create it on top of HEAD
603 #repo.branches.local.create(branch, repo.revparse_single('HEAD'))
603 #repo.branches.local.create(branch, repo.revparse_single('HEAD'))
604
604
605 # # Create a new commit
605 # # Create a new commit
606 commit_oid = repo.create_commit(
606 commit_oid = repo.create_commit(
607 f'refs/heads/{branch}', # the name of the reference to update
607 f'refs/heads/{branch}', # the name of the reference to update
608 author, # the author of the commit
608 author, # the author of the commit
609 committer, # the committer of the commit
609 committer, # the committer of the commit
610 message, # the commit message
610 message, # the commit message
611 tree, # the tree produced by the index
611 tree, # the tree produced by the index
612 parents # list of parents for the new commit, usually just one,
612 parents # list of parents for the new commit, usually just one,
613 )
613 )
614
614
615 new_commit_id = safe_str(commit_oid)
615 new_commit_id = safe_str(commit_oid)
616
616
617 return new_commit_id
617 return new_commit_id
618
618
619 @reraise_safe_exceptions
619 @reraise_safe_exceptions
620 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
620 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
621
621
622 def mode2pygit(mode):
622 def mode2pygit(mode):
623 """
623 """
624 git only supports two filemode 644 and 755
624 git only supports two filemode 644 and 755
625
625
626 0o100755 -> 33261
626 0o100755 -> 33261
627 0o100644 -> 33188
627 0o100644 -> 33188
628 """
628 """
629 return {
629 return {
630 0o100644: pygit2.GIT_FILEMODE_BLOB,
630 0o100644: pygit2.GIT_FILEMODE_BLOB,
631 0o100755: pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
631 0o100755: pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
632 0o120000: pygit2.GIT_FILEMODE_LINK
632 0o120000: pygit2.GIT_FILEMODE_LINK
633 }.get(mode) or pygit2.GIT_FILEMODE_BLOB
633 }.get(mode) or pygit2.GIT_FILEMODE_BLOB
634
634
635 repo_init = self._factory.repo_libgit2(wire)
635 repo_init = self._factory.repo_libgit2(wire)
636 with repo_init as repo:
636 with repo_init as repo:
637 repo_index = repo.index
637 repo_index = repo.index
638
638
639 for pathspec in updated:
639 for pathspec in updated:
640 blob_id = repo.create_blob(pathspec['content'])
640 blob_id = repo.create_blob(pathspec['content'])
641 ie = pygit2.IndexEntry(pathspec['path'], blob_id, mode2pygit(pathspec['mode']))
641 ie = pygit2.IndexEntry(pathspec['path'], blob_id, mode2pygit(pathspec['mode']))
642 repo_index.add(ie)
642 repo_index.add(ie)
643
643
644 for pathspec in removed:
644 for pathspec in removed:
645 repo_index.remove(pathspec)
645 repo_index.remove(pathspec)
646
646
647 # Write changes to the index
647 # Write changes to the index
648 repo_index.write()
648 repo_index.write()
649
649
650 # Create a tree from the updated index
650 # Create a tree from the updated index
651 commit_tree = repo_index.write_tree()
651 commit_tree = repo_index.write_tree()
652
652
653 new_tree_id = commit_tree
653 new_tree_id = commit_tree
654
654
655 author = commit_data['author']
655 author = commit_data['author']
656 committer = commit_data['committer']
656 committer = commit_data['committer']
657 message = commit_data['message']
657 message = commit_data['message']
658
658
659 date_args = [int(commit_data['commit_time']), int(commit_data['commit_timezone'])]
659 date_args = [int(commit_data['commit_time']), int(commit_data['commit_timezone'])]
660
660
661 new_commit_id = self.create_commit(wire, author, committer, message, branch,
661 new_commit_id = self.create_commit(wire, author, committer, message, branch,
662 new_tree_id, date_args=date_args)
662 new_tree_id, date_args=date_args)
663
663
664 # libgit2, ensure the branch is there and exists
664 # libgit2, ensure the branch is there and exists
665 self.create_branch(wire, branch, new_commit_id)
665 self.create_branch(wire, branch, new_commit_id)
666
666
667 # libgit2, set new ref to this created commit
667 # libgit2, set new ref to this created commit
668 self.set_refs(wire, f'refs/heads/{branch}', new_commit_id)
668 self.set_refs(wire, f'refs/heads/{branch}', new_commit_id)
669
669
670 return new_commit_id
670 return new_commit_id
671
671
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch objects/refs from ``url`` into the repository described by ``wire``
        using dulwich clients.

        :param wire: repo descriptor dict, resolved to a repo via the factory
        :param url: remote url; 'default' or values without '://' are treated
            as local filesystem paths (LocalGitClient), otherwise HTTP
        :param apply_refs: when True, write fetched remote refs into the local
            repository (peeled refs are skipped)
        :param refs: optional list of ref names; restricts what is fetched
        :param update_after: when True, point HEAD at the remote HEAD and
            rebuild the index from its tree after fetching
        :return: dict of remote refs (or ``FetchPackResult.refs``)
        :raises exceptions.AbortException: when the remote is not a git repo
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(safe_bytes(url))
            o = self._build_opener(url)
            # strip credentials from the url; they travel via the opener
            url = url_obj.authinfo()[0]
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            refs = [ascii_bytes(x) for x in refs]

            def determine_wants_requested(remote_refs):
                # keep only hashes of refs explicitly listed in `refs`
                determined = []
                for ref_name, ref_hash in remote_refs.items():
                    bytes_ref_name = safe_bytes(ref_name)

                    if bytes_ref_name in refs:
                        bytes_ref_hash = safe_bytes(ref_hash)
                        determined.append(bytes_ref_hash)
                return determined

            # swap with our custom requested wants
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)

        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo[HEAD_MARKER] = remote_refs[refs[-1]]

        if update_after:
            # we want to check out HEAD
            repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo[HEAD_MARKER].tree)

        if isinstance(remote_refs, FetchPackResult):
            return remote_refs.refs
        return remote_refs
739
739
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from ``url`` using the git CLI.

        Lists remote refs with ``ls-remote`` first, then fetches the selected
        refspecs in chunks.

        :param refs: optional sha (or list of shas); when given, only remote
            refs whose sha is in this list are fetched
        :param all_refs: when True, list every remote ref instead of only
            heads and tags
        :return: OrderedDict mapping remote ref name (bytes) -> sha (bytes)
        """
        self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        # each ls-remote line is b"<sha>\t<ref-name>"
        for ref_line in output.splitlines():
            sha, ref = ref_line.split(b'\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in [HEAD_MARKER]:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
            elif not refs:
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # chunk the refspecs to stay under OS command-line length limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
793
793
794 @reraise_safe_exceptions
794 @reraise_safe_exceptions
795 def sync_push(self, wire, url, refs=None):
795 def sync_push(self, wire, url, refs=None):
796 if not self.check_url(url, wire):
796 if not self.check_url(url, wire):
797 return
797 return
798 config = self._wire_to_config(wire)
798 config = self._wire_to_config(wire)
799 self._factory.repo(wire)
799 self._factory.repo(wire)
800 self.run_git_command(
800 self.run_git_command(
801 wire, ['push', url, '--mirror'], fail_on_stderr=False,
801 wire, ['push', url, '--mirror'], fail_on_stderr=False,
802 _copts=self._remote_conf(config),
802 _copts=self._remote_conf(config),
803 extra_env={'GIT_TERMINAL_PROMPT': '0'})
803 extra_env={'GIT_TERMINAL_PROMPT': '0'})
804
804
805 @reraise_safe_exceptions
805 @reraise_safe_exceptions
806 def get_remote_refs(self, wire, url):
806 def get_remote_refs(self, wire, url):
807 repo = Repo(url)
807 repo = Repo(url)
808 return repo.get_refs()
808 return repo.get_refs()
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def get_description(self, wire):
811 def get_description(self, wire):
812 repo = self._factory.repo(wire)
812 repo = self._factory.repo(wire)
813 return repo.get_description()
813 return repo.get_description()
814
814
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """
        Return commit ids reachable from ``rev2`` but not from ``rev1``,
        walked on the repository located at ``path2``.

        :param rev1: excluded revision (walk boundary)
        :param rev2: included revision (walk start)
        :param path2: filesystem path of the repository to walk/fetch from
        :return: list of commit id bytes
        """
        repo = self._factory.repo(wire)
        # pull objects from path2 into the local repo so revs are resolvable
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        # NOTE(review): this fetches path2 into the repo that already lives at
        # path2 — looks redundant; confirm whether this second fetch is needed
        LocalGitClient(thin_packs=False).fetch(path2, repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
        return revs
829
829
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """
        Resolve ``sha`` (a sha or rev expression) to a commit and return its
        metadata.

        :param sha: revision expression understood by ``revparse_single``
        :param maybe_unreachable: when True, skip the dangling-commit check so
            commits not on any branch can still be resolved
        :return: dict with 'id', 'type', 'commit_id' and 'idx' keys
        :raises exceptions.LookupException: when the sha cannot be resolved,
            or resolves to a commit not reachable from any branch
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # dereference annotated tags to the tagged object
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_str = commit.type_str

                return {
                    'id': commit_id,
                    'type': type_str,
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
889
889
890 @reraise_safe_exceptions
890 @reraise_safe_exceptions
891 def get_refs(self, wire):
891 def get_refs(self, wire):
892 cache_on, context_uid, repo_id = self._cache_on(wire)
892 cache_on, context_uid, repo_id = self._cache_on(wire)
893 region = self._region(wire)
893 region = self._region(wire)
894
894
895 @region.conditional_cache_on_arguments(condition=cache_on)
895 @region.conditional_cache_on_arguments(condition=cache_on)
896 def _get_refs(_context_uid, _repo_id):
896 def _get_refs(_context_uid, _repo_id):
897
897
898 repo_init = self._factory.repo_libgit2(wire)
898 repo_init = self._factory.repo_libgit2(wire)
899 with repo_init as repo:
899 with repo_init as repo:
900 regex = re.compile('^refs/(heads|tags)/')
900 regex = re.compile('^refs/(heads|tags)/')
901 return {x.name: x.target.hex for x in
901 return {x.name: x.target.hex for x in
902 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
902 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
903
903
904 return _get_refs(context_uid, repo_id)
904 return _get_refs(context_uid, repo_id)
905
905
906 @reraise_safe_exceptions
906 @reraise_safe_exceptions
907 def get_branch_pointers(self, wire):
907 def get_branch_pointers(self, wire):
908 cache_on, context_uid, repo_id = self._cache_on(wire)
908 cache_on, context_uid, repo_id = self._cache_on(wire)
909 region = self._region(wire)
909 region = self._region(wire)
910
910
911 @region.conditional_cache_on_arguments(condition=cache_on)
911 @region.conditional_cache_on_arguments(condition=cache_on)
912 def _get_branch_pointers(_context_uid, _repo_id):
912 def _get_branch_pointers(_context_uid, _repo_id):
913
913
914 repo_init = self._factory.repo_libgit2(wire)
914 repo_init = self._factory.repo_libgit2(wire)
915 regex = re.compile('^refs/heads')
915 regex = re.compile('^refs/heads')
916 with repo_init as repo:
916 with repo_init as repo:
917 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
917 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
918 return {x.target.hex: x.shorthand for x in branches}
918 return {x.target.hex: x.shorthand for x in branches}
919
919
920 return _get_branch_pointers(context_uid, repo_id)
920 return _get_branch_pointers(context_uid, repo_id)
921
921
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        """
        Return the hex sha of the commit that HEAD points at.

        :param show_exc: when True, re-raise any error resolving HEAD;
            when False, swallow it (returning None implicitly)
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    # NOTE(review): reads the closure `show_exc`, not the
                    # `_show_exc` cache-key parameter — same value at call time
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)
937
937
938 @reraise_safe_exceptions
938 @reraise_safe_exceptions
939 def init(self, wire):
939 def init(self, wire):
940 repo_path = safe_str(wire['path'])
940 repo_path = safe_str(wire['path'])
941 self.repo = Repo.init(repo_path)
941 self.repo = Repo.init(repo_path)
942
942
943 @reraise_safe_exceptions
943 @reraise_safe_exceptions
944 def init_bare(self, wire):
944 def init_bare(self, wire):
945 repo_path = safe_str(wire['path'])
945 repo_path = safe_str(wire['path'])
946 self.repo = Repo.init_bare(repo_path)
946 self.repo = Repo.init_bare(repo_path)
947
947
948 @reraise_safe_exceptions
948 @reraise_safe_exceptions
949 def revision(self, wire, rev):
949 def revision(self, wire, rev):
950
950
951 cache_on, context_uid, repo_id = self._cache_on(wire)
951 cache_on, context_uid, repo_id = self._cache_on(wire)
952 region = self._region(wire)
952 region = self._region(wire)
953
953
954 @region.conditional_cache_on_arguments(condition=cache_on)
954 @region.conditional_cache_on_arguments(condition=cache_on)
955 def _revision(_context_uid, _repo_id, _rev):
955 def _revision(_context_uid, _repo_id, _rev):
956 repo_init = self._factory.repo_libgit2(wire)
956 repo_init = self._factory.repo_libgit2(wire)
957 with repo_init as repo:
957 with repo_init as repo:
958 commit = repo[rev]
958 commit = repo[rev]
959 obj_data = {
959 obj_data = {
960 'id': commit.id.hex,
960 'id': commit.id.hex,
961 }
961 }
962 # tree objects itself don't have tree_id attribute
962 # tree objects itself don't have tree_id attribute
963 if hasattr(commit, 'tree_id'):
963 if hasattr(commit, 'tree_id'):
964 obj_data['tree'] = commit.tree_id.hex
964 obj_data['tree'] = commit.tree_id.hex
965
965
966 return obj_data
966 return obj_data
967 return _revision(context_uid, repo_id, rev)
967 return _revision(context_uid, repo_id, rev)
968
968
969 @reraise_safe_exceptions
969 @reraise_safe_exceptions
970 def date(self, wire, commit_id):
970 def date(self, wire, commit_id):
971 cache_on, context_uid, repo_id = self._cache_on(wire)
971 cache_on, context_uid, repo_id = self._cache_on(wire)
972 region = self._region(wire)
972 region = self._region(wire)
973
973
974 @region.conditional_cache_on_arguments(condition=cache_on)
974 @region.conditional_cache_on_arguments(condition=cache_on)
975 def _date(_repo_id, _commit_id):
975 def _date(_repo_id, _commit_id):
976 repo_init = self._factory.repo_libgit2(wire)
976 repo_init = self._factory.repo_libgit2(wire)
977 with repo_init as repo:
977 with repo_init as repo:
978 commit = repo[commit_id]
978 commit = repo[commit_id]
979
979
980 if hasattr(commit, 'commit_time'):
980 if hasattr(commit, 'commit_time'):
981 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
981 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
982 else:
982 else:
983 commit = commit.get_object()
983 commit = commit.get_object()
984 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
984 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
985
985
986 # TODO(marcink): check dulwich difference of offset vs timezone
986 # TODO(marcink): check dulwich difference of offset vs timezone
987 return [commit_time, commit_time_offset]
987 return [commit_time, commit_time_offset]
988 return _date(repo_id, commit_id)
988 return _date(repo_id, commit_id)
989
989
990 @reraise_safe_exceptions
990 @reraise_safe_exceptions
991 def author(self, wire, commit_id):
991 def author(self, wire, commit_id):
992 cache_on, context_uid, repo_id = self._cache_on(wire)
992 cache_on, context_uid, repo_id = self._cache_on(wire)
993 region = self._region(wire)
993 region = self._region(wire)
994
994
995 @region.conditional_cache_on_arguments(condition=cache_on)
995 @region.conditional_cache_on_arguments(condition=cache_on)
996 def _author(_repo_id, _commit_id):
996 def _author(_repo_id, _commit_id):
997 repo_init = self._factory.repo_libgit2(wire)
997 repo_init = self._factory.repo_libgit2(wire)
998 with repo_init as repo:
998 with repo_init as repo:
999 commit = repo[commit_id]
999 commit = repo[commit_id]
1000
1000
1001 if hasattr(commit, 'author'):
1001 if hasattr(commit, 'author'):
1002 author = commit.author
1002 author = commit.author
1003 else:
1003 else:
1004 author = commit.get_object().author
1004 author = commit.get_object().author
1005
1005
1006 if author.email:
1006 if author.email:
1007 return f"{author.name} <{author.email}>"
1007 return f"{author.name} <{author.email}>"
1008
1008
1009 try:
1009 try:
1010 return f"{author.name}"
1010 return f"{author.name}"
1011 except Exception:
1011 except Exception:
1012 return f"{safe_str(author.raw_name)}"
1012 return f"{safe_str(author.raw_name)}"
1013
1013
1014 return _author(repo_id, commit_id)
1014 return _author(repo_id, commit_id)
1015
1015
1016 @reraise_safe_exceptions
1016 @reraise_safe_exceptions
1017 def message(self, wire, commit_id):
1017 def message(self, wire, commit_id):
1018 cache_on, context_uid, repo_id = self._cache_on(wire)
1018 cache_on, context_uid, repo_id = self._cache_on(wire)
1019 region = self._region(wire)
1019 region = self._region(wire)
1020
1020
1021 @region.conditional_cache_on_arguments(condition=cache_on)
1021 @region.conditional_cache_on_arguments(condition=cache_on)
1022 def _message(_repo_id, _commit_id):
1022 def _message(_repo_id, _commit_id):
1023 repo_init = self._factory.repo_libgit2(wire)
1023 repo_init = self._factory.repo_libgit2(wire)
1024 with repo_init as repo:
1024 with repo_init as repo:
1025 commit = repo[commit_id]
1025 commit = repo[commit_id]
1026 return commit.message
1026 return commit.message
1027 return _message(repo_id, commit_id)
1027 return _message(repo_id, commit_id)
1028
1028
1029 @reraise_safe_exceptions
1029 @reraise_safe_exceptions
1030 def parents(self, wire, commit_id):
1030 def parents(self, wire, commit_id):
1031 cache_on, context_uid, repo_id = self._cache_on(wire)
1031 cache_on, context_uid, repo_id = self._cache_on(wire)
1032 region = self._region(wire)
1032 region = self._region(wire)
1033
1033
1034 @region.conditional_cache_on_arguments(condition=cache_on)
1034 @region.conditional_cache_on_arguments(condition=cache_on)
1035 def _parents(_repo_id, _commit_id):
1035 def _parents(_repo_id, _commit_id):
1036 repo_init = self._factory.repo_libgit2(wire)
1036 repo_init = self._factory.repo_libgit2(wire)
1037 with repo_init as repo:
1037 with repo_init as repo:
1038 commit = repo[commit_id]
1038 commit = repo[commit_id]
1039 if hasattr(commit, 'parent_ids'):
1039 if hasattr(commit, 'parent_ids'):
1040 parent_ids = commit.parent_ids
1040 parent_ids = commit.parent_ids
1041 else:
1041 else:
1042 parent_ids = commit.get_object().parent_ids
1042 parent_ids = commit.get_object().parent_ids
1043
1043
1044 return [x.hex for x in parent_ids]
1044 return [x.hex for x in parent_ids]
1045 return _parents(repo_id, commit_id)
1045 return _parents(repo_id, commit_id)
1046
1046
    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        """
        Return the ids of the direct children of ``commit_id``.

        Runs ``git rev-list --all --children`` over the ``commit_id^..HEAD``
        range and picks the line that starts with ``commit_id``.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        head = self.head(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):

            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])

            child_ids = []
            pat = re.compile(fr'^{commit_id}')
            for line in output.splitlines():
                line = safe_str(line)
                if pat.match(line):
                    # line format: "<sha> <child-sha> [<child-sha> ...]"
                    found_ids = line.split(' ')[1:]
                    child_ids.extend(found_ids)
                    break

            return child_ids
        return _children(repo_id, commit_id)
1071
1071
1072 @reraise_safe_exceptions
1072 @reraise_safe_exceptions
1073 def set_refs(self, wire, key, value):
1073 def set_refs(self, wire, key, value):
1074 repo_init = self._factory.repo_libgit2(wire)
1074 repo_init = self._factory.repo_libgit2(wire)
1075 with repo_init as repo:
1075 with repo_init as repo:
1076 repo.references.create(key, value, force=True)
1076 repo.references.create(key, value, force=True)
1077
1077
1078 @reraise_safe_exceptions
1078 @reraise_safe_exceptions
1079 def create_branch(self, wire, branch_name, commit_id, force=False):
1079 def create_branch(self, wire, branch_name, commit_id, force=False):
1080 repo_init = self._factory.repo_libgit2(wire)
1080 repo_init = self._factory.repo_libgit2(wire)
1081 with repo_init as repo:
1081 with repo_init as repo:
1082 if commit_id:
1082 if commit_id:
1083 commit = repo[commit_id]
1083 commit = repo[commit_id]
1084 else:
1084 else:
1085 # if commit is not given just use the HEAD
1085 # if commit is not given just use the HEAD
1086 commit = repo.head()
1086 commit = repo.head()
1087
1087
1088 if force:
1088 if force:
1089 repo.branches.local.create(branch_name, commit, force=force)
1089 repo.branches.local.create(branch_name, commit, force=force)
1090 elif not repo.branches.get(branch_name):
1090 elif not repo.branches.get(branch_name):
1091 # create only if that branch isn't existing
1091 # create only if that branch isn't existing
1092 repo.branches.local.create(branch_name, commit, force=force)
1092 repo.branches.local.create(branch_name, commit, force=force)
1093
1093
1094 @reraise_safe_exceptions
1094 @reraise_safe_exceptions
1095 def remove_ref(self, wire, key):
1095 def remove_ref(self, wire, key):
1096 repo_init = self._factory.repo_libgit2(wire)
1096 repo_init = self._factory.repo_libgit2(wire)
1097 with repo_init as repo:
1097 with repo_init as repo:
1098 repo.references.delete(key)
1098 repo.references.delete(key)
1099
1099
1100 @reraise_safe_exceptions
1100 @reraise_safe_exceptions
1101 def tag_remove(self, wire, tag_name):
1101 def tag_remove(self, wire, tag_name):
1102 repo_init = self._factory.repo_libgit2(wire)
1102 repo_init = self._factory.repo_libgit2(wire)
1103 with repo_init as repo:
1103 with repo_init as repo:
1104 key = f'refs/tags/{tag_name}'
1104 key = f'refs/tags/{tag_name}'
1105 repo.references.delete(key)
1105 repo.references.delete(key)
1106
1106
    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        """
        Compute path changes between two commits using dulwich's object store.

        :param source_id: base commit id; may be empty/None for an initial diff
        :param target_id: target commit id
        :return: tuple of (added, modified, deleted) path lists
        """
        repo = self._factory.repo(wire)
        # source can be empty
        source_id = safe_bytes(source_id if source_id else b'')
        target_id = safe_bytes(target_id)

        # empty bytes are falsy, so an empty source yields a None tree
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)

        added = set()
        modified = set()
        deleted = set()
        # each change is ((old_path, new_path), (old_mode, new_mode), (old_sha, new_sha))
        for (old_path, new_path), (_, _), (_, _) in list(result):
            if new_path and old_path:
                modified.add(new_path)
            elif new_path and not old_path:
                added.add(new_path)
            elif not new_path and old_path:
                deleted.add(old_path)

        return list(added), list(modified), list(deleted)
1130
1130
1131 @reraise_safe_exceptions
1131 @reraise_safe_exceptions
1132 def tree_and_type_for_path(self, wire, commit_id, path):
1132 def tree_and_type_for_path(self, wire, commit_id, path):
1133
1133
1134 cache_on, context_uid, repo_id = self._cache_on(wire)
1134 cache_on, context_uid, repo_id = self._cache_on(wire)
1135 region = self._region(wire)
1135 region = self._region(wire)
1136
1136
1137 @region.conditional_cache_on_arguments(condition=cache_on)
1137 @region.conditional_cache_on_arguments(condition=cache_on)
1138 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1138 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1139 repo_init = self._factory.repo_libgit2(wire)
1139 repo_init = self._factory.repo_libgit2(wire)
1140
1140
1141 with repo_init as repo:
1141 with repo_init as repo:
1142 commit = repo[commit_id]
1142 commit = repo[commit_id]
1143 try:
1143 try:
1144 tree = commit.tree[path]
1144 tree = commit.tree[path]
1145 except KeyError:
1145 except KeyError:
1146 return None, None, None
1146 return None, None, None
1147
1147
1148 return tree.id.hex, tree.type_str, tree.filemode
1148 return tree.id.hex, tree.type_str, tree.filemode
1149 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1149 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1150
1150
1151 @reraise_safe_exceptions
1151 @reraise_safe_exceptions
1152 def tree_items(self, wire, tree_id):
1152 def tree_items(self, wire, tree_id):
1153 cache_on, context_uid, repo_id = self._cache_on(wire)
1153 cache_on, context_uid, repo_id = self._cache_on(wire)
1154 region = self._region(wire)
1154 region = self._region(wire)
1155
1155
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1157 def _tree_items(_repo_id, _tree_id):
1157 def _tree_items(_repo_id, _tree_id):
1158
1158
1159 repo_init = self._factory.repo_libgit2(wire)
1159 repo_init = self._factory.repo_libgit2(wire)
1160 with repo_init as repo:
1160 with repo_init as repo:
1161 try:
1161 try:
1162 tree = repo[tree_id]
1162 tree = repo[tree_id]
1163 except KeyError:
1163 except KeyError:
1164 raise ObjectMissing(f'No tree with id: {tree_id}')
1164 raise ObjectMissing(f'No tree with id: {tree_id}')
1165
1165
1166 result = []
1166 result = []
1167 for item in tree:
1167 for item in tree:
1168 item_sha = item.hex
1168 item_sha = item.hex
1169 item_mode = item.filemode
1169 item_mode = item.filemode
1170 item_type = item.type_str
1170 item_type = item.type_str
1171
1171
1172 if item_type == 'commit':
1172 if item_type == 'commit':
1173 # NOTE(marcink): submodules we translate to 'link' for backward compat
1173 # NOTE(marcink): submodules we translate to 'link' for backward compat
1174 item_type = 'link'
1174 item_type = 'link'
1175
1175
1176 result.append((item.name, item_mode, item_sha, item_type))
1176 result.append((item.name, item_mode, item_sha, item_type))
1177 return result
1177 return result
1178 return _tree_items(repo_id, tree_id)
1178 return _tree_items(repo_id, tree_id)
1179
1179
1180 @reraise_safe_exceptions
1180 @reraise_safe_exceptions
1181 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1181 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1182 """
1182 """
1183 Old version that uses subprocess to call diff
1183 Old version that uses subprocess to call diff
1184 """
1184 """
1185
1185
1186 flags = [
1186 flags = [
1187 '-U%s' % context, '--patch',
1187 '-U%s' % context, '--patch',
1188 '--binary',
1188 '--binary',
1189 '--find-renames',
1189 '--find-renames',
1190 '--no-indent-heuristic',
1190 '--no-indent-heuristic',
1191 # '--indent-heuristic',
1191 # '--indent-heuristic',
1192 #'--full-index',
1192 #'--full-index',
1193 #'--abbrev=40'
1193 #'--abbrev=40'
1194 ]
1194 ]
1195
1195
1196 if opt_ignorews:
1196 if opt_ignorews:
1197 flags.append('--ignore-all-space')
1197 flags.append('--ignore-all-space')
1198
1198
1199 if commit_id_1 == self.EMPTY_COMMIT:
1199 if commit_id_1 == self.EMPTY_COMMIT:
1200 cmd = ['show'] + flags + [commit_id_2]
1200 cmd = ['show'] + flags + [commit_id_2]
1201 else:
1201 else:
1202 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1202 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1203
1203
1204 if file_filter:
1204 if file_filter:
1205 cmd.extend(['--', file_filter])
1205 cmd.extend(['--', file_filter])
1206
1206
1207 diff, __ = self.run_git_command(wire, cmd)
1207 diff, __ = self.run_git_command(wire, cmd)
1208 # If we used 'show' command, strip first few lines (until actual diff
1208 # If we used 'show' command, strip first few lines (until actual diff
1209 # starts)
1209 # starts)
1210 if commit_id_1 == self.EMPTY_COMMIT:
1210 if commit_id_1 == self.EMPTY_COMMIT:
1211 lines = diff.splitlines()
1211 lines = diff.splitlines()
1212 x = 0
1212 x = 0
1213 for line in lines:
1213 for line in lines:
1214 if line.startswith(b'diff'):
1214 if line.startswith(b'diff'):
1215 break
1215 break
1216 x += 1
1216 x += 1
1217 # Append new line just like 'diff' command do
1217 # Append new line just like 'diff' command do
1218 diff = '\n'.join(lines[x:]) + '\n'
1218 diff = '\n'.join(lines[x:]) + '\n'
1219 return diff
1219 return diff
1220
1220
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        """
        Compute the diff between two commits via pygit2 tree diffing.

        Returns a BytesEnvelope with the patch text; when `file_filter` is
        set, only the patch for that single path (empty bytes if the path
        did not change).
        """
        repo_init = self._factory.repo_libgit2(wire)

        with repo_init as repo:
            # swap=True reverses old/new sides so the resulting patch reads
            # as commit_id_1 -> commit_id_2 (diff_to_tree diffs "tree vs
            # other" in the opposite direction by default).
            swap = True
            flags = 0
            flags |= pygit2.GIT_DIFF_SHOW_BINARY

            if opt_ignorews:
                flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

            if commit_id_1 == self.EMPTY_COMMIT:
                # diff against the empty tree: everything in commit_id_2 is new
                comm1 = repo[commit_id_2]
                diff_obj = comm1.tree.diff_to_tree(
                    flags=flags, context_lines=context, swap=swap)

            else:
                comm1 = repo[commit_id_2]
                comm2 = repo[commit_id_1]
                diff_obj = comm1.tree.diff_to_tree(
                    comm2.tree, flags=flags, context_lines=context, swap=swap)
            # enable rename detection, mirroring `git diff --find-renames`
            similar_flags = 0
            similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
            diff_obj.find_similar(flags=similar_flags)

            if file_filter:
                # return the patch of the first delta whose old path matches
                for p in diff_obj:
                    if p.delta.old_file.path == file_filter:
                        return BytesEnvelope(p.data) or BytesEnvelope(b'')
                # no matching path == no diff
                return BytesEnvelope(b'')

            return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1255
1255
1256 @reraise_safe_exceptions
1256 @reraise_safe_exceptions
1257 def node_history(self, wire, commit_id, path, limit):
1257 def node_history(self, wire, commit_id, path, limit):
1258 cache_on, context_uid, repo_id = self._cache_on(wire)
1258 cache_on, context_uid, repo_id = self._cache_on(wire)
1259 region = self._region(wire)
1259 region = self._region(wire)
1260
1260
1261 @region.conditional_cache_on_arguments(condition=cache_on)
1261 @region.conditional_cache_on_arguments(condition=cache_on)
1262 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1262 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1263 # optimize for n==1, rev-list is much faster for that use-case
1263 # optimize for n==1, rev-list is much faster for that use-case
1264 if limit == 1:
1264 if limit == 1:
1265 cmd = ['rev-list', '-1', commit_id, '--', path]
1265 cmd = ['rev-list', '-1', commit_id, '--', path]
1266 else:
1266 else:
1267 cmd = ['log']
1267 cmd = ['log']
1268 if limit:
1268 if limit:
1269 cmd.extend(['-n', str(safe_int(limit, 0))])
1269 cmd.extend(['-n', str(safe_int(limit, 0))])
1270 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1270 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1271
1271
1272 output, __ = self.run_git_command(wire, cmd)
1272 output, __ = self.run_git_command(wire, cmd)
1273 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1273 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1274
1274
1275 return [x for x in commit_ids]
1275 return [x for x in commit_ids]
1276 return _node_history(context_uid, repo_id, commit_id, path, limit)
1276 return _node_history(context_uid, repo_id, commit_id, path, limit)
1277
1277
1278 @reraise_safe_exceptions
1278 @reraise_safe_exceptions
1279 def node_annotate_legacy(self, wire, commit_id, path):
1279 def node_annotate_legacy(self, wire, commit_id, path):
1280 # note: replaced by pygit2 implementation
1280 # note: replaced by pygit2 implementation
1281 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1281 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1282 # -l ==> outputs long shas (and we need all 40 characters)
1282 # -l ==> outputs long shas (and we need all 40 characters)
1283 # --root ==> doesn't put '^' character for boundaries
1283 # --root ==> doesn't put '^' character for boundaries
1284 # -r commit_id ==> blames for the given commit
1284 # -r commit_id ==> blames for the given commit
1285 output, __ = self.run_git_command(wire, cmd)
1285 output, __ = self.run_git_command(wire, cmd)
1286
1286
1287 result = []
1287 result = []
1288 for i, blame_line in enumerate(output.splitlines()[:-1]):
1288 for i, blame_line in enumerate(output.splitlines()[:-1]):
1289 line_no = i + 1
1289 line_no = i + 1
1290 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1290 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1291 result.append((line_no, blame_commit_id, line))
1291 result.append((line_no, blame_commit_id, line))
1292
1292
1293 return result
1293 return result
1294
1294
1295 @reraise_safe_exceptions
1295 @reraise_safe_exceptions
1296 def node_annotate(self, wire, commit_id, path):
1296 def node_annotate(self, wire, commit_id, path):
1297
1297
1298 result_libgit = []
1298 result_libgit = []
1299 repo_init = self._factory.repo_libgit2(wire)
1299 repo_init = self._factory.repo_libgit2(wire)
1300 with repo_init as repo:
1300 with repo_init as repo:
1301 commit = repo[commit_id]
1301 commit = repo[commit_id]
1302 blame_obj = repo.blame(path, newest_commit=commit_id)
1302 blame_obj = repo.blame(path, newest_commit=commit_id)
1303 for i, line in enumerate(commit.tree[path].data.splitlines()):
1303 for i, line in enumerate(commit.tree[path].data.splitlines()):
1304 line_no = i + 1
1304 line_no = i + 1
1305 hunk = blame_obj.for_line(line_no)
1305 hunk = blame_obj.for_line(line_no)
1306 blame_commit_id = hunk.final_commit_id.hex
1306 blame_commit_id = hunk.final_commit_id.hex
1307
1307
1308 result_libgit.append((line_no, blame_commit_id, line))
1308 result_libgit.append((line_no, blame_commit_id, line))
1309
1309
1310 return result_libgit
1310 return result_libgit
1311
1311
1312 @reraise_safe_exceptions
1312 @reraise_safe_exceptions
1313 def update_server_info(self, wire):
1313 def update_server_info(self, wire):
1314 repo = self._factory.repo(wire)
1314 repo = self._factory.repo(wire)
1315 update_server_info(repo)
1315 update_server_info(repo)
1316
1316
1317 @reraise_safe_exceptions
1317 @reraise_safe_exceptions
1318 def get_all_commit_ids(self, wire):
1318 def get_all_commit_ids(self, wire):
1319
1319
1320 cache_on, context_uid, repo_id = self._cache_on(wire)
1320 cache_on, context_uid, repo_id = self._cache_on(wire)
1321 region = self._region(wire)
1321 region = self._region(wire)
1322
1322
1323 @region.conditional_cache_on_arguments(condition=cache_on)
1323 @region.conditional_cache_on_arguments(condition=cache_on)
1324 def _get_all_commit_ids(_context_uid, _repo_id):
1324 def _get_all_commit_ids(_context_uid, _repo_id):
1325
1325
1326 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1326 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1327 try:
1327 try:
1328 output, __ = self.run_git_command(wire, cmd)
1328 output, __ = self.run_git_command(wire, cmd)
1329 return output.splitlines()
1329 return output.splitlines()
1330 except Exception:
1330 except Exception:
1331 # Can be raised for empty repositories
1331 # Can be raised for empty repositories
1332 return []
1332 return []
1333
1333
1334 @region.conditional_cache_on_arguments(condition=cache_on)
1334 @region.conditional_cache_on_arguments(condition=cache_on)
1335 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1335 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1336 repo_init = self._factory.repo_libgit2(wire)
1336 repo_init = self._factory.repo_libgit2(wire)
1337 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1337 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1338 results = []
1338 results = []
1339 with repo_init as repo:
1339 with repo_init as repo:
1340 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1340 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1341 results.append(commit.id.hex)
1341 results.append(commit.id.hex)
1342
1342
1343 return _get_all_commit_ids(context_uid, repo_id)
1343 return _get_all_commit_ids(context_uid, repo_id)
1344
1344
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """
        Run a git command as a subprocess for the repository in `wire`.

        Returns (stdout_bytes, stderr_bytes). Special keyword flags consumed
        from `opts` (all removed before being passed to the subprocess):
          _bare  -- skip the default '-c core.quotepath=false' config opts
          _safe  -- on OSError return ('', err) instead of raising
          _copts -- extra '-c'-style git config options to prepend
          extra_env -- dict merged into the subprocess environment
        Raises exceptions.VcsException when the command cannot be started
        (unless _safe was given).
        """
        path = wire.get('path', None)

        # run inside the repository directory when it exists
        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false',]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        # isolate from user/global git config and allow crossing fs boundaries
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        proc = None
        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return b''.join(proc), b''.join(proc.stderr)
        except OSError as err:
            cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                # NOTE(review): returns str '' here while the success path
                # returns bytes — callers of _safe should confirm they
                # tolerate the type mismatch.
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
        finally:
            if proc:
                proc.close()
1398
1398
1399 @reraise_safe_exceptions
1399 @reraise_safe_exceptions
1400 def install_hooks(self, wire, force=False):
1400 def install_hooks(self, wire, force=False):
1401 from vcsserver.hook_utils import install_git_hooks
1401 from vcsserver.hook_utils import install_git_hooks
1402 bare = self.bare(wire)
1402 bare = self.bare(wire)
1403 path = wire['path']
1403 path = wire['path']
1404 binary_dir = settings.BINARY_DIR
1404 binary_dir = settings.BINARY_DIR
1405 if binary_dir:
1405 if binary_dir:
1406 os.path.join(binary_dir, 'python3')
1406 os.path.join(binary_dir, 'python3')
1407 return install_git_hooks(path, bare, force_create=force)
1407 return install_git_hooks(path, bare, force_create=force)
1408
1408
1409 @reraise_safe_exceptions
1409 @reraise_safe_exceptions
1410 def get_hooks_info(self, wire):
1410 def get_hooks_info(self, wire):
1411 from vcsserver.hook_utils import (
1411 from vcsserver.hook_utils import (
1412 get_git_pre_hook_version, get_git_post_hook_version)
1412 get_git_pre_hook_version, get_git_post_hook_version)
1413 bare = self.bare(wire)
1413 bare = self.bare(wire)
1414 path = wire['path']
1414 path = wire['path']
1415 return {
1415 return {
1416 'pre_version': get_git_pre_hook_version(path, bare),
1416 'pre_version': get_git_pre_hook_version(path, bare),
1417 'post_version': get_git_post_hook_version(path, bare),
1417 'post_version': get_git_post_hook_version(path, bare),
1418 }
1418 }
1419
1419
1420 @reraise_safe_exceptions
1420 @reraise_safe_exceptions
1421 def set_head_ref(self, wire, head_name):
1421 def set_head_ref(self, wire, head_name):
1422 log.debug('Setting refs/head to `%s`', head_name)
1422 log.debug('Setting refs/head to `%s`', head_name)
1423 repo_init = self._factory.repo_libgit2(wire)
1423 repo_init = self._factory.repo_libgit2(wire)
1424 with repo_init as repo:
1424 with repo_init as repo:
1425 repo.set_head(f'refs/heads/{head_name}')
1425 repo.set_head(f'refs/heads/{head_name}')
1426
1426
1427 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1427 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1428
1428
    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id, cache_config):
        """
        Build (or reuse from cache) an archive of `commit_id`, limited to
        `archive_at_path`, by handing a lazy file walker to
        store_archive_in_cache.
        """

        def file_walker(_commit_id, path):
            # Generator yielding ArchiveNode entries for every file reachable
            # under `path` in the commit.
            # NOTE(review): uses the closed-over `commit_id`, not the
            # `_commit_id` parameter — presumably they are always equal;
            # confirm against store_archive_in_cache's calling convention.
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]

                # '' or '/' means archive from the repository root
                if path in ['', '/']:
                    tree = commit.tree
                else:
                    tree = commit.tree[path.rstrip('/')]
                tree_id = tree.id.hex
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing(f'No tree with id: {tree_id}')

                # read the tree into an in-memory index to get a flat,
                # recursive listing of all file entries
                index = LibGit2Index.Index()
                index.read_tree(tree)
                file_iter = index

                for file_node in file_iter:
                    file_path = file_node.path
                    mode = file_node.mode
                    is_link = stat.S_ISLNK(mode)
                    if mode == pygit2.GIT_FILEMODE_COMMIT:
                        # submodule entries have no blob content to archive
                        log.debug('Skipping path %s as a commit node', file_path)
                        continue
                    # pass the bound read_raw so blob data is read lazily
                    yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)

        return store_archive_in_cache(
            file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
@@ -1,1159 +1,1159 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import binascii
17 import binascii
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib.request
21 import urllib.request
22 import urllib.parse
22 import urllib.parse
23 import traceback
23 import traceback
24 import hashlib
24 import hashlib
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27
27
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
36 BinaryEnvelope
36 BinaryEnvelope
37 from vcsserver.hgcompat import (
37 from vcsserver.hgcompat import (
38 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
39 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
40 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
41 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
42 RepoLookupError, InterventionRequired, RequirementError,
42 RepoLookupError, InterventionRequired, RequirementError,
43 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
43 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
44 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
44 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
45 from vcsserver.vcs_base import RemoteBase
45 from vcsserver.vcs_base import RemoteBase
46 from vcsserver.config import hooks as hooks_config
46 from vcsserver.config import hooks as hooks_config
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
def make_ui_from_config(repo_config):
    """
    Build a mercurial ui object from (section, option, value) triples.

    All ui output channels are redirected to this module's logger, the ui is
    forced quiet/unpaginated/single-threaded, and the largefiles extension is
    disabled unless the config explicitly mentions it.
    """

    class LoggingUI(ui.ui):
        # ui subclass that routes mercurial's output to the python logger
        # instead of printing it

        def status(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info(' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object of any pre-existing config layers
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    # mercurial config keys/values must be bytes
    for section, option, value in repo_config:
        baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig(b'ui', b'quiet', b'true')

    baseui.setconfig(b'ui', b'paginate', b'never')
    # for better Error reporting of Mercurial
    baseui.setconfig(b'ui', b'message-output', b'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig(b'worker', b'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig(b'extensions', b'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig(b'extensions', b'largefiles', b'!')

    return baseui
111
111
112
112
def reraise_safe_exceptions(func):
    """
    Decorator for converting mercurial exceptions to something neutral.

    Mercurial-specific errors are translated to the corresponding
    vcsserver.exceptions types via raise_from_original; anything else
    without a `_vcs_kind` marker becomes UnhandledException.
    """
    from functools import wraps

    # IMPROVEMENT: preserve the wrapped function's name/docstring so remote
    # method dispatch and logging see the real function metadata.
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e), e)
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e), e)
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e), e)
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except Exception as e:
            # exceptions already marked with a _vcs_kind are re-raised as-is
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e), e)

            raise
    return wrapper
136
136
137
137
class MercurialFactory(RepoFactory):
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """
        Build a mercurial ui object from `config` triples; when `hooks` is
        False, strip the RhodeCode-managed hook entries first.
        """
        if not hooks:
            hooks_to_clean = {
                hooks_config.HOOK_REPO_SIZE,
                hooks_config.HOOK_PRE_PULL,
                hooks_config.HOOK_PULL,
                hooks_config.HOOK_PRE_PUSH,
                # TODO: what about PRETXT, this was disabled in pre 5.0.0
                hooks_config.HOOK_PRETX_PUSH,
            }
            config = [
                (section, option, value)
                for section, option, value in config
                if not (section == 'hooks' and option in hooks_to_clean)
            ]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        """Instantiate the mercurial repository object for wire['path']."""
        baseui = self._create_config(wire["config"])
        repo = instance(baseui, safe_bytes(wire["path"]), create)
        log.debug('repository created: got HG object: %s', repo)
        return repo

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
176
176
177
177
def patch_ui_message_output(baseui):
    """
    Redirect the output channels of `baseui` into a single BytesIO buffer.

    Un-quiets the ui, then replaces status/write/warn/debug with a function
    that appends to the buffer. Returns (patched ui, buffer) so callers can
    inspect everything the ui produced.
    """
    baseui.setconfig(b'ui', b'quiet', b'false')
    captured = io.BytesIO()

    def write(data, **unused_kwargs):
        captured.write(data)

    baseui.status = write
    baseui.write = write
    baseui.warn = write
    baseui.debug = write

    return baseui, captured
191
191
192
192
def get_obfuscated_url(url_obj):
    """Return `url_obj` rendered as str with password and query obfuscated."""
    if url_obj.passwd:
        url_obj.passwd = b'*****'
    url_obj.query = obfuscate_qs(url_obj.query)
    return str(url_obj)
198
198
199
199
200 def normalize_url_for_hg(url: str):
200 def normalize_url_for_hg(url: str):
201 _proto = None
201 _proto = None
202
202
203 if '+' in url[:url.find('://')]:
203 if '+' in url[:url.find('://')]:
204 _proto = url[0:url.find('+')]
204 _proto = url[0:url.find('+')]
205 url = url[url.find('+') + 1:]
205 url = url[url.find('+') + 1:]
206 return url, _proto
206 return url, _proto
207
207
208
208
209 class HgRemote(RemoteBase):
209 class HgRemote(RemoteBase):
210
210
211 def __init__(self, factory):
211 def __init__(self, factory):
212 self._factory = factory
212 self._factory = factory
213 self._bulk_methods = {
213 self._bulk_methods = {
214 "affected_files": self.ctx_files,
214 "affected_files": self.ctx_files,
215 "author": self.ctx_user,
215 "author": self.ctx_user,
216 "branch": self.ctx_branch,
216 "branch": self.ctx_branch,
217 "children": self.ctx_children,
217 "children": self.ctx_children,
218 "date": self.ctx_date,
218 "date": self.ctx_date,
219 "message": self.ctx_description,
219 "message": self.ctx_description,
220 "parents": self.ctx_parents,
220 "parents": self.ctx_parents,
221 "status": self.ctx_status,
221 "status": self.ctx_status,
222 "obsolete": self.ctx_obsolete,
222 "obsolete": self.ctx_obsolete,
223 "phase": self.ctx_phase,
223 "phase": self.ctx_phase,
224 "hidden": self.ctx_hidden,
224 "hidden": self.ctx_hidden,
225 "_file_paths": self.ctx_list,
225 "_file_paths": self.ctx_list,
226 }
226 }
227 self._bulk_file_methods = {
227 self._bulk_file_methods = {
228 "size": self.fctx_size,
228 "size": self.fctx_size,
229 "data": self.fctx_node_data,
229 "data": self.fctx_node_data,
230 "flags": self.fctx_flags,
230 "flags": self.fctx_flags,
231 "is_binary": self.is_binary,
231 "is_binary": self.is_binary,
232 "md5": self.md5_hash,
232 "md5": self.md5_hash,
233 }
233 }
234
234
235 def _get_ctx(self, repo, ref):
235 def _get_ctx(self, repo, ref):
236 return get_ctx(repo, ref)
236 return get_ctx(repo, ref)
237
237
238 @reraise_safe_exceptions
238 @reraise_safe_exceptions
239 def discover_hg_version(self):
239 def discover_hg_version(self):
240 from mercurial import util
240 from mercurial import util
241 return safe_str(util.version())
241 return safe_str(util.version())
242
242
243 @reraise_safe_exceptions
243 @reraise_safe_exceptions
244 def is_empty(self, wire):
244 def is_empty(self, wire):
245 repo = self._factory.repo(wire)
245 repo = self._factory.repo(wire)
246
246
247 try:
247 try:
248 return len(repo) == 0
248 return len(repo) == 0
249 except Exception:
249 except Exception:
250 log.exception("failed to read object_store")
250 log.exception("failed to read object_store")
251 return False
251 return False
252
252
253 @reraise_safe_exceptions
253 @reraise_safe_exceptions
254 def bookmarks(self, wire):
254 def bookmarks(self, wire):
255 cache_on, context_uid, repo_id = self._cache_on(wire)
255 cache_on, context_uid, repo_id = self._cache_on(wire)
256 region = self._region(wire)
256 region = self._region(wire)
257
257
258 @region.conditional_cache_on_arguments(condition=cache_on)
258 @region.conditional_cache_on_arguments(condition=cache_on)
259 def _bookmarks(_context_uid, _repo_id):
259 def _bookmarks(_context_uid, _repo_id):
260 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
261 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
261 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
262
262
263 return _bookmarks(context_uid, repo_id)
263 return _bookmarks(context_uid, repo_id)
264
264
265 @reraise_safe_exceptions
265 @reraise_safe_exceptions
266 def branches(self, wire, normal, closed):
266 def branches(self, wire, normal, closed):
267 cache_on, context_uid, repo_id = self._cache_on(wire)
267 cache_on, context_uid, repo_id = self._cache_on(wire)
268 region = self._region(wire)
268 region = self._region(wire)
269
269
270 @region.conditional_cache_on_arguments(condition=cache_on)
270 @region.conditional_cache_on_arguments(condition=cache_on)
271 def _branches(_context_uid, _repo_id, _normal, _closed):
271 def _branches(_context_uid, _repo_id, _normal, _closed):
272 repo = self._factory.repo(wire)
272 repo = self._factory.repo(wire)
273 iter_branches = repo.branchmap().iterbranches()
273 iter_branches = repo.branchmap().iterbranches()
274 bt = {}
274 bt = {}
275 for branch_name, _heads, tip_node, is_closed in iter_branches:
275 for branch_name, _heads, tip_node, is_closed in iter_branches:
276 if normal and not is_closed:
276 if normal and not is_closed:
277 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
277 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
278 if closed and is_closed:
278 if closed and is_closed:
279 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
279 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
280
280
281 return bt
281 return bt
282
282
283 return _branches(context_uid, repo_id, normal, closed)
283 return _branches(context_uid, repo_id, normal, closed)
284
284
285 @reraise_safe_exceptions
285 @reraise_safe_exceptions
286 def bulk_request(self, wire, commit_id, pre_load):
286 def bulk_request(self, wire, commit_id, pre_load):
287 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
288 region = self._region(wire)
288 region = self._region(wire)
289
289
290 @region.conditional_cache_on_arguments(condition=cache_on)
290 @region.conditional_cache_on_arguments(condition=cache_on)
291 def _bulk_request(_repo_id, _commit_id, _pre_load):
291 def _bulk_request(_repo_id, _commit_id, _pre_load):
292 result = {}
292 result = {}
293 for attr in pre_load:
293 for attr in pre_load:
294 try:
294 try:
295 method = self._bulk_methods[attr]
295 method = self._bulk_methods[attr]
296 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
296 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
297 result[attr] = method(wire, commit_id)
297 result[attr] = method(wire, commit_id)
298 except KeyError as e:
298 except KeyError as e:
299 raise exceptions.VcsException(e)(
299 raise exceptions.VcsException(e)(
300 'Unknown bulk attribute: "%s"' % attr)
300 'Unknown bulk attribute: "%s"' % attr)
301 return result
301 return result
302
302
303 return _bulk_request(repo_id, commit_id, sorted(pre_load))
303 return _bulk_request(repo_id, commit_id, sorted(pre_load))
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def ctx_branch(self, wire, commit_id):
306 def ctx_branch(self, wire, commit_id):
307 cache_on, context_uid, repo_id = self._cache_on(wire)
307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 region = self._region(wire)
308 region = self._region(wire)
309
309
310 @region.conditional_cache_on_arguments(condition=cache_on)
310 @region.conditional_cache_on_arguments(condition=cache_on)
311 def _ctx_branch(_repo_id, _commit_id):
311 def _ctx_branch(_repo_id, _commit_id):
312 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, commit_id)
313 ctx = self._get_ctx(repo, commit_id)
314 return ctx.branch()
314 return ctx.branch()
315 return _ctx_branch(repo_id, commit_id)
315 return _ctx_branch(repo_id, commit_id)
316
316
317 @reraise_safe_exceptions
317 @reraise_safe_exceptions
318 def ctx_date(self, wire, commit_id):
318 def ctx_date(self, wire, commit_id):
319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self._region(wire)
320 region = self._region(wire)
321
321
322 @region.conditional_cache_on_arguments(condition=cache_on)
322 @region.conditional_cache_on_arguments(condition=cache_on)
323 def _ctx_date(_repo_id, _commit_id):
323 def _ctx_date(_repo_id, _commit_id):
324 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
325 ctx = self._get_ctx(repo, commit_id)
326 return ctx.date()
326 return ctx.date()
327 return _ctx_date(repo_id, commit_id)
327 return _ctx_date(repo_id, commit_id)
328
328
329 @reraise_safe_exceptions
329 @reraise_safe_exceptions
330 def ctx_description(self, wire, revision):
330 def ctx_description(self, wire, revision):
331 repo = self._factory.repo(wire)
331 repo = self._factory.repo(wire)
332 ctx = self._get_ctx(repo, revision)
332 ctx = self._get_ctx(repo, revision)
333 return ctx.description()
333 return ctx.description()
334
334
335 @reraise_safe_exceptions
335 @reraise_safe_exceptions
336 def ctx_files(self, wire, commit_id):
336 def ctx_files(self, wire, commit_id):
337 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 region = self._region(wire)
338 region = self._region(wire)
339
339
340 @region.conditional_cache_on_arguments(condition=cache_on)
340 @region.conditional_cache_on_arguments(condition=cache_on)
341 def _ctx_files(_repo_id, _commit_id):
341 def _ctx_files(_repo_id, _commit_id):
342 repo = self._factory.repo(wire)
342 repo = self._factory.repo(wire)
343 ctx = self._get_ctx(repo, commit_id)
343 ctx = self._get_ctx(repo, commit_id)
344 return ctx.files()
344 return ctx.files()
345
345
346 return _ctx_files(repo_id, commit_id)
346 return _ctx_files(repo_id, commit_id)
347
347
348 @reraise_safe_exceptions
348 @reraise_safe_exceptions
349 def ctx_list(self, path, revision):
349 def ctx_list(self, path, revision):
350 repo = self._factory.repo(path)
350 repo = self._factory.repo(path)
351 ctx = self._get_ctx(repo, revision)
351 ctx = self._get_ctx(repo, revision)
352 return list(ctx)
352 return list(ctx)
353
353
354 @reraise_safe_exceptions
354 @reraise_safe_exceptions
355 def ctx_parents(self, wire, commit_id):
355 def ctx_parents(self, wire, commit_id):
356 cache_on, context_uid, repo_id = self._cache_on(wire)
356 cache_on, context_uid, repo_id = self._cache_on(wire)
357 region = self._region(wire)
357 region = self._region(wire)
358
358
359 @region.conditional_cache_on_arguments(condition=cache_on)
359 @region.conditional_cache_on_arguments(condition=cache_on)
360 def _ctx_parents(_repo_id, _commit_id):
360 def _ctx_parents(_repo_id, _commit_id):
361 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, commit_id)
362 ctx = self._get_ctx(repo, commit_id)
363 return [parent.hex() for parent in ctx.parents()
363 return [parent.hex() for parent in ctx.parents()
364 if not (parent.hidden() or parent.obsolete())]
364 if not (parent.hidden() or parent.obsolete())]
365
365
366 return _ctx_parents(repo_id, commit_id)
366 return _ctx_parents(repo_id, commit_id)
367
367
368 @reraise_safe_exceptions
368 @reraise_safe_exceptions
369 def ctx_children(self, wire, commit_id):
369 def ctx_children(self, wire, commit_id):
370 cache_on, context_uid, repo_id = self._cache_on(wire)
370 cache_on, context_uid, repo_id = self._cache_on(wire)
371 region = self._region(wire)
371 region = self._region(wire)
372
372
373 @region.conditional_cache_on_arguments(condition=cache_on)
373 @region.conditional_cache_on_arguments(condition=cache_on)
374 def _ctx_children(_repo_id, _commit_id):
374 def _ctx_children(_repo_id, _commit_id):
375 repo = self._factory.repo(wire)
375 repo = self._factory.repo(wire)
376 ctx = self._get_ctx(repo, commit_id)
376 ctx = self._get_ctx(repo, commit_id)
377 return [child.hex() for child in ctx.children()
377 return [child.hex() for child in ctx.children()
378 if not (child.hidden() or child.obsolete())]
378 if not (child.hidden() or child.obsolete())]
379
379
380 return _ctx_children(repo_id, commit_id)
380 return _ctx_children(repo_id, commit_id)
381
381
382 @reraise_safe_exceptions
382 @reraise_safe_exceptions
383 def ctx_phase(self, wire, commit_id):
383 def ctx_phase(self, wire, commit_id):
384 cache_on, context_uid, repo_id = self._cache_on(wire)
384 cache_on, context_uid, repo_id = self._cache_on(wire)
385 region = self._region(wire)
385 region = self._region(wire)
386
386
387 @region.conditional_cache_on_arguments(condition=cache_on)
387 @region.conditional_cache_on_arguments(condition=cache_on)
388 def _ctx_phase(_context_uid, _repo_id, _commit_id):
388 def _ctx_phase(_context_uid, _repo_id, _commit_id):
389 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
390 ctx = self._get_ctx(repo, commit_id)
390 ctx = self._get_ctx(repo, commit_id)
391 # public=0, draft=1, secret=3
391 # public=0, draft=1, secret=3
392 return ctx.phase()
392 return ctx.phase()
393 return _ctx_phase(context_uid, repo_id, commit_id)
393 return _ctx_phase(context_uid, repo_id, commit_id)
394
394
395 @reraise_safe_exceptions
395 @reraise_safe_exceptions
396 def ctx_obsolete(self, wire, commit_id):
396 def ctx_obsolete(self, wire, commit_id):
397 cache_on, context_uid, repo_id = self._cache_on(wire)
397 cache_on, context_uid, repo_id = self._cache_on(wire)
398 region = self._region(wire)
398 region = self._region(wire)
399
399
400 @region.conditional_cache_on_arguments(condition=cache_on)
400 @region.conditional_cache_on_arguments(condition=cache_on)
401 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
401 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
402 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
403 ctx = self._get_ctx(repo, commit_id)
403 ctx = self._get_ctx(repo, commit_id)
404 return ctx.obsolete()
404 return ctx.obsolete()
405 return _ctx_obsolete(context_uid, repo_id, commit_id)
405 return _ctx_obsolete(context_uid, repo_id, commit_id)
406
406
407 @reraise_safe_exceptions
407 @reraise_safe_exceptions
408 def ctx_hidden(self, wire, commit_id):
408 def ctx_hidden(self, wire, commit_id):
409 cache_on, context_uid, repo_id = self._cache_on(wire)
409 cache_on, context_uid, repo_id = self._cache_on(wire)
410 region = self._region(wire)
410 region = self._region(wire)
411
411
412 @region.conditional_cache_on_arguments(condition=cache_on)
412 @region.conditional_cache_on_arguments(condition=cache_on)
413 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
413 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
414 repo = self._factory.repo(wire)
414 repo = self._factory.repo(wire)
415 ctx = self._get_ctx(repo, commit_id)
415 ctx = self._get_ctx(repo, commit_id)
416 return ctx.hidden()
416 return ctx.hidden()
417 return _ctx_hidden(context_uid, repo_id, commit_id)
417 return _ctx_hidden(context_uid, repo_id, commit_id)
418
418
419 @reraise_safe_exceptions
419 @reraise_safe_exceptions
420 def ctx_substate(self, wire, revision):
420 def ctx_substate(self, wire, revision):
421 repo = self._factory.repo(wire)
421 repo = self._factory.repo(wire)
422 ctx = self._get_ctx(repo, revision)
422 ctx = self._get_ctx(repo, revision)
423 return ctx.substate
423 return ctx.substate
424
424
425 @reraise_safe_exceptions
425 @reraise_safe_exceptions
426 def ctx_status(self, wire, revision):
426 def ctx_status(self, wire, revision):
427 repo = self._factory.repo(wire)
427 repo = self._factory.repo(wire)
428 ctx = self._get_ctx(repo, revision)
428 ctx = self._get_ctx(repo, revision)
429 status = repo[ctx.p1().node()].status(other=ctx.node())
429 status = repo[ctx.p1().node()].status(other=ctx.node())
430 # object of status (odd, custom named tuple in mercurial) is not
430 # object of status (odd, custom named tuple in mercurial) is not
431 # correctly serializable, we make it a list, as the underling
431 # correctly serializable, we make it a list, as the underling
432 # API expects this to be a list
432 # API expects this to be a list
433 return list(status)
433 return list(status)
434
434
435 @reraise_safe_exceptions
435 @reraise_safe_exceptions
436 def ctx_user(self, wire, revision):
436 def ctx_user(self, wire, revision):
437 repo = self._factory.repo(wire)
437 repo = self._factory.repo(wire)
438 ctx = self._get_ctx(repo, revision)
438 ctx = self._get_ctx(repo, revision)
439 return ctx.user()
439 return ctx.user()
440
440
441 @reraise_safe_exceptions
441 @reraise_safe_exceptions
442 def check_url(self, url, config):
442 def check_url(self, url, config):
443 url, _proto = normalize_url_for_hg(url)
443 url, _proto = normalize_url_for_hg(url)
444 url_obj = url_parser(safe_bytes(url))
444 url_obj = url_parser(safe_bytes(url))
445
445
446 test_uri = safe_str(url_obj.authinfo()[0])
446 test_uri = safe_str(url_obj.authinfo()[0])
447 authinfo = url_obj.authinfo()[1]
447 authinfo = url_obj.authinfo()[1]
448 obfuscated_uri = get_obfuscated_url(url_obj)
448 obfuscated_uri = get_obfuscated_url(url_obj)
449 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
449 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
450
450
451 handlers = []
451 handlers = []
452 if authinfo:
452 if authinfo:
453 # create a password manager
453 # create a password manager
454 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
454 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
455 passmgr.add_password(*authinfo)
455 passmgr.add_password(*authinfo)
456
456
457 handlers.extend((httpbasicauthhandler(passmgr),
457 handlers.extend((httpbasicauthhandler(passmgr),
458 httpdigestauthhandler(passmgr)))
458 httpdigestauthhandler(passmgr)))
459
459
460 o = urllib.request.build_opener(*handlers)
460 o = urllib.request.build_opener(*handlers)
461 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
461 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
462 ('Accept', 'application/mercurial-0.1')]
462 ('Accept', 'application/mercurial-0.1')]
463
463
464 q = {"cmd": 'between'}
464 q = {"cmd": 'between'}
465 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
465 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
466 qs = '?%s' % urllib.parse.urlencode(q)
466 qs = '?%s' % urllib.parse.urlencode(q)
467 cu = "{}{}".format(test_uri, qs)
467 cu = "{}{}".format(test_uri, qs)
468 req = urllib.request.Request(cu, None, {})
468 req = urllib.request.Request(cu, None, {})
469
469
470 try:
470 try:
471 log.debug("Trying to open URL %s", obfuscated_uri)
471 log.debug("Trying to open URL %s", obfuscated_uri)
472 resp = o.open(req)
472 resp = o.open(req)
473 if resp.code != 200:
473 if resp.code != 200:
474 raise exceptions.URLError()('Return Code is not 200')
474 raise exceptions.URLError()('Return Code is not 200')
475 except Exception as e:
475 except Exception as e:
476 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
476 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
477 # means it cannot be cloned
477 # means it cannot be cloned
478 raise exceptions.URLError(e)("[{}] org_exc: {}".format(obfuscated_uri, e))
478 raise exceptions.URLError(e)("[{}] org_exc: {}".format(obfuscated_uri, e))
479
479
480 # now check if it's a proper hg repo, but don't do it for svn
480 # now check if it's a proper hg repo, but don't do it for svn
481 try:
481 try:
482 if _proto == 'svn':
482 if _proto == 'svn':
483 pass
483 pass
484 else:
484 else:
485 # check for pure hg repos
485 # check for pure hg repos
486 log.debug(
486 log.debug(
487 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
487 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
488 ui = make_ui_from_config(config)
488 ui = make_ui_from_config(config)
489 peer_checker = makepeer(ui, safe_bytes(url))
489 peer_checker = makepeer(ui, safe_bytes(url))
490 peer_checker.lookup(b'tip')
490 peer_checker.lookup(b'tip')
491 except Exception as e:
491 except Exception as e:
492 log.warning("URL is not a valid Mercurial repository: %s",
492 log.warning("URL is not a valid Mercurial repository: %s",
493 obfuscated_uri)
493 obfuscated_uri)
494 raise exceptions.URLError(e)(
494 raise exceptions.URLError(e)(
495 "url [%s] does not look like an hg repo org_exc: %s"
495 "url [%s] does not look like an hg repo org_exc: %s"
496 % (obfuscated_uri, e))
496 % (obfuscated_uri, e))
497
497
498 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
498 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
499 return True
499 return True
500
500
501 @reraise_safe_exceptions
501 @reraise_safe_exceptions
502 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
502 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
503 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
504
504
505 if file_filter:
505 if file_filter:
506 # unpack the file-filter
506 # unpack the file-filter
507 repo_path, node_path = file_filter
507 repo_path, node_path = file_filter
508 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
508 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
509 else:
509 else:
510 match_filter = file_filter
510 match_filter = file_filter
511 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
511 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
512
512
513 try:
513 try:
514 diff_iter = patch.diff(
514 diff_iter = patch.diff(
515 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
515 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
516 return BytesEnvelope(b"".join(diff_iter))
516 return BytesEnvelope(b"".join(diff_iter))
517 except RepoLookupError as e:
517 except RepoLookupError as e:
518 raise exceptions.LookupException(e)()
518 raise exceptions.LookupException(e)()
519
519
520 @reraise_safe_exceptions
520 @reraise_safe_exceptions
521 def node_history(self, wire, revision, path, limit):
521 def node_history(self, wire, revision, path, limit):
522 cache_on, context_uid, repo_id = self._cache_on(wire)
522 cache_on, context_uid, repo_id = self._cache_on(wire)
523 region = self._region(wire)
523 region = self._region(wire)
524
524
525 @region.conditional_cache_on_arguments(condition=cache_on)
525 @region.conditional_cache_on_arguments(condition=cache_on)
526 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
526 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
527 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
528
528
529 ctx = self._get_ctx(repo, revision)
529 ctx = self._get_ctx(repo, revision)
530 fctx = ctx.filectx(safe_bytes(path))
530 fctx = ctx.filectx(safe_bytes(path))
531
531
532 def history_iter():
532 def history_iter():
533 limit_rev = fctx.rev()
533 limit_rev = fctx.rev()
534 for obj in reversed(list(fctx.filelog())):
534 for obj in reversed(list(fctx.filelog())):
535 obj = fctx.filectx(obj)
535 obj = fctx.filectx(obj)
536 ctx = obj.changectx()
536 ctx = obj.changectx()
537 if ctx.hidden() or ctx.obsolete():
537 if ctx.hidden() or ctx.obsolete():
538 continue
538 continue
539
539
540 if limit_rev >= obj.rev():
540 if limit_rev >= obj.rev():
541 yield obj
541 yield obj
542
542
543 history = []
543 history = []
544 for cnt, obj in enumerate(history_iter()):
544 for cnt, obj in enumerate(history_iter()):
545 if limit and cnt >= limit:
545 if limit and cnt >= limit:
546 break
546 break
547 history.append(hex(obj.node()))
547 history.append(hex(obj.node()))
548
548
549 return [x for x in history]
549 return [x for x in history]
550 return _node_history(context_uid, repo_id, revision, path, limit)
550 return _node_history(context_uid, repo_id, revision, path, limit)
551
551
552 @reraise_safe_exceptions
552 @reraise_safe_exceptions
553 def node_history_untill(self, wire, revision, path, limit):
553 def node_history_untill(self, wire, revision, path, limit):
554 cache_on, context_uid, repo_id = self._cache_on(wire)
554 cache_on, context_uid, repo_id = self._cache_on(wire)
555 region = self._region(wire)
555 region = self._region(wire)
556
556
557 @region.conditional_cache_on_arguments(condition=cache_on)
557 @region.conditional_cache_on_arguments(condition=cache_on)
558 def _node_history_until(_context_uid, _repo_id):
558 def _node_history_until(_context_uid, _repo_id):
559 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
560 ctx = self._get_ctx(repo, revision)
560 ctx = self._get_ctx(repo, revision)
561 fctx = ctx.filectx(safe_bytes(path))
561 fctx = ctx.filectx(safe_bytes(path))
562
562
563 file_log = list(fctx.filelog())
563 file_log = list(fctx.filelog())
564 if limit:
564 if limit:
565 # Limit to the last n items
565 # Limit to the last n items
566 file_log = file_log[-limit:]
566 file_log = file_log[-limit:]
567
567
568 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
568 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
569 return _node_history_until(context_uid, repo_id, revision, path, limit)
569 return _node_history_until(context_uid, repo_id, revision, path, limit)
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def bulk_file_request(self, wire, commit_id, path, pre_load):
572 def bulk_file_request(self, wire, commit_id, path, pre_load):
573 cache_on, context_uid, repo_id = self._cache_on(wire)
573 cache_on, context_uid, repo_id = self._cache_on(wire)
574 region = self._region(wire)
574 region = self._region(wire)
575
575
576 @region.conditional_cache_on_arguments(condition=cache_on)
576 @region.conditional_cache_on_arguments(condition=cache_on)
577 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
577 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
578 result = {}
578 result = {}
579 for attr in pre_load:
579 for attr in pre_load:
580 try:
580 try:
581 method = self._bulk_file_methods[attr]
581 method = self._bulk_file_methods[attr]
582 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
582 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
583 result[attr] = method(wire, _commit_id, _path)
583 result[attr] = method(wire, _commit_id, _path)
584 except KeyError as e:
584 except KeyError as e:
585 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
585 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
586 return BinaryEnvelope(result)
586 return BinaryEnvelope(result)
587
587
588 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
588 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
589
589
590 @reraise_safe_exceptions
590 @reraise_safe_exceptions
591 def fctx_annotate(self, wire, revision, path):
591 def fctx_annotate(self, wire, revision, path):
592 repo = self._factory.repo(wire)
592 repo = self._factory.repo(wire)
593 ctx = self._get_ctx(repo, revision)
593 ctx = self._get_ctx(repo, revision)
594 fctx = ctx.filectx(safe_bytes(path))
594 fctx = ctx.filectx(safe_bytes(path))
595
595
596 result = []
596 result = []
597 for i, annotate_obj in enumerate(fctx.annotate(), 1):
597 for i, annotate_obj in enumerate(fctx.annotate(), 1):
598 ln_no = i
598 ln_no = i
599 sha = hex(annotate_obj.fctx.node())
599 sha = hex(annotate_obj.fctx.node())
600 content = annotate_obj.text
600 content = annotate_obj.text
601 result.append((ln_no, sha, content))
601 result.append((ln_no, sha, content))
602 return result
602 return result
603
603
604 @reraise_safe_exceptions
604 @reraise_safe_exceptions
605 def fctx_node_data(self, wire, revision, path):
605 def fctx_node_data(self, wire, revision, path):
606 repo = self._factory.repo(wire)
606 repo = self._factory.repo(wire)
607 ctx = self._get_ctx(repo, revision)
607 ctx = self._get_ctx(repo, revision)
608 fctx = ctx.filectx(safe_bytes(path))
608 fctx = ctx.filectx(safe_bytes(path))
609 return BytesEnvelope(fctx.data())
609 return BytesEnvelope(fctx.data())
610
610
611 @reraise_safe_exceptions
611 @reraise_safe_exceptions
612 def fctx_flags(self, wire, commit_id, path):
612 def fctx_flags(self, wire, commit_id, path):
613 cache_on, context_uid, repo_id = self._cache_on(wire)
613 cache_on, context_uid, repo_id = self._cache_on(wire)
614 region = self._region(wire)
614 region = self._region(wire)
615
615
616 @region.conditional_cache_on_arguments(condition=cache_on)
616 @region.conditional_cache_on_arguments(condition=cache_on)
617 def _fctx_flags(_repo_id, _commit_id, _path):
617 def _fctx_flags(_repo_id, _commit_id, _path):
618 repo = self._factory.repo(wire)
618 repo = self._factory.repo(wire)
619 ctx = self._get_ctx(repo, commit_id)
619 ctx = self._get_ctx(repo, commit_id)
620 fctx = ctx.filectx(safe_bytes(path))
620 fctx = ctx.filectx(safe_bytes(path))
621 return fctx.flags()
621 return fctx.flags()
622
622
623 return _fctx_flags(repo_id, commit_id, path)
623 return _fctx_flags(repo_id, commit_id, path)
624
624
625 @reraise_safe_exceptions
625 @reraise_safe_exceptions
626 def fctx_size(self, wire, commit_id, path):
626 def fctx_size(self, wire, commit_id, path):
627 cache_on, context_uid, repo_id = self._cache_on(wire)
627 cache_on, context_uid, repo_id = self._cache_on(wire)
628 region = self._region(wire)
628 region = self._region(wire)
629
629
630 @region.conditional_cache_on_arguments(condition=cache_on)
630 @region.conditional_cache_on_arguments(condition=cache_on)
631 def _fctx_size(_repo_id, _revision, _path):
631 def _fctx_size(_repo_id, _revision, _path):
632 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
633 ctx = self._get_ctx(repo, commit_id)
633 ctx = self._get_ctx(repo, commit_id)
634 fctx = ctx.filectx(safe_bytes(path))
634 fctx = ctx.filectx(safe_bytes(path))
635 return fctx.size()
635 return fctx.size()
636 return _fctx_size(repo_id, commit_id, path)
636 return _fctx_size(repo_id, commit_id, path)
637
637
638 @reraise_safe_exceptions
638 @reraise_safe_exceptions
639 def get_all_commit_ids(self, wire, name):
639 def get_all_commit_ids(self, wire, name):
640 cache_on, context_uid, repo_id = self._cache_on(wire)
640 cache_on, context_uid, repo_id = self._cache_on(wire)
641 region = self._region(wire)
641 region = self._region(wire)
642
642
643 @region.conditional_cache_on_arguments(condition=cache_on)
643 @region.conditional_cache_on_arguments(condition=cache_on)
644 def _get_all_commit_ids(_context_uid, _repo_id, _name):
644 def _get_all_commit_ids(_context_uid, _repo_id, _name):
645 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
646 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
646 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
647 return revs
647 return revs
648 return _get_all_commit_ids(context_uid, repo_id, name)
648 return _get_all_commit_ids(context_uid, repo_id, name)
649
649
650 @reraise_safe_exceptions
650 @reraise_safe_exceptions
651 def get_config_value(self, wire, section, name, untrusted=False):
651 def get_config_value(self, wire, section, name, untrusted=False):
652 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
653 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
653 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
654
654
655 @reraise_safe_exceptions
655 @reraise_safe_exceptions
656 def is_large_file(self, wire, commit_id, path):
656 def is_large_file(self, wire, commit_id, path):
657 cache_on, context_uid, repo_id = self._cache_on(wire)
657 cache_on, context_uid, repo_id = self._cache_on(wire)
658 region = self._region(wire)
658 region = self._region(wire)
659
659
660 @region.conditional_cache_on_arguments(condition=cache_on)
660 @region.conditional_cache_on_arguments(condition=cache_on)
661 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
661 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
662 return largefiles.lfutil.isstandin(safe_bytes(path))
662 return largefiles.lfutil.isstandin(safe_bytes(path))
663
663
664 return _is_large_file(context_uid, repo_id, commit_id, path)
664 return _is_large_file(context_uid, repo_id, commit_id, path)
665
665
666 @reraise_safe_exceptions
666 @reraise_safe_exceptions
667 def is_binary(self, wire, revision, path):
667 def is_binary(self, wire, revision, path):
668 cache_on, context_uid, repo_id = self._cache_on(wire)
668 cache_on, context_uid, repo_id = self._cache_on(wire)
669 region = self._region(wire)
669 region = self._region(wire)
670
670
671 @region.conditional_cache_on_arguments(condition=cache_on)
671 @region.conditional_cache_on_arguments(condition=cache_on)
672 def _is_binary(_repo_id, _sha, _path):
672 def _is_binary(_repo_id, _sha, _path):
673 repo = self._factory.repo(wire)
673 repo = self._factory.repo(wire)
674 ctx = self._get_ctx(repo, revision)
674 ctx = self._get_ctx(repo, revision)
675 fctx = ctx.filectx(safe_bytes(path))
675 fctx = ctx.filectx(safe_bytes(path))
676 return fctx.isbinary()
676 return fctx.isbinary()
677
677
678 return _is_binary(repo_id, revision, path)
678 return _is_binary(repo_id, revision, path)
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def md5_hash(self, wire, revision, path):
681 def md5_hash(self, wire, revision, path):
682 cache_on, context_uid, repo_id = self._cache_on(wire)
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683 region = self._region(wire)
683 region = self._region(wire)
684
684
685 @region.conditional_cache_on_arguments(condition=cache_on)
685 @region.conditional_cache_on_arguments(condition=cache_on)
686 def _md5_hash(_repo_id, _sha, _path):
686 def _md5_hash(_repo_id, _sha, _path):
687 repo = self._factory.repo(wire)
687 repo = self._factory.repo(wire)
688 ctx = self._get_ctx(repo, revision)
688 ctx = self._get_ctx(repo, revision)
689 fctx = ctx.filectx(safe_bytes(path))
689 fctx = ctx.filectx(safe_bytes(path))
690 return hashlib.md5(fctx.data()).hexdigest()
690 return hashlib.md5(fctx.data()).hexdigest()
691
691
692 return _md5_hash(repo_id, revision, path)
692 return _md5_hash(repo_id, revision, path)
693
693
694 @reraise_safe_exceptions
694 @reraise_safe_exceptions
695 def in_largefiles_store(self, wire, sha):
695 def in_largefiles_store(self, wire, sha):
696 repo = self._factory.repo(wire)
696 repo = self._factory.repo(wire)
697 return largefiles.lfutil.instore(repo, sha)
697 return largefiles.lfutil.instore(repo, sha)
698
698
699 @reraise_safe_exceptions
699 @reraise_safe_exceptions
700 def in_user_cache(self, wire, sha):
700 def in_user_cache(self, wire, sha):
701 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
702 return largefiles.lfutil.inusercache(repo.ui, sha)
702 return largefiles.lfutil.inusercache(repo.ui, sha)
703
703
704 @reraise_safe_exceptions
704 @reraise_safe_exceptions
705 def store_path(self, wire, sha):
705 def store_path(self, wire, sha):
706 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
707 return largefiles.lfutil.storepath(repo, sha)
707 return largefiles.lfutil.storepath(repo, sha)
708
708
709 @reraise_safe_exceptions
709 @reraise_safe_exceptions
710 def link(self, wire, sha, path):
710 def link(self, wire, sha, path):
711 repo = self._factory.repo(wire)
711 repo = self._factory.repo(wire)
712 largefiles.lfutil.link(
712 largefiles.lfutil.link(
713 largefiles.lfutil.usercachepath(repo.ui, sha), path)
713 largefiles.lfutil.usercachepath(repo.ui, sha), path)
714
714
715 @reraise_safe_exceptions
715 @reraise_safe_exceptions
716 def localrepository(self, wire, create=False):
716 def localrepository(self, wire, create=False):
717 self._factory.repo(wire, create=create)
717 self._factory.repo(wire, create=create)
718
718
719 @reraise_safe_exceptions
719 @reraise_safe_exceptions
720 def lookup(self, wire, revision, both):
720 def lookup(self, wire, revision, both):
721 cache_on, context_uid, repo_id = self._cache_on(wire)
721 cache_on, context_uid, repo_id = self._cache_on(wire)
722 region = self._region(wire)
722 region = self._region(wire)
723
723
724 @region.conditional_cache_on_arguments(condition=cache_on)
724 @region.conditional_cache_on_arguments(condition=cache_on)
725 def _lookup(_context_uid, _repo_id, _revision, _both):
725 def _lookup(_context_uid, _repo_id, _revision, _both):
726 repo = self._factory.repo(wire)
726 repo = self._factory.repo(wire)
727 rev = _revision
727 rev = _revision
728 if isinstance(rev, int):
728 if isinstance(rev, int):
729 # NOTE(marcink):
729 # NOTE(marcink):
730 # since Mercurial doesn't support negative indexes properly
730 # since Mercurial doesn't support negative indexes properly
731 # we need to shift accordingly by one to get proper index, e.g
731 # we need to shift accordingly by one to get proper index, e.g
732 # repo[-1] => repo[-2]
732 # repo[-1] => repo[-2]
733 # repo[0] => repo[-1]
733 # repo[0] => repo[-1]
734 if rev <= 0:
734 if rev <= 0:
735 rev = rev + -1
735 rev = rev + -1
736 try:
736 try:
737 ctx = self._get_ctx(repo, rev)
737 ctx = self._get_ctx(repo, rev)
738 except (TypeError, RepoLookupError, binascii.Error) as e:
738 except (TypeError, RepoLookupError, binascii.Error) as e:
739 e._org_exc_tb = traceback.format_exc()
739 e._org_exc_tb = traceback.format_exc()
740 raise exceptions.LookupException(e)(rev)
740 raise exceptions.LookupException(e)(rev)
741 except LookupError as e:
741 except LookupError as e:
742 e._org_exc_tb = traceback.format_exc()
742 e._org_exc_tb = traceback.format_exc()
743 raise exceptions.LookupException(e)(e.name)
743 raise exceptions.LookupException(e)(e.name)
744
744
745 if not both:
745 if not both:
746 return ctx.hex()
746 return ctx.hex()
747
747
748 ctx = repo[ctx.hex()]
748 ctx = repo[ctx.hex()]
749 return ctx.hex(), ctx.rev()
749 return ctx.hex(), ctx.rev()
750
750
751 return _lookup(context_uid, repo_id, revision, both)
751 return _lookup(context_uid, repo_id, revision, both)
752
752
753 @reraise_safe_exceptions
753 @reraise_safe_exceptions
754 def sync_push(self, wire, url):
754 def sync_push(self, wire, url):
755 if not self.check_url(url, wire['config']):
755 if not self.check_url(url, wire['config']):
756 return
756 return
757
757
758 repo = self._factory.repo(wire)
758 repo = self._factory.repo(wire)
759
759
760 # Disable any prompts for this repo
760 # Disable any prompts for this repo
761 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
761 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
762
762
763 bookmarks = list(dict(repo._bookmarks).keys())
763 bookmarks = list(dict(repo._bookmarks).keys())
764 remote = peer(repo, {}, safe_bytes(url))
764 remote = peer(repo, {}, safe_bytes(url))
765 # Disable any prompts for this remote
765 # Disable any prompts for this remote
766 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
766 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
767
767
768 return exchange.push(
768 return exchange.push(
769 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
769 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
770
770
771 @reraise_safe_exceptions
771 @reraise_safe_exceptions
772 def revision(self, wire, rev):
772 def revision(self, wire, rev):
773 repo = self._factory.repo(wire)
773 repo = self._factory.repo(wire)
774 ctx = self._get_ctx(repo, rev)
774 ctx = self._get_ctx(repo, rev)
775 return ctx.rev()
775 return ctx.rev()
776
776
777 @reraise_safe_exceptions
777 @reraise_safe_exceptions
778 def rev_range(self, wire, commit_filter):
778 def rev_range(self, wire, commit_filter):
779 cache_on, context_uid, repo_id = self._cache_on(wire)
779 cache_on, context_uid, repo_id = self._cache_on(wire)
780 region = self._region(wire)
780 region = self._region(wire)
781
781
782 @region.conditional_cache_on_arguments(condition=cache_on)
782 @region.conditional_cache_on_arguments(condition=cache_on)
783 def _rev_range(_context_uid, _repo_id, _filter):
783 def _rev_range(_context_uid, _repo_id, _filter):
784 repo = self._factory.repo(wire)
784 repo = self._factory.repo(wire)
785 revisions = [
785 revisions = [
786 ascii_str(repo[rev].hex())
786 ascii_str(repo[rev].hex())
787 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
787 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
788 ]
788 ]
789 return revisions
789 return revisions
790
790
791 return _rev_range(context_uid, repo_id, sorted(commit_filter))
791 return _rev_range(context_uid, repo_id, sorted(commit_filter))
792
792
793 @reraise_safe_exceptions
793 @reraise_safe_exceptions
794 def rev_range_hash(self, wire, node):
794 def rev_range_hash(self, wire, node):
795 repo = self._factory.repo(wire)
795 repo = self._factory.repo(wire)
796
796
797 def get_revs(repo, rev_opt):
797 def get_revs(repo, rev_opt):
798 if rev_opt:
798 if rev_opt:
799 revs = revrange(repo, rev_opt)
799 revs = revrange(repo, rev_opt)
800 if len(revs) == 0:
800 if len(revs) == 0:
801 return (nullrev, nullrev)
801 return (nullrev, nullrev)
802 return max(revs), min(revs)
802 return max(revs), min(revs)
803 else:
803 else:
804 return len(repo) - 1, 0
804 return len(repo) - 1, 0
805
805
806 stop, start = get_revs(repo, [node + ':'])
806 stop, start = get_revs(repo, [node + ':'])
807 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
807 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
808 return revs
808 return revs
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
811 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
812 org_path = safe_bytes(wire["path"])
812 org_path = safe_bytes(wire["path"])
813 other_path = safe_bytes(kwargs.pop('other_path', ''))
813 other_path = safe_bytes(kwargs.pop('other_path', ''))
814
814
815 # case when we want to compare two independent repositories
815 # case when we want to compare two independent repositories
816 if other_path and other_path != wire["path"]:
816 if other_path and other_path != wire["path"]:
817 baseui = self._factory._create_config(wire["config"])
817 baseui = self._factory._create_config(wire["config"])
818 repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
818 repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
819 else:
819 else:
820 repo = self._factory.repo(wire)
820 repo = self._factory.repo(wire)
821 return list(repo.revs(rev_spec, *args))
821 return list(repo.revs(rev_spec, *args))
822
822
823 @reraise_safe_exceptions
823 @reraise_safe_exceptions
824 def verify(self, wire,):
824 def verify(self, wire,):
825 repo = self._factory.repo(wire)
825 repo = self._factory.repo(wire)
826 baseui = self._factory._create_config(wire['config'])
826 baseui = self._factory._create_config(wire['config'])
827
827
828 baseui, output = patch_ui_message_output(baseui)
828 baseui, output = patch_ui_message_output(baseui)
829
829
830 repo.ui = baseui
830 repo.ui = baseui
831 verify.verify(repo)
831 verify.verify(repo)
832 return output.getvalue()
832 return output.getvalue()
833
833
834 @reraise_safe_exceptions
834 @reraise_safe_exceptions
835 def hg_update_cache(self, wire,):
835 def hg_update_cache(self, wire,):
836 repo = self._factory.repo(wire)
836 repo = self._factory.repo(wire)
837 baseui = self._factory._create_config(wire['config'])
837 baseui = self._factory._create_config(wire['config'])
838 baseui, output = patch_ui_message_output(baseui)
838 baseui, output = patch_ui_message_output(baseui)
839
839
840 repo.ui = baseui
840 repo.ui = baseui
841 with repo.wlock(), repo.lock():
841 with repo.wlock(), repo.lock():
842 repo.updatecaches(full=True)
842 repo.updatecaches(full=True)
843
843
844 return output.getvalue()
844 return output.getvalue()
845
845
846 @reraise_safe_exceptions
846 @reraise_safe_exceptions
847 def hg_rebuild_fn_cache(self, wire,):
847 def hg_rebuild_fn_cache(self, wire,):
848 repo = self._factory.repo(wire)
848 repo = self._factory.repo(wire)
849 baseui = self._factory._create_config(wire['config'])
849 baseui = self._factory._create_config(wire['config'])
850 baseui, output = patch_ui_message_output(baseui)
850 baseui, output = patch_ui_message_output(baseui)
851
851
852 repo.ui = baseui
852 repo.ui = baseui
853
853
854 repair.rebuildfncache(baseui, repo)
854 repair.rebuildfncache(baseui, repo)
855
855
856 return output.getvalue()
856 return output.getvalue()
857
857
858 @reraise_safe_exceptions
858 @reraise_safe_exceptions
859 def tags(self, wire):
859 def tags(self, wire):
860 cache_on, context_uid, repo_id = self._cache_on(wire)
860 cache_on, context_uid, repo_id = self._cache_on(wire)
861 region = self._region(wire)
861 region = self._region(wire)
862
862
863 @region.conditional_cache_on_arguments(condition=cache_on)
863 @region.conditional_cache_on_arguments(condition=cache_on)
864 def _tags(_context_uid, _repo_id):
864 def _tags(_context_uid, _repo_id):
865 repo = self._factory.repo(wire)
865 repo = self._factory.repo(wire)
866 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
866 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
867
867
868 return _tags(context_uid, repo_id)
868 return _tags(context_uid, repo_id)
869
869
870 @reraise_safe_exceptions
870 @reraise_safe_exceptions
871 def update(self, wire, node='', clean=False):
871 def update(self, wire, node='', clean=False):
872 repo = self._factory.repo(wire)
872 repo = self._factory.repo(wire)
873 baseui = self._factory._create_config(wire['config'])
873 baseui = self._factory._create_config(wire['config'])
874 node = safe_bytes(node)
874 node = safe_bytes(node)
875
875
876 commands.update(baseui, repo, node=node, clean=clean)
876 commands.update(baseui, repo, node=node, clean=clean)
877
877
878 @reraise_safe_exceptions
878 @reraise_safe_exceptions
879 def identify(self, wire):
879 def identify(self, wire):
880 repo = self._factory.repo(wire)
880 repo = self._factory.repo(wire)
881 baseui = self._factory._create_config(wire['config'])
881 baseui = self._factory._create_config(wire['config'])
882 output = io.BytesIO()
882 output = io.BytesIO()
883 baseui.write = output.write
883 baseui.write = output.write
884 # This is required to get a full node id
884 # This is required to get a full node id
885 baseui.debugflag = True
885 baseui.debugflag = True
886 commands.identify(baseui, repo, id=True)
886 commands.identify(baseui, repo, id=True)
887
887
888 return output.getvalue()
888 return output.getvalue()
889
889
890 @reraise_safe_exceptions
890 @reraise_safe_exceptions
891 def heads(self, wire, branch=None):
891 def heads(self, wire, branch=None):
892 repo = self._factory.repo(wire)
892 repo = self._factory.repo(wire)
893 baseui = self._factory._create_config(wire['config'])
893 baseui = self._factory._create_config(wire['config'])
894 output = io.BytesIO()
894 output = io.BytesIO()
895
895
896 def write(data, **unused_kwargs):
896 def write(data, **unused_kwargs):
897 output.write(data)
897 output.write(data)
898
898
899 baseui.write = write
899 baseui.write = write
900 if branch:
900 if branch:
901 args = [safe_bytes(branch)]
901 args = [safe_bytes(branch)]
902 else:
902 else:
903 args = []
903 args = []
904 commands.heads(baseui, repo, template=b'{node} ', *args)
904 commands.heads(baseui, repo, template=b'{node} ', *args)
905
905
906 return output.getvalue()
906 return output.getvalue()
907
907
908 @reraise_safe_exceptions
908 @reraise_safe_exceptions
909 def ancestor(self, wire, revision1, revision2):
909 def ancestor(self, wire, revision1, revision2):
910 repo = self._factory.repo(wire)
910 repo = self._factory.repo(wire)
911 changelog = repo.changelog
911 changelog = repo.changelog
912 lookup = repo.lookup
912 lookup = repo.lookup
913 a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
913 a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
914 return hex(a)
914 return hex(a)
915
915
916 @reraise_safe_exceptions
916 @reraise_safe_exceptions
917 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
917 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
918 baseui = self._factory._create_config(wire["config"], hooks=hooks)
918 baseui = self._factory._create_config(wire["config"], hooks=hooks)
919 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
919 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
920
920
921 @reraise_safe_exceptions
921 @reraise_safe_exceptions
922 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
922 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
923
923
924 repo = self._factory.repo(wire)
924 repo = self._factory.repo(wire)
925 baseui = self._factory._create_config(wire['config'])
925 baseui = self._factory._create_config(wire['config'])
926 publishing = baseui.configbool(b'phases', b'publish')
926 publishing = baseui.configbool(b'phases', b'publish')
927
927
928 def _filectxfn(_repo, ctx, path: bytes):
928 def _filectxfn(_repo, ctx, path: bytes):
929 """
929 """
930 Marks given path as added/changed/removed in a given _repo. This is
930 Marks given path as added/changed/removed in a given _repo. This is
931 for internal mercurial commit function.
931 for internal mercurial commit function.
932 """
932 """
933
933
934 # check if this path is removed
934 # check if this path is removed
935 if safe_str(path) in removed:
935 if safe_str(path) in removed:
936 # returning None is a way to mark node for removal
936 # returning None is a way to mark node for removal
937 return None
937 return None
938
938
939 # check if this path is added
939 # check if this path is added
940 for node in updated:
940 for node in updated:
941 if safe_bytes(node['path']) == path:
941 if safe_bytes(node['path']) == path:
942 return memfilectx(
942 return memfilectx(
943 _repo,
943 _repo,
944 changectx=ctx,
944 changectx=ctx,
945 path=safe_bytes(node['path']),
945 path=safe_bytes(node['path']),
946 data=safe_bytes(node['content']),
946 data=safe_bytes(node['content']),
947 islink=False,
947 islink=False,
948 isexec=bool(node['mode'] & stat.S_IXUSR),
948 isexec=bool(node['mode'] & stat.S_IXUSR),
949 copysource=False)
949 copysource=False)
950 abort_exc = exceptions.AbortException()
950 abort_exc = exceptions.AbortException()
951 raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")
951 raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")
952
952
953 if publishing:
953 if publishing:
954 new_commit_phase = b'public'
954 new_commit_phase = b'public'
955 else:
955 else:
956 new_commit_phase = b'draft'
956 new_commit_phase = b'draft'
957 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
957 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
958 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
958 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
959 commit_ctx = memctx(
959 commit_ctx = memctx(
960 repo=repo,
960 repo=repo,
961 parents=parents,
961 parents=parents,
962 text=safe_bytes(message),
962 text=safe_bytes(message),
963 files=[safe_bytes(x) for x in files],
963 files=[safe_bytes(x) for x in files],
964 filectxfn=_filectxfn,
964 filectxfn=_filectxfn,
965 user=safe_bytes(user),
965 user=safe_bytes(user),
966 date=(commit_time, commit_timezone),
966 date=(commit_time, commit_timezone),
967 extra=kwargs)
967 extra=kwargs)
968
968
969 n = repo.commitctx(commit_ctx)
969 n = repo.commitctx(commit_ctx)
970 new_id = hex(n)
970 new_id = hex(n)
971
971
972 return new_id
972 return new_id
973
973
974 @reraise_safe_exceptions
974 @reraise_safe_exceptions
975 def pull(self, wire, url, commit_ids=None):
975 def pull(self, wire, url, commit_ids=None):
976 repo = self._factory.repo(wire)
976 repo = self._factory.repo(wire)
977 # Disable any prompts for this repo
977 # Disable any prompts for this repo
978 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
978 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
979
979
980 remote = peer(repo, {}, safe_bytes(url))
980 remote = peer(repo, {}, safe_bytes(url))
981 # Disable any prompts for this remote
981 # Disable any prompts for this remote
982 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
982 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
983
983
984 if commit_ids:
984 if commit_ids:
985 commit_ids = [bin(commit_id) for commit_id in commit_ids]
985 commit_ids = [bin(commit_id) for commit_id in commit_ids]
986
986
987 return exchange.pull(
987 return exchange.pull(
988 repo, remote, heads=commit_ids, force=None).cgresult
988 repo, remote, heads=commit_ids, force=None).cgresult
989
989
990 @reraise_safe_exceptions
990 @reraise_safe_exceptions
991 def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
991 def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
992 repo = self._factory.repo(wire)
992 repo = self._factory.repo(wire)
993 baseui = self._factory._create_config(wire['config'], hooks=hooks)
993 baseui = self._factory._create_config(wire['config'], hooks=hooks)
994
994
995 source = safe_bytes(source)
995 source = safe_bytes(source)
996
996
997 # Mercurial internally has a lot of logic that checks ONLY if
997 # Mercurial internally has a lot of logic that checks ONLY if
998 # option is defined, we just pass those if they are defined then
998 # option is defined, we just pass those if they are defined then
999 opts = {}
999 opts = {}
1000
1000
1001 if bookmark:
1001 if bookmark:
1002 opts['bookmark'] = [safe_bytes(x) for x in bookmark] \
1002 opts['bookmark'] = [safe_bytes(x) for x in bookmark] \
1003 if isinstance(bookmark, list) else safe_bytes(bookmark)
1003 if isinstance(bookmark, list) else safe_bytes(bookmark)
1004
1004
1005 if branch:
1005 if branch:
1006 opts['branch'] = [safe_bytes(x) for x in branch] \
1006 opts['branch'] = [safe_bytes(x) for x in branch] \
1007 if isinstance(branch, list) else safe_bytes(branch)
1007 if isinstance(branch, list) else safe_bytes(branch)
1008
1008
1009 if revision:
1009 if revision:
1010 opts['rev'] = [safe_bytes(x) for x in revision] \
1010 opts['rev'] = [safe_bytes(x) for x in revision] \
1011 if isinstance(revision, list) else safe_bytes(revision)
1011 if isinstance(revision, list) else safe_bytes(revision)
1012
1012
1013 commands.pull(baseui, repo, source, **opts)
1013 commands.pull(baseui, repo, source, **opts)
1014
1014
1015 @reraise_safe_exceptions
1015 @reraise_safe_exceptions
1016 def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
1016 def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
1017 repo = self._factory.repo(wire)
1017 repo = self._factory.repo(wire)
1018 baseui = self._factory._create_config(wire['config'], hooks=hooks)
1018 baseui = self._factory._create_config(wire['config'], hooks=hooks)
1019
1019
1020 revisions = [safe_bytes(x) for x in revisions] \
1020 revisions = [safe_bytes(x) for x in revisions] \
1021 if isinstance(revisions, list) else safe_bytes(revisions)
1021 if isinstance(revisions, list) else safe_bytes(revisions)
1022
1022
1023 commands.push(baseui, repo, safe_bytes(dest_path),
1023 commands.push(baseui, repo, safe_bytes(dest_path),
1024 rev=revisions,
1024 rev=revisions,
1025 new_branch=push_branches)
1025 new_branch=push_branches)
1026
1026
1027 @reraise_safe_exceptions
1027 @reraise_safe_exceptions
1028 def strip(self, wire, revision, update, backup):
1028 def strip(self, wire, revision, update, backup):
1029 repo = self._factory.repo(wire)
1029 repo = self._factory.repo(wire)
1030 ctx = self._get_ctx(repo, revision)
1030 ctx = self._get_ctx(repo, revision)
1031 hgext_strip.strip(
1031 hgext_strip.strip(
1032 repo.baseui, repo, ctx.node(), update=update, backup=backup)
1032 repo.baseui, repo, ctx.node(), update=update, backup=backup)
1033
1033
1034 @reraise_safe_exceptions
1034 @reraise_safe_exceptions
1035 def get_unresolved_files(self, wire):
1035 def get_unresolved_files(self, wire):
1036 repo = self._factory.repo(wire)
1036 repo = self._factory.repo(wire)
1037
1037
1038 log.debug('Calculating unresolved files for repo: %s', repo)
1038 log.debug('Calculating unresolved files for repo: %s', repo)
1039 output = io.BytesIO()
1039 output = io.BytesIO()
1040
1040
1041 def write(data, **unused_kwargs):
1041 def write(data, **unused_kwargs):
1042 output.write(data)
1042 output.write(data)
1043
1043
1044 baseui = self._factory._create_config(wire['config'])
1044 baseui = self._factory._create_config(wire['config'])
1045 baseui.write = write
1045 baseui.write = write
1046
1046
1047 commands.resolve(baseui, repo, list=True)
1047 commands.resolve(baseui, repo, list=True)
1048 unresolved = output.getvalue().splitlines(0)
1048 unresolved = output.getvalue().splitlines(0)
1049 return unresolved
1049 return unresolved
1050
1050
1051 @reraise_safe_exceptions
1051 @reraise_safe_exceptions
1052 def merge(self, wire, revision):
1052 def merge(self, wire, revision):
1053 repo = self._factory.repo(wire)
1053 repo = self._factory.repo(wire)
1054 baseui = self._factory._create_config(wire['config'])
1054 baseui = self._factory._create_config(wire['config'])
1055 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1055 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1056
1056
1057 # In case of sub repositories are used mercurial prompts the user in
1057 # In case of sub repositories are used mercurial prompts the user in
1058 # case of merge conflicts or different sub repository sources. By
1058 # case of merge conflicts or different sub repository sources. By
1059 # setting the interactive flag to `False` mercurial doesn't prompt the
1059 # setting the interactive flag to `False` mercurial doesn't prompt the
1060 # used but instead uses a default value.
1060 # used but instead uses a default value.
1061 repo.ui.setconfig(b'ui', b'interactive', False)
1061 repo.ui.setconfig(b'ui', b'interactive', False)
1062 commands.merge(baseui, repo, rev=safe_bytes(revision))
1062 commands.merge(baseui, repo, rev=safe_bytes(revision))
1063
1063
1064 @reraise_safe_exceptions
1064 @reraise_safe_exceptions
1065 def merge_state(self, wire):
1065 def merge_state(self, wire):
1066 repo = self._factory.repo(wire)
1066 repo = self._factory.repo(wire)
1067 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1067 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1068
1068
1069 # In case of sub repositories are used mercurial prompts the user in
1069 # In case of sub repositories are used mercurial prompts the user in
1070 # case of merge conflicts or different sub repository sources. By
1070 # case of merge conflicts or different sub repository sources. By
1071 # setting the interactive flag to `False` mercurial doesn't prompt the
1071 # setting the interactive flag to `False` mercurial doesn't prompt the
1072 # used but instead uses a default value.
1072 # used but instead uses a default value.
1073 repo.ui.setconfig(b'ui', b'interactive', False)
1073 repo.ui.setconfig(b'ui', b'interactive', False)
1074 ms = hg_merge.mergestate(repo)
1074 ms = hg_merge.mergestate(repo)
1075 return [x for x in ms.unresolved()]
1075 return [x for x in ms.unresolved()]
1076
1076
1077 @reraise_safe_exceptions
1077 @reraise_safe_exceptions
1078 def commit(self, wire, message, username, close_branch=False):
1078 def commit(self, wire, message, username, close_branch=False):
1079 repo = self._factory.repo(wire)
1079 repo = self._factory.repo(wire)
1080 baseui = self._factory._create_config(wire['config'])
1080 baseui = self._factory._create_config(wire['config'])
1081 repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
1081 repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
1082 commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)
1082 commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)
1083
1083
1084 @reraise_safe_exceptions
1084 @reraise_safe_exceptions
1085 def rebase(self, wire, source='', dest='', abort=False):
1085 def rebase(self, wire, source='', dest='', abort=False):
1086 repo = self._factory.repo(wire)
1086 repo = self._factory.repo(wire)
1087 baseui = self._factory._create_config(wire['config'])
1087 baseui = self._factory._create_config(wire['config'])
1088 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1088 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1089 # In case of sub repositories are used mercurial prompts the user in
1089 # In case of sub repositories are used mercurial prompts the user in
1090 # case of merge conflicts or different sub repository sources. By
1090 # case of merge conflicts or different sub repository sources. By
1091 # setting the interactive flag to `False` mercurial doesn't prompt the
1091 # setting the interactive flag to `False` mercurial doesn't prompt the
1092 # used but instead uses a default value.
1092 # used but instead uses a default value.
1093 repo.ui.setconfig(b'ui', b'interactive', False)
1093 repo.ui.setconfig(b'ui', b'interactive', False)
1094
1094
1095 rebase.rebase(baseui, repo, base=safe_bytes(source or ''), dest=safe_bytes(dest or ''),
1095 rebase.rebase(baseui, repo, base=safe_bytes(source or ''), dest=safe_bytes(dest or ''),
1096 abort=abort, keep=not abort)
1096 abort=abort, keep=not abort)
1097
1097
1098 @reraise_safe_exceptions
1098 @reraise_safe_exceptions
1099 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1099 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1100 repo = self._factory.repo(wire)
1100 repo = self._factory.repo(wire)
1101 ctx = self._get_ctx(repo, revision)
1101 ctx = self._get_ctx(repo, revision)
1102 node = ctx.node()
1102 node = ctx.node()
1103
1103
1104 date = (tag_time, tag_timezone)
1104 date = (tag_time, tag_timezone)
1105 try:
1105 try:
1106 hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
1106 hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
1107 except Abort as e:
1107 except Abort as e:
1108 log.exception("Tag operation aborted")
1108 log.exception("Tag operation aborted")
1109 # Exception can contain unicode which we convert
1109 # Exception can contain unicode which we convert
1110 raise exceptions.AbortException(e)(repr(e))
1110 raise exceptions.AbortException(e)(repr(e))
1111
1111
1112 @reraise_safe_exceptions
1112 @reraise_safe_exceptions
1113 def bookmark(self, wire, bookmark, revision=''):
1113 def bookmark(self, wire, bookmark, revision=''):
1114 repo = self._factory.repo(wire)
1114 repo = self._factory.repo(wire)
1115 baseui = self._factory._create_config(wire['config'])
1115 baseui = self._factory._create_config(wire['config'])
1116 revision = revision or ''
1116 revision = revision or ''
1117 commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1117 commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1118
1118
1119 @reraise_safe_exceptions
1119 @reraise_safe_exceptions
1120 def install_hooks(self, wire, force=False):
1120 def install_hooks(self, wire, force=False):
1121 # we don't need any special hooks for Mercurial
1121 # we don't need any special hooks for Mercurial
1122 pass
1122 pass
1123
1123
1124 @reraise_safe_exceptions
1124 @reraise_safe_exceptions
1125 def get_hooks_info(self, wire):
1125 def get_hooks_info(self, wire):
1126 return {
1126 return {
1127 'pre_version': vcsserver.__version__,
1127 'pre_version': vcsserver.__version__,
1128 'post_version': vcsserver.__version__,
1128 'post_version': vcsserver.__version__,
1129 }
1129 }
1130
1130
1131 @reraise_safe_exceptions
1131 @reraise_safe_exceptions
1132 def set_head_ref(self, wire, head_name):
1132 def set_head_ref(self, wire, head_name):
1133 pass
1133 pass
1134
1134
1135 @reraise_safe_exceptions
1135 @reraise_safe_exceptions
1136 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1136 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1137 archive_dir_name, commit_id, cache_config):
1137 archive_dir_name, commit_id, cache_config):
1138
1138
1139 def file_walker(_commit_id, path):
1139 def file_walker(_commit_id, path):
1140 repo = self._factory.repo(wire)
1140 repo = self._factory.repo(wire)
1141 ctx = repo[_commit_id]
1141 ctx = repo[_commit_id]
1142 is_root = path in ['', '/']
1142 is_root = path in ['', '/']
1143 if is_root:
1143 if is_root:
1144 matcher = alwaysmatcher(badfn=None)
1144 matcher = alwaysmatcher(badfn=None)
1145 else:
1145 else:
1146 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1146 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1147 file_iter = ctx.manifest().walk(matcher)
1147 file_iter = ctx.manifest().walk(matcher)
1148
1148
1149 for fn in file_iter:
1149 for fn in file_iter:
1150 file_path = fn
1150 file_path = fn
1151 flags = ctx.flags(fn)
1151 flags = ctx.flags(fn)
1152 mode = b'x' in flags and 0o755 or 0o644
1152 mode = b'x' in flags and 0o755 or 0o644
1153 is_link = b'l' in flags
1153 is_link = b'l' in flags
1154
1154
1155 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1155 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1156
1156
1157 return store_archive_in_cache(
1157 return store_archive_in_cache(
1158 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1158 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1159
1159
@@ -1,935 +1,935 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
31
32 import svn.client # noqa
32 import svn.client # noqa
33 import svn.core # noqa
33 import svn.core # noqa
34 import svn.delta # noqa
34 import svn.delta # noqa
35 import svn.diff # noqa
35 import svn.diff # noqa
36 import svn.fs # noqa
36 import svn.fs # noqa
37 import svn.repos # noqa
37 import svn.repos # noqa
38
38
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
41 from vcsserver.exceptions import NoContentException
41 from vcsserver.exceptions import NoContentException
42 from vcsserver.str_utils import safe_str, safe_bytes
42 from vcsserver.str_utils import safe_str, safe_bytes
43 from vcsserver.type_utils import assert_bytes
43 from vcsserver.type_utils import assert_bytes
44 from vcsserver.vcs_base import RemoteBase
44 from vcsserver.vcs_base import RemoteBase
45 from vcsserver.lib.svnremoterepo import svnremoterepo
45 from vcsserver.lib.svnremoterepo import svnremoterepo
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 svn_compatible_versions_map = {
49 svn_compatible_versions_map = {
50 'pre-1.4-compatible': '1.3',
50 'pre-1.4-compatible': '1.3',
51 'pre-1.5-compatible': '1.4',
51 'pre-1.5-compatible': '1.4',
52 'pre-1.6-compatible': '1.5',
52 'pre-1.6-compatible': '1.5',
53 'pre-1.8-compatible': '1.7',
53 'pre-1.8-compatible': '1.7',
54 'pre-1.9-compatible': '1.8',
54 'pre-1.9-compatible': '1.8',
55 }
55 }
56
56
57 current_compatible_version = '1.14'
57 current_compatible_version = '1.14'
58
58
59
59
60 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
61 """Decorator for converting svn exceptions to something neutral."""
61 """Decorator for converting svn exceptions to something neutral."""
62 def wrapper(*args, **kwargs):
62 def wrapper(*args, **kwargs):
63 try:
63 try:
64 return func(*args, **kwargs)
64 return func(*args, **kwargs)
65 except Exception as e:
65 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in svn remote call")
67 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException(e), e)
68 raise_from_original(exceptions.UnhandledException(e), e)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
74 repo_type = 'svn'
75
75
76 def _create_repo(self, wire, create, compatible_version):
76 def _create_repo(self, wire, create, compatible_version):
77 path = svn.core.svn_path_canonicalize(wire['path'])
77 path = svn.core.svn_path_canonicalize(wire['path'])
78 if create:
78 if create:
79 fs_config = {'compatible-version': current_compatible_version}
79 fs_config = {'compatible-version': current_compatible_version}
80 if compatible_version:
80 if compatible_version:
81
81
82 compatible_version_string = \
82 compatible_version_string = \
83 svn_compatible_versions_map.get(compatible_version) \
83 svn_compatible_versions_map.get(compatible_version) \
84 or compatible_version
84 or compatible_version
85 fs_config['compatible-version'] = compatible_version_string
85 fs_config['compatible-version'] = compatible_version_string
86
86
87 log.debug('Create SVN repo with config `%s`', fs_config)
87 log.debug('Create SVN repo with config `%s`', fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
89 else:
89 else:
90 repo = svn.repos.open(path)
90 repo = svn.repos.open(path)
91
91
92 log.debug('repository created: got SVN object: %s', repo)
92 log.debug('repository created: got SVN object: %s', repo)
93 return repo
93 return repo
94
94
95 def repo(self, wire, create=False, compatible_version=None):
95 def repo(self, wire, create=False, compatible_version=None):
96 """
96 """
97 Get a repository instance for the given path.
97 Get a repository instance for the given path.
98 """
98 """
99 return self._create_repo(wire, create, compatible_version)
99 return self._create_repo(wire, create, compatible_version)
100
100
101
101
102 NODE_TYPE_MAPPING = {
102 NODE_TYPE_MAPPING = {
103 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_file: 'file',
104 svn.core.svn_node_dir: 'dir',
104 svn.core.svn_node_dir: 'dir',
105 }
105 }
106
106
107
107
108 class SvnRemote(RemoteBase):
108 class SvnRemote(RemoteBase):
109
109
110 def __init__(self, factory, hg_factory=None):
110 def __init__(self, factory, hg_factory=None):
111 self._factory = factory
111 self._factory = factory
112
112
113 self._bulk_methods = {
113 self._bulk_methods = {
114 # NOT supported in SVN ATM...
114 # NOT supported in SVN ATM...
115 }
115 }
116 self._bulk_file_methods = {
116 self._bulk_file_methods = {
117 "size": self.get_file_size,
117 "size": self.get_file_size,
118 "data": self.get_file_content,
118 "data": self.get_file_content,
119 "flags": self.get_node_type,
119 "flags": self.get_node_type,
120 "is_binary": self.is_binary,
120 "is_binary": self.is_binary,
121 "md5": self.md5_hash
121 "md5": self.md5_hash
122 }
122 }
123
123
124 @reraise_safe_exceptions
124 @reraise_safe_exceptions
125 def bulk_file_request(self, wire, commit_id, path, pre_load):
125 def bulk_file_request(self, wire, commit_id, path, pre_load):
126 cache_on, context_uid, repo_id = self._cache_on(wire)
126 cache_on, context_uid, repo_id = self._cache_on(wire)
127 region = self._region(wire)
127 region = self._region(wire)
128
128
129 # since we use unified API, we need to cast from str to in for SVN
129 # since we use unified API, we need to cast from str to in for SVN
130 commit_id = int(commit_id)
130 commit_id = int(commit_id)
131
131
132 @region.conditional_cache_on_arguments(condition=cache_on)
132 @region.conditional_cache_on_arguments(condition=cache_on)
133 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
133 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
134 result = {}
134 result = {}
135 for attr in pre_load:
135 for attr in pre_load:
136 try:
136 try:
137 method = self._bulk_file_methods[attr]
137 method = self._bulk_file_methods[attr]
138 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
138 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
139 result[attr] = method(wire, _commit_id, _path)
139 result[attr] = method(wire, _commit_id, _path)
140 except KeyError as e:
140 except KeyError as e:
141 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
141 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
142 return BinaryEnvelope(result)
142 return BinaryEnvelope(result)
143
143
144 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
144 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
145
145
146 @reraise_safe_exceptions
146 @reraise_safe_exceptions
147 def discover_svn_version(self):
147 def discover_svn_version(self):
148 try:
148 try:
149 import svn.core
149 import svn.core
150 svn_ver = svn.core.SVN_VERSION
150 svn_ver = svn.core.SVN_VERSION
151 except ImportError:
151 except ImportError:
152 svn_ver = None
152 svn_ver = None
153 return safe_str(svn_ver)
153 return safe_str(svn_ver)
154
154
155 @reraise_safe_exceptions
155 @reraise_safe_exceptions
156 def is_empty(self, wire):
156 def is_empty(self, wire):
157 try:
157 try:
158 return self.lookup(wire, -1) == 0
158 return self.lookup(wire, -1) == 0
159 except Exception:
159 except Exception:
160 log.exception("failed to read object_store")
160 log.exception("failed to read object_store")
161 return False
161 return False
162
162
163 def check_url(self, url, config):
163 def check_url(self, url, config):
164
164
165 # uuid function gets only valid UUID from proper repo, else
165 # uuid function gets only valid UUID from proper repo, else
166 # throws exception
166 # throws exception
167 username, password, src_url = self.get_url_and_credentials(url)
167 username, password, src_url = self.get_url_and_credentials(url)
168 try:
168 try:
169 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
169 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
170 except Exception:
170 except Exception:
171 tb = traceback.format_exc()
171 tb = traceback.format_exc()
172 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
172 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
173 raise URLError(f'"{url}" is not a valid Subversion source url.')
173 raise URLError(f'"{url}" is not a valid Subversion source url.')
174 return True
174 return True
175
175
176 def is_path_valid_repository(self, wire, path):
176 def is_path_valid_repository(self, wire, path):
177
177
178 # NOTE(marcink): short circuit the check for SVN repo
178 # NOTE(marcink): short circuit the check for SVN repo
179 # the repos.open might be expensive to check, but we have one cheap
179 # the repos.open might be expensive to check, but we have one cheap
180 # pre condition that we can use, to check for 'format' file
180 # pre condition that we can use, to check for 'format' file
181
181
182 if not os.path.isfile(os.path.join(path, 'format')):
182 if not os.path.isfile(os.path.join(path, 'format')):
183 return False
183 return False
184
184
185 try:
185 try:
186 svn.repos.open(path)
186 svn.repos.open(path)
187 except svn.core.SubversionException:
187 except svn.core.SubversionException:
188 tb = traceback.format_exc()
188 tb = traceback.format_exc()
189 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
189 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
190 return False
190 return False
191 return True
191 return True
192
192
193 @reraise_safe_exceptions
193 @reraise_safe_exceptions
194 def verify(self, wire,):
194 def verify(self, wire,):
195 repo_path = wire['path']
195 repo_path = wire['path']
196 if not self.is_path_valid_repository(wire, repo_path):
196 if not self.is_path_valid_repository(wire, repo_path):
197 raise Exception(
197 raise Exception(
198 "Path %s is not a valid Subversion repository." % repo_path)
198 "Path %s is not a valid Subversion repository." % repo_path)
199
199
200 cmd = ['svnadmin', 'info', repo_path]
200 cmd = ['svnadmin', 'info', repo_path]
201 stdout, stderr = subprocessio.run_command(cmd)
201 stdout, stderr = subprocessio.run_command(cmd)
202 return stdout
202 return stdout
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def lookup(self, wire, revision):
205 def lookup(self, wire, revision):
206 if revision not in [-1, None, 'HEAD']:
206 if revision not in [-1, None, 'HEAD']:
207 raise NotImplementedError
207 raise NotImplementedError
208 repo = self._factory.repo(wire)
208 repo = self._factory.repo(wire)
209 fs_ptr = svn.repos.fs(repo)
209 fs_ptr = svn.repos.fs(repo)
210 head = svn.fs.youngest_rev(fs_ptr)
210 head = svn.fs.youngest_rev(fs_ptr)
211 return head
211 return head
212
212
213 @reraise_safe_exceptions
213 @reraise_safe_exceptions
214 def lookup_interval(self, wire, start_ts, end_ts):
214 def lookup_interval(self, wire, start_ts, end_ts):
215 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
216 fsobj = svn.repos.fs(repo)
216 fsobj = svn.repos.fs(repo)
217 start_rev = None
217 start_rev = None
218 end_rev = None
218 end_rev = None
219 if start_ts:
219 if start_ts:
220 start_ts_svn = apr_time_t(start_ts)
220 start_ts_svn = apr_time_t(start_ts)
221 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
221 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
222 else:
222 else:
223 start_rev = 1
223 start_rev = 1
224 if end_ts:
224 if end_ts:
225 end_ts_svn = apr_time_t(end_ts)
225 end_ts_svn = apr_time_t(end_ts)
226 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
226 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
227 else:
227 else:
228 end_rev = svn.fs.youngest_rev(fsobj)
228 end_rev = svn.fs.youngest_rev(fsobj)
229 return start_rev, end_rev
229 return start_rev, end_rev
230
230
231 @reraise_safe_exceptions
231 @reraise_safe_exceptions
232 def revision_properties(self, wire, revision):
232 def revision_properties(self, wire, revision):
233
233
234 cache_on, context_uid, repo_id = self._cache_on(wire)
234 cache_on, context_uid, repo_id = self._cache_on(wire)
235 region = self._region(wire)
235 region = self._region(wire)
236
236
237 @region.conditional_cache_on_arguments(condition=cache_on)
237 @region.conditional_cache_on_arguments(condition=cache_on)
238 def _revision_properties(_repo_id, _revision):
238 def _revision_properties(_repo_id, _revision):
239 repo = self._factory.repo(wire)
239 repo = self._factory.repo(wire)
240 fs_ptr = svn.repos.fs(repo)
240 fs_ptr = svn.repos.fs(repo)
241 return svn.fs.revision_proplist(fs_ptr, revision)
241 return svn.fs.revision_proplist(fs_ptr, revision)
242 return _revision_properties(repo_id, revision)
242 return _revision_properties(repo_id, revision)
243
243
244 def revision_changes(self, wire, revision):
244 def revision_changes(self, wire, revision):
245
245
246 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
247 fsobj = svn.repos.fs(repo)
247 fsobj = svn.repos.fs(repo)
248 rev_root = svn.fs.revision_root(fsobj, revision)
248 rev_root = svn.fs.revision_root(fsobj, revision)
249
249
250 editor = svn.repos.ChangeCollector(fsobj, rev_root)
250 editor = svn.repos.ChangeCollector(fsobj, rev_root)
251 editor_ptr, editor_baton = svn.delta.make_editor(editor)
251 editor_ptr, editor_baton = svn.delta.make_editor(editor)
252 base_dir = ""
252 base_dir = ""
253 send_deltas = False
253 send_deltas = False
254 svn.repos.replay2(
254 svn.repos.replay2(
255 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
255 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
256 editor_ptr, editor_baton, None)
256 editor_ptr, editor_baton, None)
257
257
258 added = []
258 added = []
259 changed = []
259 changed = []
260 removed = []
260 removed = []
261
261
262 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
262 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
263 for path, change in editor.changes.items():
263 for path, change in editor.changes.items():
264 # TODO: Decide what to do with directory nodes. Subversion can add
264 # TODO: Decide what to do with directory nodes. Subversion can add
265 # empty directories.
265 # empty directories.
266
266
267 if change.item_kind == svn.core.svn_node_dir:
267 if change.item_kind == svn.core.svn_node_dir:
268 continue
268 continue
269 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
269 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
270 added.append(path)
270 added.append(path)
271 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
271 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
272 svn.repos.CHANGE_ACTION_REPLACE]:
272 svn.repos.CHANGE_ACTION_REPLACE]:
273 changed.append(path)
273 changed.append(path)
274 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
274 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
275 removed.append(path)
275 removed.append(path)
276 else:
276 else:
277 raise NotImplementedError(
277 raise NotImplementedError(
278 "Action {} not supported on path {}".format(
278 "Action {} not supported on path {}".format(
279 change.action, path))
279 change.action, path))
280
280
281 changes = {
281 changes = {
282 'added': added,
282 'added': added,
283 'changed': changed,
283 'changed': changed,
284 'removed': removed,
284 'removed': removed,
285 }
285 }
286 return changes
286 return changes
287
287
288 @reraise_safe_exceptions
288 @reraise_safe_exceptions
289 def node_history(self, wire, path, revision, limit):
289 def node_history(self, wire, path, revision, limit):
290 cache_on, context_uid, repo_id = self._cache_on(wire)
290 cache_on, context_uid, repo_id = self._cache_on(wire)
291 region = self._region(wire)
291 region = self._region(wire)
292
292
293 @region.conditional_cache_on_arguments(condition=cache_on)
293 @region.conditional_cache_on_arguments(condition=cache_on)
294 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
294 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
295 cross_copies = False
295 cross_copies = False
296 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
297 fsobj = svn.repos.fs(repo)
297 fsobj = svn.repos.fs(repo)
298 rev_root = svn.fs.revision_root(fsobj, revision)
298 rev_root = svn.fs.revision_root(fsobj, revision)
299
299
300 history_revisions = []
300 history_revisions = []
301 history = svn.fs.node_history(rev_root, path)
301 history = svn.fs.node_history(rev_root, path)
302 history = svn.fs.history_prev(history, cross_copies)
302 history = svn.fs.history_prev(history, cross_copies)
303 while history:
303 while history:
304 __, node_revision = svn.fs.history_location(history)
304 __, node_revision = svn.fs.history_location(history)
305 history_revisions.append(node_revision)
305 history_revisions.append(node_revision)
306 if limit and len(history_revisions) >= limit:
306 if limit and len(history_revisions) >= limit:
307 break
307 break
308 history = svn.fs.history_prev(history, cross_copies)
308 history = svn.fs.history_prev(history, cross_copies)
309 return history_revisions
309 return history_revisions
310 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
310 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
311
311
312 @reraise_safe_exceptions
312 @reraise_safe_exceptions
313 def node_properties(self, wire, path, revision):
313 def node_properties(self, wire, path, revision):
314 cache_on, context_uid, repo_id = self._cache_on(wire)
314 cache_on, context_uid, repo_id = self._cache_on(wire)
315 region = self._region(wire)
315 region = self._region(wire)
316
316
317 @region.conditional_cache_on_arguments(condition=cache_on)
317 @region.conditional_cache_on_arguments(condition=cache_on)
318 def _node_properties(_repo_id, _path, _revision):
318 def _node_properties(_repo_id, _path, _revision):
319 repo = self._factory.repo(wire)
319 repo = self._factory.repo(wire)
320 fsobj = svn.repos.fs(repo)
320 fsobj = svn.repos.fs(repo)
321 rev_root = svn.fs.revision_root(fsobj, revision)
321 rev_root = svn.fs.revision_root(fsobj, revision)
322 return svn.fs.node_proplist(rev_root, path)
322 return svn.fs.node_proplist(rev_root, path)
323 return _node_properties(repo_id, path, revision)
323 return _node_properties(repo_id, path, revision)
324
324
325 def file_annotate(self, wire, path, revision):
325 def file_annotate(self, wire, path, revision):
326 abs_path = 'file://' + urllib.request.pathname2url(
326 abs_path = 'file://' + urllib.request.pathname2url(
327 vcspath.join(wire['path'], path))
327 vcspath.join(wire['path'], path))
328 file_uri = svn.core.svn_path_canonicalize(abs_path)
328 file_uri = svn.core.svn_path_canonicalize(abs_path)
329
329
330 start_rev = svn_opt_revision_value_t(0)
330 start_rev = svn_opt_revision_value_t(0)
331 peg_rev = svn_opt_revision_value_t(revision)
331 peg_rev = svn_opt_revision_value_t(revision)
332 end_rev = peg_rev
332 end_rev = peg_rev
333
333
334 annotations = []
334 annotations = []
335
335
336 def receiver(line_no, revision, author, date, line, pool):
336 def receiver(line_no, revision, author, date, line, pool):
337 annotations.append((line_no, revision, line))
337 annotations.append((line_no, revision, line))
338
338
339 # TODO: Cannot use blame5, missing typemap function in the swig code
339 # TODO: Cannot use blame5, missing typemap function in the swig code
340 try:
340 try:
341 svn.client.blame2(
341 svn.client.blame2(
342 file_uri, peg_rev, start_rev, end_rev,
342 file_uri, peg_rev, start_rev, end_rev,
343 receiver, svn.client.create_context())
343 receiver, svn.client.create_context())
344 except svn.core.SubversionException as exc:
344 except svn.core.SubversionException as exc:
345 log.exception("Error during blame operation.")
345 log.exception("Error during blame operation.")
346 raise Exception(
346 raise Exception(
347 "Blame not supported or file does not exist at path %s. "
347 "Blame not supported or file does not exist at path %s. "
348 "Error %s." % (path, exc))
348 "Error %s." % (path, exc))
349
349
350 return annotations
350 return annotations
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def get_node_type(self, wire, revision=None, path=''):
353 def get_node_type(self, wire, revision=None, path=''):
354
354
355 cache_on, context_uid, repo_id = self._cache_on(wire)
355 cache_on, context_uid, repo_id = self._cache_on(wire)
356 region = self._region(wire)
356 region = self._region(wire)
357
357
358 @region.conditional_cache_on_arguments(condition=cache_on)
358 @region.conditional_cache_on_arguments(condition=cache_on)
359 def _get_node_type(_repo_id, _revision, _path):
359 def _get_node_type(_repo_id, _revision, _path):
360 repo = self._factory.repo(wire)
360 repo = self._factory.repo(wire)
361 fs_ptr = svn.repos.fs(repo)
361 fs_ptr = svn.repos.fs(repo)
362 if _revision is None:
362 if _revision is None:
363 _revision = svn.fs.youngest_rev(fs_ptr)
363 _revision = svn.fs.youngest_rev(fs_ptr)
364 root = svn.fs.revision_root(fs_ptr, _revision)
364 root = svn.fs.revision_root(fs_ptr, _revision)
365 node = svn.fs.check_path(root, path)
365 node = svn.fs.check_path(root, path)
366 return NODE_TYPE_MAPPING.get(node, None)
366 return NODE_TYPE_MAPPING.get(node, None)
367 return _get_node_type(repo_id, revision, path)
367 return _get_node_type(repo_id, revision, path)
368
368
369 @reraise_safe_exceptions
369 @reraise_safe_exceptions
370 def get_nodes(self, wire, revision=None, path=''):
370 def get_nodes(self, wire, revision=None, path=''):
371
371
372 cache_on, context_uid, repo_id = self._cache_on(wire)
372 cache_on, context_uid, repo_id = self._cache_on(wire)
373 region = self._region(wire)
373 region = self._region(wire)
374
374
375 @region.conditional_cache_on_arguments(condition=cache_on)
375 @region.conditional_cache_on_arguments(condition=cache_on)
376 def _get_nodes(_repo_id, _path, _revision):
376 def _get_nodes(_repo_id, _path, _revision):
377 repo = self._factory.repo(wire)
377 repo = self._factory.repo(wire)
378 fsobj = svn.repos.fs(repo)
378 fsobj = svn.repos.fs(repo)
379 if _revision is None:
379 if _revision is None:
380 _revision = svn.fs.youngest_rev(fsobj)
380 _revision = svn.fs.youngest_rev(fsobj)
381 root = svn.fs.revision_root(fsobj, _revision)
381 root = svn.fs.revision_root(fsobj, _revision)
382 entries = svn.fs.dir_entries(root, path)
382 entries = svn.fs.dir_entries(root, path)
383 result = []
383 result = []
384 for entry_path, entry_info in entries.items():
384 for entry_path, entry_info in entries.items():
385 result.append(
385 result.append(
386 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
386 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
387 return result
387 return result
388 return _get_nodes(repo_id, path, revision)
388 return _get_nodes(repo_id, path, revision)
389
389
390 @reraise_safe_exceptions
390 @reraise_safe_exceptions
391 def get_file_content(self, wire, rev=None, path=''):
391 def get_file_content(self, wire, rev=None, path=''):
392 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
393 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
394
394
395 if rev is None:
395 if rev is None:
396 rev = svn.fs.youngest_rev(fsobj)
396 rev = svn.fs.youngest_rev(fsobj)
397
397
398 root = svn.fs.revision_root(fsobj, rev)
398 root = svn.fs.revision_root(fsobj, rev)
399 content = svn.core.Stream(svn.fs.file_contents(root, path))
399 content = svn.core.Stream(svn.fs.file_contents(root, path))
400 return BytesEnvelope(content.read())
400 return BytesEnvelope(content.read())
401
401
402 @reraise_safe_exceptions
402 @reraise_safe_exceptions
403 def get_file_size(self, wire, revision=None, path=''):
403 def get_file_size(self, wire, revision=None, path=''):
404
404
405 cache_on, context_uid, repo_id = self._cache_on(wire)
405 cache_on, context_uid, repo_id = self._cache_on(wire)
406 region = self._region(wire)
406 region = self._region(wire)
407
407
408 @region.conditional_cache_on_arguments(condition=cache_on)
408 @region.conditional_cache_on_arguments(condition=cache_on)
409 def _get_file_size(_repo_id, _revision, _path):
409 def _get_file_size(_repo_id, _revision, _path):
410 repo = self._factory.repo(wire)
410 repo = self._factory.repo(wire)
411 fsobj = svn.repos.fs(repo)
411 fsobj = svn.repos.fs(repo)
412 if _revision is None:
412 if _revision is None:
413 _revision = svn.fs.youngest_revision(fsobj)
413 _revision = svn.fs.youngest_revision(fsobj)
414 root = svn.fs.revision_root(fsobj, _revision)
414 root = svn.fs.revision_root(fsobj, _revision)
415 size = svn.fs.file_length(root, path)
415 size = svn.fs.file_length(root, path)
416 return size
416 return size
417 return _get_file_size(repo_id, revision, path)
417 return _get_file_size(repo_id, revision, path)
418
418
419 def create_repository(self, wire, compatible_version=None):
419 def create_repository(self, wire, compatible_version=None):
420 log.info('Creating Subversion repository in path "%s"', wire['path'])
420 log.info('Creating Subversion repository in path "%s"', wire['path'])
421 self._factory.repo(wire, create=True,
421 self._factory.repo(wire, create=True,
422 compatible_version=compatible_version)
422 compatible_version=compatible_version)
423
423
424 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
424 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
425 obj = urllib.parse.urlparse(src_url)
425 obj = urllib.parse.urlparse(src_url)
426 username = obj.username or ''
426 username = obj.username or ''
427 password = obj.password or ''
427 password = obj.password or ''
428 return username, password, src_url
428 return username, password, src_url
429
429
430 def import_remote_repository(self, wire, src_url):
430 def import_remote_repository(self, wire, src_url):
431 repo_path = wire['path']
431 repo_path = wire['path']
432 if not self.is_path_valid_repository(wire, repo_path):
432 if not self.is_path_valid_repository(wire, repo_path):
433 raise Exception(
433 raise Exception(
434 "Path %s is not a valid Subversion repository." % repo_path)
434 "Path %s is not a valid Subversion repository." % repo_path)
435
435
436 username, password, src_url = self.get_url_and_credentials(src_url)
436 username, password, src_url = self.get_url_and_credentials(src_url)
437 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
437 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
438 '--trust-server-cert-failures=unknown-ca']
438 '--trust-server-cert-failures=unknown-ca']
439 if username and password:
439 if username and password:
440 rdump_cmd += ['--username', username, '--password', password]
440 rdump_cmd += ['--username', username, '--password', password]
441 rdump_cmd += [src_url]
441 rdump_cmd += [src_url]
442
442
443 rdump = subprocess.Popen(
443 rdump = subprocess.Popen(
444 rdump_cmd,
444 rdump_cmd,
445 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
445 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
446 load = subprocess.Popen(
446 load = subprocess.Popen(
447 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
447 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
448
448
449 # TODO: johbo: This can be a very long operation, might be better
449 # TODO: johbo: This can be a very long operation, might be better
450 # to track some kind of status and provide an api to check if the
450 # to track some kind of status and provide an api to check if the
451 # import is done.
451 # import is done.
452 rdump.wait()
452 rdump.wait()
453 load.wait()
453 load.wait()
454
454
455 log.debug('Return process ended with code: %s', rdump.returncode)
455 log.debug('Return process ended with code: %s', rdump.returncode)
456 if rdump.returncode != 0:
456 if rdump.returncode != 0:
457 errors = rdump.stderr.read()
457 errors = rdump.stderr.read()
458 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
458 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
459
459
460 reason = 'UNKNOWN'
460 reason = 'UNKNOWN'
461 if b'svnrdump: E230001:' in errors:
461 if b'svnrdump: E230001:' in errors:
462 reason = 'INVALID_CERTIFICATE'
462 reason = 'INVALID_CERTIFICATE'
463
463
464 if reason == 'UNKNOWN':
464 if reason == 'UNKNOWN':
465 reason = f'UNKNOWN:{safe_str(errors)}'
465 reason = f'UNKNOWN:{safe_str(errors)}'
466
466
467 raise Exception(
467 raise Exception(
468 'Failed to dump the remote repository from {}. Reason:{}'.format(
468 'Failed to dump the remote repository from {}. Reason:{}'.format(
469 src_url, reason))
469 src_url, reason))
470 if load.returncode != 0:
470 if load.returncode != 0:
471 raise Exception(
471 raise Exception(
472 'Failed to load the dump of remote repository from %s.' %
472 'Failed to load the dump of remote repository from %s.' %
473 (src_url, ))
473 (src_url, ))
474
474
def commit(self, wire, message, author, timestamp, updated, removed):
    """
    Create a new revision in the repository from node change-sets.

    :param message: commit message (coerced to bytes).
    :param author: commit author (coerced to bytes).
    :param timestamp: optional unix timestamp; when given, the revision's
        ``svn:date`` property is overridden with it.
    :param updated: nodes to add/update in the transaction.
    :param removed: nodes to delete from the transaction.
    :return: the id of the newly committed revision.
    """
    message = safe_bytes(message)
    author = safe_bytes(author)

    repo = self._factory.repo(wire)
    fsobj = svn.repos.fs(repo)

    # begin a transaction on top of the youngest revision
    youngest = svn.fs.youngest_rev(fsobj)
    txn = svn.repos.fs_begin_txn_for_commit(repo, youngest, author, message)
    txn_root = svn.fs.txn_root(txn)

    for node in updated:
        TxnNodeProcessor(node, txn_root).update()
    for node in removed:
        TxnNodeProcessor(node, txn_root).remove()

    commit_id = svn.repos.fs_commit_txn(repo, txn)

    if timestamp:
        # stamp the revision with the requested commit date
        ts_formatted = svn.core.svn_time_to_cstring(apr_time_t(timestamp))
        svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

    log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
    return commit_id
501
501
@reraise_safe_exceptions
def diff(self, wire, rev1, rev2, path1=None, path2=None,
         ignore_whitespace=False, context=3):
    """
    Generate a git-style unified diff between two svn revisions.

    :param rev1: source revision number.
    :param rev2: target revision number.
    :param path1: optional source path; defaults to ``path2``.
    :param path2: optional target path within the repository.
    :return: ``BytesEnvelope`` with the diff; empty on svn errors
        (e.g. when one of the paths does not exist in a revision).
    """
    wire.update(cache=False)
    repo = self._factory.repo(wire)
    diff_creator = SvnDiffer(
        repo, rev1, path1, rev2, path2, ignore_whitespace, context)
    try:
        return BytesEnvelope(diff_creator.generate_diff())
    except svn.core.SubversionException:
        # fixed duplicated word in the original log message
        # ("diff operation operation"); dropped unused exception binding
        log.exception(
            "Error during diff operation. "
            "Path might not exist %s, %s", path1, path2)
        return BytesEnvelope(b'')
517
517
@reraise_safe_exceptions
def is_large_file(self, wire, path):
    """Subversion has no largefiles concept, so this is always ``False``."""
    return False
521
521
@reraise_safe_exceptions
def is_binary(self, wire, rev, path):
    """
    Heuristically detect binary file content: any NUL byte marks it binary.
    Empty content is treated as non-binary. Result is cached per
    (repo_id, rev, path).
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _is_binary(_repo_id, _rev, _path):
        # args form the cache key; actual data is fetched via the closure
        content = self.get_file_content(wire, rev, path)
        return b'\0' in content if content else False

    return _is_binary(repo_id, rev, path)
535
535
@reraise_safe_exceptions
def md5_hash(self, wire, rev, path):
    """
    MD5 hashes are not computed for subversion nodes; always returns ''.
    Kept cached for interface symmetry with the other backends.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _md5_hash(_repo_id, _rev, _path):
        # not implemented for subversion
        return ''

    return _md5_hash(repo_id, rev, path)
546
546
@reraise_safe_exceptions
def run_svn_command(self, wire, cmd, **opts):
    """
    Execute an svn command-line tool via ``SubprocessIOChunker``.

    :param cmd: command and arguments, as a list.
    :param opts: extra subprocess options; recognizes ``_safe`` (swallow
        OSError and return the error text) and ``extra_env`` (merged into
        the inherited environment).
    :return: tuple of (stdout, stderr) as bytes on success.
    :raises exceptions.VcsException: when the process cannot be started
        and ``_safe`` was not requested.
    """
    path = wire.get('path', None)
    safe_call = opts.pop('_safe', False)

    # run inside the repository directory when it exists
    if path and os.path.isdir(path):
        opts['cwd'] = path

    svnenv = os.environ.copy()
    svnenv.update(opts.pop('extra_env', {}))

    call_opts = {'env': svnenv, 'shell': False}

    try:
        call_opts.update(opts)
        proc = subprocessio.SubprocessIOChunker(cmd, **call_opts)
        return b''.join(proc), b''.join(proc.stderr)
    except OSError as err:
        if safe_call:
            # NOTE(review): success path returns bytes but this returns
            # str — callers appear to tolerate it; confirm before changing.
            return '', safe_str(err).strip()
        cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
        tb_err = ("Couldn't run svn command (%s).\n"
                  "Original error was:%s\n"
                  "Call options:%s\n"
                  % (cmd, err, call_opts))
        log.exception(tb_err)
        raise exceptions.VcsException()(tb_err)
577
577
@reraise_safe_exceptions
def install_hooks(self, wire, force=False):
    """
    Install RhodeCode's svn hooks into the repository at ``wire['path']``.

    :param force: when True, re-create hooks even if already present.
    :return: result of ``install_svn_hooks``.
    """
    from vcsserver.hook_utils import install_svn_hooks
    repo_path = wire['path']
    # NOTE: removed dead code — the previous version derived a python3
    # executable path from settings.BINARY_DIR but never passed it to
    # install_svn_hooks.
    return install_svn_hooks(repo_path, force_create=force)
587
587
@reraise_safe_exceptions
def get_hooks_info(self, wire):
    """Report installed pre/post commit hook versions for the repository."""
    from vcsserver.hook_utils import (
        get_svn_pre_hook_version, get_svn_post_hook_version)
    path = wire['path']
    pre_version = get_svn_pre_hook_version(path)
    post_version = get_svn_post_hook_version(path)
    return {'pre_version': pre_version, 'post_version': post_version}
597
597
@reraise_safe_exceptions
def set_head_ref(self, wire, head_name):
    """No-op: subversion repositories have no head-reference concept."""
601
601
@reraise_safe_exceptions
def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, cache_config):
    """
    Stream the repository tree at ``commit_id`` into a cached archive.

    Builds a walker that yields ``ArchiveNode`` items and hands it over
    to ``store_archive_in_cache``, which performs the actual writing.
    """

    def walk_tree(root, root_dir, _commit_id):
        """
        Special recursive svn repo walker: yields
        (path, data, node_type) for every entry below ``root_dir``.
        """
        root_dir = safe_bytes(root_dir)

        filemode_default = 0o100644
        filemode_executable = 0o100755

        entries = svn.fs.dir_entries(root, root_dir)
        for entry_name in entries:
            entry_type = NODE_TYPE_MAPPING.get(entries[entry_name].kind, None)

            if entry_type == 'dir':
                # yield the directory itself, then everything inside it
                sub_root = os.path.join(root_dir, entry_name)
                yield sub_root, {'mode': filemode_default}, entry_type
                yield from walk_tree(root, sub_root, _commit_id)
            else:
                node_path = os.path.join(root_dir, entry_name).rstrip(b'/')
                props = svn.fs.node_proplist(root, node_path)

                node_mode = filemode_default
                if props.get('svn:executable'):
                    node_mode = filemode_executable

                node_is_link = bool(props.get('svn:special'))

                node_data = {
                    'is_link': node_is_link,
                    'mode': node_mode,
                    'content_stream': svn.core.Stream(
                        svn.fs.file_contents(root, node_path)).read
                }

                yield node_path, node_data, entry_type

    def file_walker(_commit_id, path):
        repo = self._factory.repo(wire)
        # NOTE(review): uses the closed-over ``commit_id`` rather than the
        # ``_commit_id`` argument — identical at the single call site.
        root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))

        def no_content():
            raise NoContentException()

        for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
            if f_type == 'dir':
                yield ArchiveNode(f_name, f_data['mode'], False, no_content)
            else:
                yield ArchiveNode(
                    f_name, f_data['mode'], f_data['is_link'],
                    f_data['content_stream'])

    return store_archive_in_cache(
        file_walker, archive_name_key, kind, mtime, archive_at_path,
        archive_dir_name, commit_id, cache_config=cache_config)
666
666
667
667
class SvnDiffer(object):
    """
    Utility to create git-style unified diffs based on difflib and the
    Subversion filesystem api.
    """

    # flipped to True per-node when svn:mime-type marks content as binary
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        # target side of the comparison
        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        # source side; falls back to the target path when none given
        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffing a file against a directory (or vice versa)."""
        if self.tgt_kind == svn.core.svn_node_none:
            return
        if self.src_kind == svn.core.svn_node_none:
            return
        if self.src_kind == self.tgt_kind:
            return
        # TODO: johbo: proper error handling
        raise Exception(
            "Source and target are not compatible for diff generation. "
            "Source type: %s, target type: %s" %
            (self.src_kind, self.tgt_kind))

    def generate_diff(self) -> bytes:
        """Render the full diff and return it as bytes."""
        out = io.BytesIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(out)
        else:
            self._generate_file_diff(out)
        return out.getvalue()

    def _generate_dir_diff(self, buf: io.BytesIO):
        """Collect per-node changes via a delta editor, then diff each."""
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf: io.BytesIO):
        """Diff a single file node; classifies the change as add/delete."""
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
        """Write the git-style header and unified diff for one node."""

        tgt_path_bytes = safe_bytes(tgt_path)
        tgt_path = safe_str(tgt_path)

        src_path_bytes = safe_bytes(src_path)
        src_path = safe_str(src_path)

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith(b'text'):
            self.binary_content = True
            buf.write(b"=" * 67 + b'\n')
            buf.write(b"Cannot display: file marked as a binary type.\n")
            buf.write(b"svn:mime-type = %s\n" % mime_type)

        buf.write(b"Index: %b\n" % tgt_path_bytes)
        buf.write(b"=" * 67 + b'\n')
        buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write(b"new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write(b"deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
            tgt_lines = []
        else:
            buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        # header done — emit the actual diff content (skipped for binaries)
        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)

            buf.writelines(udiff)

    def _get_mime_type(self, path) -> bytes:
        """Fetch svn:mime-type from target root, falling back to source."""
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Return the node's content split into lines (keeping newlines)."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()

        return content.splitlines(True)
834
834
835
835
class DiffChangeEditor(svn.delta.Editor):
    """
    Minimal delta editor that records changes between two revisions as
    (path, node_kind, action) tuples in ``self.changes``.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
854
854
855
855
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for the svn delta apis that permits every path."""
    return True
858
858
859
859
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node['path'] must already be bytes at this layer
        assert_bytes(node['path'])

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node if needed, then write content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        """Create any missing ancestor directories, top-down."""
        missing = []
        current = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(current):
            missing.append(current)
            current = vcspath.dirname(current)

        for directory in reversed(missing):
            log.debug('Creating missing directory "%s"', directory)
            svn.fs.make_dir(self.txn_root, directory)

    def _svn_path_exists(self, path):
        return svn.fs.check_path(self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        node_kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if node_kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert_bytes(self.node['content'])

        # replace the whole content via a text delta against nothing
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for key, value in self.node.get('properties', {}).items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'],
                safe_bytes(key), safe_bytes(value))
917
917
918
918
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds) into APR's microsecond-based
    ``apr_time_t`` integer representation.
    """
    microseconds_per_second = 1E6
    return int(timestamp * microseconds_per_second)
924
924
925
925
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure and wrap it in
    a number-kind `svn_opt_revision_t`.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num

    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = rev_value
    return revision
@@ -1,34 +1,34 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver import scm_app, wsgi_app_caller
18 from vcsserver import scm_app, wsgi_app_caller
19
19
20
20
class GitRemoteWsgi(object):
    """Dispatches one WSGI request to a freshly built git scm app."""

    def handle(self, environ, input_data, *args, **kwargs):
        wsgi_app = scm_app.create_git_wsgi_app(*args, **kwargs)
        caller = wsgi_app_caller.WSGIAppCaller(wsgi_app)
        return caller.handle(environ, input_data)
27
27
28
28
class HgRemoteWsgi(object):
    """Dispatches one WSGI request to a freshly built mercurial scm app."""

    def handle(self, environ, input_data, *args, **kwargs):
        wsgi_app = scm_app.create_hg_wsgi_app(*args, **kwargs)
        caller = wsgi_app_caller.WSGIAppCaller(wsgi_app)
        return caller.handle(environ, input_data)
@@ -1,242 +1,242 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.wireprotoserver
25 import mercurial.hgweb.common
25 import mercurial.hgweb.common
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 from vcsserver.str_utils import ascii_bytes, safe_bytes
30 from vcsserver.str_utils import ascii_bytes, safe_bytes
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request,
61 This code is unreachable because we guarantee that the HTTP request,
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible url.
63 never going to get a user-visible url.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 from mercurial.hgweb import request as requestmod
76 from mercurial.hgweb import request as requestmod
77 req = requestmod.parserequestfromenv(environ)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
79 gen = self.run_wsgi(req, res)
80
80
81 first_chunk = None
81 first_chunk = None
82
82
83 try:
83 try:
84 data = next(gen)
84 data = next(gen)
85
85
86 def first_chunk():
86 def first_chunk():
87 yield data
87 yield data
88 except StopIteration:
88 except StopIteration:
89 pass
89 pass
90
90
91 if first_chunk:
91 if first_chunk:
92 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
93 return gen
93 return gen
94
94
95 def _runwsgi(self, req, res, repo):
95 def _runwsgi(self, req, res, repo):
96
96
97 cmd = req.qsparams.get(b'cmd', '')
97 cmd = req.qsparams.get(b'cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
100 # internally from HG
101 log.warning('cmd: `%s` is not supported by the mercurial wireprotocol v1', cmd)
101 log.warning('cmd: `%s` is not supported by the mercurial wireprotocol v1', cmd)
102 from mercurial.hgweb.common import statusmessage
102 from mercurial.hgweb.common import statusmessage
103 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
104 res.setbodybytes(b'')
104 res.setbodybytes(b'')
105 return res.sendresponse()
105 return res.sendresponse()
106
106
107 return super()._runwsgi(req, res, repo)
107 return super()._runwsgi(req, res, repo)
108
108
109
109
110 def make_hg_ui_from_config(repo_config):
110 def make_hg_ui_from_config(repo_config):
111 baseui = mercurial.ui.ui()
111 baseui = mercurial.ui.ui()
112
112
113 # clean the baseui object
113 # clean the baseui object
114 baseui._ocfg = mercurial.config.config()
114 baseui._ocfg = mercurial.config.config()
115 baseui._ucfg = mercurial.config.config()
115 baseui._ucfg = mercurial.config.config()
116 baseui._tcfg = mercurial.config.config()
116 baseui._tcfg = mercurial.config.config()
117
117
118 for section, option, value in repo_config:
118 for section, option, value in repo_config:
119 baseui.setconfig(
119 baseui.setconfig(
120 ascii_bytes(section, allow_bytes=True),
120 ascii_bytes(section, allow_bytes=True),
121 ascii_bytes(option, allow_bytes=True),
121 ascii_bytes(option, allow_bytes=True),
122 ascii_bytes(value, allow_bytes=True))
122 ascii_bytes(value, allow_bytes=True))
123
123
124 # make our hgweb quiet so it doesn't print output
124 # make our hgweb quiet so it doesn't print output
125 baseui.setconfig(b'ui', b'quiet', b'true')
125 baseui.setconfig(b'ui', b'quiet', b'true')
126
126
127 return baseui
127 return baseui
128
128
129
129
130 def update_hg_ui_from_hgrc(baseui, repo_path):
130 def update_hg_ui_from_hgrc(baseui, repo_path):
131 path = os.path.join(repo_path, '.hg', 'hgrc')
131 path = os.path.join(repo_path, '.hg', 'hgrc')
132
132
133 if not os.path.isfile(path):
133 if not os.path.isfile(path):
134 log.debug('hgrc file is not present at %s, skipping...', path)
134 log.debug('hgrc file is not present at %s, skipping...', path)
135 return
135 return
136 log.debug('reading hgrc from %s', path)
136 log.debug('reading hgrc from %s', path)
137 cfg = mercurial.config.config()
137 cfg = mercurial.config.config()
138 cfg.read(ascii_bytes(path))
138 cfg.read(ascii_bytes(path))
139 for section in HG_UI_SECTIONS:
139 for section in HG_UI_SECTIONS:
140 for k, v in cfg.items(section):
140 for k, v in cfg.items(section):
141 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
141 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
142 baseui.setconfig(
142 baseui.setconfig(
143 ascii_bytes(section, allow_bytes=True),
143 ascii_bytes(section, allow_bytes=True),
144 ascii_bytes(k, allow_bytes=True),
144 ascii_bytes(k, allow_bytes=True),
145 ascii_bytes(v, allow_bytes=True))
145 ascii_bytes(v, allow_bytes=True))
146
146
147
147
148 def create_hg_wsgi_app(repo_path, repo_name, config):
148 def create_hg_wsgi_app(repo_path, repo_name, config):
149 """
149 """
150 Prepares a WSGI application to handle Mercurial requests.
150 Prepares a WSGI application to handle Mercurial requests.
151
151
152 :param config: is a list of 3-item tuples representing a ConfigObject
152 :param config: is a list of 3-item tuples representing a ConfigObject
153 (it is the serialized version of the config object).
153 (it is the serialized version of the config object).
154 """
154 """
155 log.debug("Creating Mercurial WSGI application")
155 log.debug("Creating Mercurial WSGI application")
156
156
157 baseui = make_hg_ui_from_config(config)
157 baseui = make_hg_ui_from_config(config)
158 update_hg_ui_from_hgrc(baseui, repo_path)
158 update_hg_ui_from_hgrc(baseui, repo_path)
159
159
160 try:
160 try:
161 return HgWeb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
161 return HgWeb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
162 except mercurial.error.RequirementError as e:
162 except mercurial.error.RequirementError as e:
163 raise exceptions.RequirementException(e)(e)
163 raise exceptions.RequirementException(e)(e)
164
164
165
165
166 class GitHandler(object):
166 class GitHandler(object):
167 """
167 """
168 Handler for Git operations like push/pull etc
168 Handler for Git operations like push/pull etc
169 """
169 """
170 def __init__(self, repo_location, repo_name, git_path, update_server_info,
170 def __init__(self, repo_location, repo_name, git_path, update_server_info,
171 extras):
171 extras):
172 if not os.path.isdir(repo_location):
172 if not os.path.isdir(repo_location):
173 raise OSError(repo_location)
173 raise OSError(repo_location)
174 self.content_path = repo_location
174 self.content_path = repo_location
175 self.repo_name = repo_name
175 self.repo_name = repo_name
176 self.repo_location = repo_location
176 self.repo_location = repo_location
177 self.extras = extras
177 self.extras = extras
178 self.git_path = git_path
178 self.git_path = git_path
179 self.update_server_info = update_server_info
179 self.update_server_info = update_server_info
180
180
181 def __call__(self, environ, start_response):
181 def __call__(self, environ, start_response):
182 app = webob.exc.HTTPNotFound()
182 app = webob.exc.HTTPNotFound()
183 candidate_paths = (
183 candidate_paths = (
184 self.content_path, os.path.join(self.content_path, '.git'))
184 self.content_path, os.path.join(self.content_path, '.git'))
185
185
186 for content_path in candidate_paths:
186 for content_path in candidate_paths:
187 try:
187 try:
188 app = pygrack.GitRepository(
188 app = pygrack.GitRepository(
189 self.repo_name, content_path, self.git_path,
189 self.repo_name, content_path, self.git_path,
190 self.update_server_info, self.extras)
190 self.update_server_info, self.extras)
191 break
191 break
192 except OSError:
192 except OSError:
193 continue
193 continue
194
194
195 return app(environ, start_response)
195 return app(environ, start_response)
196
196
197
197
198 def create_git_wsgi_app(repo_path, repo_name, config):
198 def create_git_wsgi_app(repo_path, repo_name, config):
199 """
199 """
200 Creates a WSGI application to handle Git requests.
200 Creates a WSGI application to handle Git requests.
201
201
202 :param config: is a dictionary holding the extras.
202 :param config: is a dictionary holding the extras.
203 """
203 """
204 git_path = settings.GIT_EXECUTABLE
204 git_path = settings.GIT_EXECUTABLE
205 update_server_info = config.pop('git_update_server_info')
205 update_server_info = config.pop('git_update_server_info')
206 app = GitHandler(
206 app = GitHandler(
207 repo_path, repo_name, git_path, update_server_info, config)
207 repo_path, repo_name, git_path, update_server_info, config)
208
208
209 return app
209 return app
210
210
211
211
212 class GitLFSHandler(object):
212 class GitLFSHandler(object):
213 """
213 """
214 Handler for Git LFS operations
214 Handler for Git LFS operations
215 """
215 """
216
216
217 def __init__(self, repo_location, repo_name, git_path, update_server_info,
217 def __init__(self, repo_location, repo_name, git_path, update_server_info,
218 extras):
218 extras):
219 if not os.path.isdir(repo_location):
219 if not os.path.isdir(repo_location):
220 raise OSError(repo_location)
220 raise OSError(repo_location)
221 self.content_path = repo_location
221 self.content_path = repo_location
222 self.repo_name = repo_name
222 self.repo_name = repo_name
223 self.repo_location = repo_location
223 self.repo_location = repo_location
224 self.extras = extras
224 self.extras = extras
225 self.git_path = git_path
225 self.git_path = git_path
226 self.update_server_info = update_server_info
226 self.update_server_info = update_server_info
227
227
228 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
228 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
229 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
229 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
230 return app
230 return app
231
231
232
232
233 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
233 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
234 git_path = settings.GIT_EXECUTABLE
234 git_path = settings.GIT_EXECUTABLE
235 update_server_info = config.pop(b'git_update_server_info')
235 update_server_info = config.pop(b'git_update_server_info')
236 git_lfs_enabled = config.pop(b'git_lfs_enabled')
236 git_lfs_enabled = config.pop(b'git_lfs_enabled')
237 git_lfs_store_path = config.pop(b'git_lfs_store_path')
237 git_lfs_store_path = config.pop(b'git_lfs_store_path')
238 git_lfs_http_scheme = config.pop(b'git_lfs_http_scheme', 'http')
238 git_lfs_http_scheme = config.pop(b'git_lfs_http_scheme', 'http')
239 app = GitLFSHandler(
239 app = GitLFSHandler(
240 repo_path, repo_name, git_path, update_server_info, config)
240 repo_path, repo_name, git_path, update_server_info, config)
241
241
242 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
242 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
@@ -1,78 +1,78 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import gc
18 import gc
19 import logging
19 import logging
20 import os
20 import os
21 import time
21 import time
22
22
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class VcsServer(object):
27 class VcsServer(object):
28 """
28 """
29 Exposed remote interface of the vcsserver itself.
29 Exposed remote interface of the vcsserver itself.
30
30
31 This object can be used to manage the server remotely. Right now the main
31 This object can be used to manage the server remotely. Right now the main
32 use case is to allow to shut down the server.
32 use case is to allow to shut down the server.
33 """
33 """
34
34
35 _shutdown = False
35 _shutdown = False
36
36
37 def shutdown(self):
37 def shutdown(self):
38 self._shutdown = True
38 self._shutdown = True
39
39
40 def ping(self):
40 def ping(self):
41 """
41 """
42 Utility to probe a server connection.
42 Utility to probe a server connection.
43 """
43 """
44 log.debug("Received server ping.")
44 log.debug("Received server ping.")
45
45
46 def echo(self, data):
46 def echo(self, data):
47 """
47 """
48 Utility for performance testing.
48 Utility for performance testing.
49
49
50 Allows to pass in arbitrary data and will return this data.
50 Allows to pass in arbitrary data and will return this data.
51 """
51 """
52 log.debug("Received server echo.")
52 log.debug("Received server echo.")
53 return data
53 return data
54
54
55 def sleep(self, seconds):
55 def sleep(self, seconds):
56 """
56 """
57 Utility to simulate long running server interaction.
57 Utility to simulate long running server interaction.
58 """
58 """
59 log.debug("Sleeping %s seconds", seconds)
59 log.debug("Sleeping %s seconds", seconds)
60 time.sleep(seconds)
60 time.sleep(seconds)
61
61
62 def get_pid(self):
62 def get_pid(self):
63 """
63 """
64 Allows to discover the PID based on a proxy object.
64 Allows to discover the PID based on a proxy object.
65 """
65 """
66 return os.getpid()
66 return os.getpid()
67
67
68 def run_gc(self):
68 def run_gc(self):
69 """
69 """
70 Allows to trigger the garbage collector.
70 Allows to trigger the garbage collector.
71
71
72 Main intention is to support statistics gathering during test runs.
72 Main intention is to support statistics gathering during test runs.
73 """
73 """
74 freed_objects = gc.collect()
74 freed_objects = gc.collect()
75 return {
75 return {
76 'freed_objects': freed_objects,
76 'freed_objects': freed_objects,
77 'garbage': len(gc.garbage),
77 'garbage': len(gc.garbage),
78 }
78 }
@@ -1,22 +1,22 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 WIRE_ENCODING = 'UTF-8'
18 WIRE_ENCODING = 'UTF-8'
19 GIT_EXECUTABLE = 'git'
19 GIT_EXECUTABLE = 'git'
20 SVN_EXECUTABLE = 'svn'
20 SVN_EXECUTABLE = 'svn'
21 SVNLOOK_EXECUTABLE = 'svnlook'
21 SVNLOOK_EXECUTABLE = 'svnlook'
22 BINARY_DIR = ''
22 BINARY_DIR = ''
@@ -1,133 +1,133 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import typing
18 import typing
19 import base64
19 import base64
20 import logging
20 import logging
21
21
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 def safe_int(val, default=None) -> int:
26 def safe_int(val, default=None) -> int:
27 """
27 """
28 Returns int() of val if val is not convertable to int use default
28 Returns int() of val if val is not convertable to int use default
29 instead
29 instead
30
30
31 :param val:
31 :param val:
32 :param default:
32 :param default:
33 """
33 """
34
34
35 try:
35 try:
36 val = int(val)
36 val = int(val)
37 except (ValueError, TypeError):
37 except (ValueError, TypeError):
38 val = default
38 val = default
39
39
40 return val
40 return val
41
41
42
42
43 def base64_to_str(text) -> str:
43 def base64_to_str(text) -> str:
44 return safe_str(base64.encodebytes(safe_bytes(text))).strip()
44 return safe_str(base64.encodebytes(safe_bytes(text))).strip()
45
45
46
46
47 def get_default_encodings() -> list[str]:
47 def get_default_encodings() -> list[str]:
48 return ['utf8']
48 return ['utf8']
49
49
50
50
51 def safe_str(str_, to_encoding=None) -> str:
51 def safe_str(str_, to_encoding=None) -> str:
52 """
52 """
53 safe str function. Does few trick to turn unicode_ into string
53 safe str function. Does few trick to turn unicode_ into string
54
54
55 :param str_: str to encode
55 :param str_: str to encode
56 :param to_encoding: encode to this type UTF8 default
56 :param to_encoding: encode to this type UTF8 default
57 """
57 """
58 if isinstance(str_, str):
58 if isinstance(str_, str):
59 return str_
59 return str_
60
60
61 # if it's bytes cast to str
61 # if it's bytes cast to str
62 if not isinstance(str_, bytes):
62 if not isinstance(str_, bytes):
63 return str(str_)
63 return str(str_)
64
64
65 to_encoding = to_encoding or get_default_encodings()
65 to_encoding = to_encoding or get_default_encodings()
66 if not isinstance(to_encoding, (list, tuple)):
66 if not isinstance(to_encoding, (list, tuple)):
67 to_encoding = [to_encoding]
67 to_encoding = [to_encoding]
68
68
69 for enc in to_encoding:
69 for enc in to_encoding:
70 try:
70 try:
71 return str(str_, enc)
71 return str(str_, enc)
72 except UnicodeDecodeError:
72 except UnicodeDecodeError:
73 pass
73 pass
74
74
75 return str(str_, to_encoding[0], 'replace')
75 return str(str_, to_encoding[0], 'replace')
76
76
77
77
78 def safe_bytes(str_, from_encoding=None) -> bytes:
78 def safe_bytes(str_, from_encoding=None) -> bytes:
79 """
79 """
80 safe bytes function. Does few trick to turn str_ into bytes string:
80 safe bytes function. Does few trick to turn str_ into bytes string:
81
81
82 :param str_: string to decode
82 :param str_: string to decode
83 :param from_encoding: encode from this type UTF8 default
83 :param from_encoding: encode from this type UTF8 default
84 """
84 """
85 if isinstance(str_, bytes):
85 if isinstance(str_, bytes):
86 return str_
86 return str_
87
87
88 if not isinstance(str_, str):
88 if not isinstance(str_, str):
89 raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')
89 raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')
90
90
91 from_encoding = from_encoding or get_default_encodings()
91 from_encoding = from_encoding or get_default_encodings()
92 if not isinstance(from_encoding, (list, tuple)):
92 if not isinstance(from_encoding, (list, tuple)):
93 from_encoding = [from_encoding]
93 from_encoding = [from_encoding]
94
94
95 for enc in from_encoding:
95 for enc in from_encoding:
96 try:
96 try:
97 return str_.encode(enc)
97 return str_.encode(enc)
98 except UnicodeDecodeError:
98 except UnicodeDecodeError:
99 pass
99 pass
100
100
101 return str_.encode(from_encoding[0], 'replace')
101 return str_.encode(from_encoding[0], 'replace')
102
102
103
103
104 def ascii_bytes(str_, allow_bytes=False) -> bytes:
104 def ascii_bytes(str_, allow_bytes=False) -> bytes:
105 """
105 """
106 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
106 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
107 Fails with UnicodeError on invalid input.
107 Fails with UnicodeError on invalid input.
108 This should be used where encoding and "safe" ambiguity should be avoided.
108 This should be used where encoding and "safe" ambiguity should be avoided.
109 Where strings already have been encoded in other ways but still are unicode
109 Where strings already have been encoded in other ways but still are unicode
110 string - for example to hex, base64, json, urlencoding, or are known to be
110 string - for example to hex, base64, json, urlencoding, or are known to be
111 identifiers.
111 identifiers.
112 """
112 """
113 if allow_bytes and isinstance(str_, bytes):
113 if allow_bytes and isinstance(str_, bytes):
114 return str_
114 return str_
115
115
116 if not isinstance(str_, str):
116 if not isinstance(str_, str):
117 raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
117 raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
118 return str_.encode('ascii')
118 return str_.encode('ascii')
119
119
120
120
121 def ascii_str(str_) -> str:
121 def ascii_str(str_) -> str:
122 """
122 """
123 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
123 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
124 Fails with UnicodeError on invalid input.
124 Fails with UnicodeError on invalid input.
125 This should be used where encoding and "safe" ambiguity should be avoided.
125 This should be used where encoding and "safe" ambiguity should be avoided.
126 Where strings are encoded but also in other ways are known to be ASCII, and
126 Where strings are encoded but also in other ways are known to be ASCII, and
127 where a unicode string is wanted without caring about encoding. For example
127 where a unicode string is wanted without caring about encoding. For example
128 to hex, base64, urlencoding, or are known to be identifiers.
128 to hex, base64, urlencoding, or are known to be identifiers.
129 """
129 """
130
130
131 if not isinstance(str_, bytes):
131 if not isinstance(str_, bytes):
132 raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
132 raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
133 return str_.decode('ascii')
133 return str_.decode('ascii')
@@ -1,211 +1,212 b''
1 #
1 #
2 # Copyright (C) 2004-2009 Edgewall Software
2 # Copyright (C) 2004-2009 Edgewall Software
3 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
3 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
4 # Copyright (C) 2014-2023 RhodeCode GmbH
4 # All rights reserved.
5 # All rights reserved.
5 #
6 #
6 # This software is licensed as described in the file COPYING, which
7 # This software is licensed as described in the file COPYING, which
7 # you should have received as part of this distribution. The terms
8 # you should have received as part of this distribution. The terms
8 # are also available at http://trac.edgewall.org/wiki/TracLicense.
9 # are also available at http://trac.edgewall.org/wiki/TracLicense.
9 #
10 #
10 # This software consists of voluntary contributions made by many
11 # This software consists of voluntary contributions made by many
11 # individuals. For the exact contribution history, see the revision
12 # individuals. For the exact contribution history, see the revision
12 # history and logs, available at http://trac.edgewall.org/log/.
13 # history and logs, available at http://trac.edgewall.org/log/.
13 #
14 #
14 # Author: Christopher Lenz <cmlenz@gmx.de>
15 # Author: Christopher Lenz <cmlenz@gmx.de>
15
16
16 import difflib
17 import difflib
17
18
18
19
19 def get_filtered_hunks(from_lines, to_lines, context=None,
20 def get_filtered_hunks(from_lines, to_lines, context=None,
20 ignore_blank_lines: bool = False, ignore_case: bool = False,
21 ignore_blank_lines: bool = False, ignore_case: bool = False,
21 ignore_space_changes: bool = False):
22 ignore_space_changes: bool = False):
22 """Retrieve differences in the form of `difflib.SequenceMatcher`
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
23 opcodes, grouped according to the ``context`` and ``ignore_*``
24 opcodes, grouped according to the ``context`` and ``ignore_*``
24 parameters.
25 parameters.
25
26
26 :param from_lines: list of lines corresponding to the old content
27 :param from_lines: list of lines corresponding to the old content
27 :param to_lines: list of lines corresponding to the new content
28 :param to_lines: list of lines corresponding to the new content
28 :param ignore_blank_lines: differences about empty lines only are ignored
29 :param ignore_blank_lines: differences about empty lines only are ignored
29 :param ignore_case: upper case / lower case only differences are ignored
30 :param ignore_case: upper case / lower case only differences are ignored
30 :param ignore_space_changes: differences in amount of spaces are ignored
31 :param ignore_space_changes: differences in amount of spaces are ignored
31 :param context: the number of "equal" lines kept for representing
32 :param context: the number of "equal" lines kept for representing
32 the context of the change
33 the context of the change
33 :return: generator of grouped `difflib.SequenceMatcher` opcodes
34 :return: generator of grouped `difflib.SequenceMatcher` opcodes
34
35
35 If none of the ``ignore_*`` parameters is `True`, there's nothing
36 If none of the ``ignore_*`` parameters is `True`, there's nothing
36 to filter out the results will come straight from the
37 to filter out the results will come straight from the
37 SequenceMatcher.
38 SequenceMatcher.
38 """
39 """
39 hunks = get_hunks(from_lines, to_lines, context)
40 hunks = get_hunks(from_lines, to_lines, context)
40 if ignore_space_changes or ignore_case or ignore_blank_lines:
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
41 hunks = filter_ignorable_lines(hunks, from_lines, to_lines, context,
42 hunks = filter_ignorable_lines(hunks, from_lines, to_lines, context,
42 ignore_blank_lines, ignore_case,
43 ignore_blank_lines, ignore_case,
43 ignore_space_changes)
44 ignore_space_changes)
44 return hunks
45 return hunks
45
46
46
47
47 def get_hunks(from_lines, to_lines, context=None):
48 def get_hunks(from_lines, to_lines, context=None):
48 """Generator yielding grouped opcodes describing differences .
49 """Generator yielding grouped opcodes describing differences .
49
50
50 See `get_filtered_hunks` for the parameter descriptions.
51 See `get_filtered_hunks` for the parameter descriptions.
51 """
52 """
52 matcher = difflib.SequenceMatcher(None, from_lines, to_lines)
53 matcher = difflib.SequenceMatcher(None, from_lines, to_lines)
53 if context is None:
54 if context is None:
54 return (hunk for hunk in [matcher.get_opcodes()])
55 return (hunk for hunk in [matcher.get_opcodes()])
55 else:
56 else:
56 return matcher.get_grouped_opcodes(context)
57 return matcher.get_grouped_opcodes(context)
57
58
58
59
59 def filter_ignorable_lines(hunks, from_lines, to_lines, context,
60 def filter_ignorable_lines(hunks, from_lines, to_lines, context,
60 ignore_blank_lines, ignore_case,
61 ignore_blank_lines, ignore_case,
61 ignore_space_changes):
62 ignore_space_changes):
62 """Detect line changes that should be ignored and emits them as
63 """Detect line changes that should be ignored and emits them as
63 tagged as "equal", possibly joined with the preceding and/or
64 tagged as "equal", possibly joined with the preceding and/or
64 following "equal" block.
65 following "equal" block.
65
66
66 See `get_filtered_hunks` for the parameter descriptions.
67 See `get_filtered_hunks` for the parameter descriptions.
67 """
68 """
68 def is_ignorable(tag, fromlines, tolines):
69 def is_ignorable(tag, fromlines, tolines):
69
70
70 if tag == 'delete' and ignore_blank_lines:
71 if tag == 'delete' and ignore_blank_lines:
71 if b''.join(fromlines) == b'':
72 if b''.join(fromlines) == b'':
72 return True
73 return True
73 elif tag == 'insert' and ignore_blank_lines:
74 elif tag == 'insert' and ignore_blank_lines:
74 if b''.join(tolines) == b'':
75 if b''.join(tolines) == b'':
75 return True
76 return True
76 elif tag == 'replace' and (ignore_case or ignore_space_changes):
77 elif tag == 'replace' and (ignore_case or ignore_space_changes):
77 if len(fromlines) != len(tolines):
78 if len(fromlines) != len(tolines):
78 return False
79 return False
79
80
80 def f(input_str):
81 def f(input_str):
81 if ignore_case:
82 if ignore_case:
82 input_str = input_str.lower()
83 input_str = input_str.lower()
83 if ignore_space_changes:
84 if ignore_space_changes:
84 input_str = b' '.join(input_str.split())
85 input_str = b' '.join(input_str.split())
85 return input_str
86 return input_str
86
87
87 for i in range(len(fromlines)):
88 for i in range(len(fromlines)):
88 if f(fromlines[i]) != f(tolines[i]):
89 if f(fromlines[i]) != f(tolines[i]):
89 return False
90 return False
90 return True
91 return True
91
92
92 hunks = list(hunks)
93 hunks = list(hunks)
93 opcodes = []
94 opcodes = []
94 ignored_lines = False
95 ignored_lines = False
95 prev = None
96 prev = None
96 for hunk in hunks:
97 for hunk in hunks:
97 for tag, i1, i2, j1, j2 in hunk:
98 for tag, i1, i2, j1, j2 in hunk:
98 if tag == 'equal':
99 if tag == 'equal':
99 if prev:
100 if prev:
100 prev = (tag, prev[1], i2, prev[3], j2)
101 prev = (tag, prev[1], i2, prev[3], j2)
101 else:
102 else:
102 prev = (tag, i1, i2, j1, j2)
103 prev = (tag, i1, i2, j1, j2)
103 else:
104 else:
104 if is_ignorable(tag, from_lines[i1:i2], to_lines[j1:j2]):
105 if is_ignorable(tag, from_lines[i1:i2], to_lines[j1:j2]):
105 ignored_lines = True
106 ignored_lines = True
106 if prev:
107 if prev:
107 prev = 'equal', prev[1], i2, prev[3], j2
108 prev = 'equal', prev[1], i2, prev[3], j2
108 else:
109 else:
109 prev = 'equal', i1, i2, j1, j2
110 prev = 'equal', i1, i2, j1, j2
110 continue
111 continue
111 if prev:
112 if prev:
112 opcodes.append(prev)
113 opcodes.append(prev)
113 opcodes.append((tag, i1, i2, j1, j2))
114 opcodes.append((tag, i1, i2, j1, j2))
114 prev = None
115 prev = None
115 if prev:
116 if prev:
116 opcodes.append(prev)
117 opcodes.append(prev)
117
118
118 if ignored_lines:
119 if ignored_lines:
119 if context is None:
120 if context is None:
120 yield opcodes
121 yield opcodes
121 else:
122 else:
122 # we leave at most n lines with the tag 'equal' before and after
123 # we leave at most n lines with the tag 'equal' before and after
123 # every change
124 # every change
124 n = context
125 n = context
125 nn = n + n
126 nn = n + n
126
127
127 group = []
128 group = []
128
129
129 def all_equal():
130 def all_equal():
130 all(op[0] == 'equal' for op in group)
131 all(op[0] == 'equal' for op in group)
131 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
132 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
132 if idx == 0 and tag == 'equal': # Fixup leading unchanged block
133 if idx == 0 and tag == 'equal': # Fixup leading unchanged block
133 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
134 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
134 elif tag == 'equal' and i2 - i1 > nn:
135 elif tag == 'equal' and i2 - i1 > nn:
135 group.append((tag, i1, min(i2, i1 + n), j1,
136 group.append((tag, i1, min(i2, i1 + n), j1,
136 min(j2, j1 + n)))
137 min(j2, j1 + n)))
137 if not all_equal():
138 if not all_equal():
138 yield group
139 yield group
139 group = []
140 group = []
140 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
141 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
141 group.append((tag, i1, i2, j1, j2))
142 group.append((tag, i1, i2, j1, j2))
142
143
143 if group and not (len(group) == 1 and group[0][0] == 'equal'):
144 if group and not (len(group) == 1 and group[0][0] == 'equal'):
144 if group[-1][0] == 'equal': # Fixup trailing unchanged block
145 if group[-1][0] == 'equal': # Fixup trailing unchanged block
145 tag, i1, i2, j1, j2 = group[-1]
146 tag, i1, i2, j1, j2 = group[-1]
146 group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
147 group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
147 if not all_equal():
148 if not all_equal():
148 yield group
149 yield group
149 else:
150 else:
150 for hunk in hunks:
151 for hunk in hunks:
151 yield hunk
152 yield hunk
152
153
153
154
154 NO_NEWLINE_AT_END = b'\\ No newline at end of file'
155 NO_NEWLINE_AT_END = b'\\ No newline at end of file'
155 LINE_TERM = b'\n'
156 LINE_TERM = b'\n'
156
157
157
158
158 def unified_diff(from_lines, to_lines, context=None, ignore_blank_lines: bool = False,
159 def unified_diff(from_lines, to_lines, context=None, ignore_blank_lines: bool = False,
159 ignore_case: bool = False, ignore_space_changes: bool = False, lineterm=LINE_TERM) -> bytes:
160 ignore_case: bool = False, ignore_space_changes: bool = False, lineterm=LINE_TERM) -> bytes:
160 """
161 """
161 Generator producing lines corresponding to a textual diff.
162 Generator producing lines corresponding to a textual diff.
162
163
163 See `get_filtered_hunks` for the parameter descriptions.
164 See `get_filtered_hunks` for the parameter descriptions.
164 """
165 """
165 # TODO: johbo: Check if this can be nicely integrated into the matching
166 # TODO: johbo: Check if this can be nicely integrated into the matching
166
167
167 if ignore_space_changes:
168 if ignore_space_changes:
168 from_lines = [l.strip() for l in from_lines]
169 from_lines = [l.strip() for l in from_lines]
169 to_lines = [l.strip() for l in to_lines]
170 to_lines = [l.strip() for l in to_lines]
170
171
171 def _hunk_range(start, length) -> bytes:
172 def _hunk_range(start, length) -> bytes:
172 if length != 1:
173 if length != 1:
173 return b'%d,%d' % (start, length)
174 return b'%d,%d' % (start, length)
174 else:
175 else:
175 return b'%d' % (start,)
176 return b'%d' % (start,)
176
177
177 for group in get_filtered_hunks(from_lines, to_lines, context,
178 for group in get_filtered_hunks(from_lines, to_lines, context,
178 ignore_blank_lines, ignore_case,
179 ignore_blank_lines, ignore_case,
179 ignore_space_changes):
180 ignore_space_changes):
180 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
181 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
181 if i1 == 0 and i2 == 0:
182 if i1 == 0 and i2 == 0:
182 i1, i2 = -1, -1 # support for Add changes
183 i1, i2 = -1, -1 # support for Add changes
183 if j1 == 0 and j2 == 0:
184 if j1 == 0 and j2 == 0:
184 j1, j2 = -1, -1 # support for Delete changes
185 j1, j2 = -1, -1 # support for Delete changes
185 yield b'@@ -%b +%b @@%b' % (
186 yield b'@@ -%b +%b @@%b' % (
186 _hunk_range(i1 + 1, i2 - i1),
187 _hunk_range(i1 + 1, i2 - i1),
187 _hunk_range(j1 + 1, j2 - j1),
188 _hunk_range(j1 + 1, j2 - j1),
188 lineterm)
189 lineterm)
189 for tag, i1, i2, j1, j2 in group:
190 for tag, i1, i2, j1, j2 in group:
190 if tag == 'equal':
191 if tag == 'equal':
191 for line in from_lines[i1:i2]:
192 for line in from_lines[i1:i2]:
192 if not line.endswith(lineterm):
193 if not line.endswith(lineterm):
193 yield b' ' + line + lineterm
194 yield b' ' + line + lineterm
194 yield NO_NEWLINE_AT_END + lineterm
195 yield NO_NEWLINE_AT_END + lineterm
195 else:
196 else:
196 yield b' ' + line
197 yield b' ' + line
197 else:
198 else:
198 if tag in ('replace', 'delete'):
199 if tag in ('replace', 'delete'):
199 for line in from_lines[i1:i2]:
200 for line in from_lines[i1:i2]:
200 if not line.endswith(lineterm):
201 if not line.endswith(lineterm):
201 yield b'-' + line + lineterm
202 yield b'-' + line + lineterm
202 yield NO_NEWLINE_AT_END + lineterm
203 yield NO_NEWLINE_AT_END + lineterm
203 else:
204 else:
204 yield b'-' + line
205 yield b'-' + line
205 if tag in ('replace', 'insert'):
206 if tag in ('replace', 'insert'):
206 for line in to_lines[j1:j2]:
207 for line in to_lines[j1:j2]:
207 if not line.endswith(lineterm):
208 if not line.endswith(lineterm):
208 yield b'+' + line + lineterm
209 yield b'+' + line + lineterm
209 yield NO_NEWLINE_AT_END + lineterm
210 yield NO_NEWLINE_AT_END + lineterm
210 else:
211 else:
211 yield b'+' + line
212 yield b'+' + line
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,85 +1,85 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21 import configparser
21 import configparser
22
22
23
23
24 class ContextINI(object):
24 class ContextINI(object):
25 """
25 """
26 Allows to create a new test.ini file as a copy of existing one with edited
26 Allows to create a new test.ini file as a copy of existing one with edited
27 data. If existing file is not present, it creates a new one. Example usage::
27 data. If existing file is not present, it creates a new one. Example usage::
28
28
29 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
29 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 print 'vcsserver --config=%s' % new_test_ini
30 print 'vcsserver --config=%s' % new_test_ini
31 """
31 """
32
32
33 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
33 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 destroy=True):
34 destroy=True):
35 self.ini_file_path = ini_file_path
35 self.ini_file_path = ini_file_path
36 self.ini_params = ini_params
36 self.ini_params = ini_params
37 self.new_path = None
37 self.new_path = None
38 self.new_path_prefix = new_file_prefix or 'test'
38 self.new_path_prefix = new_file_prefix or 'test'
39 self.destroy = destroy
39 self.destroy = destroy
40
40
41 def __enter__(self):
41 def __enter__(self):
42 _, pref = tempfile.mkstemp()
42 _, pref = tempfile.mkstemp()
43 loc = tempfile.gettempdir()
43 loc = tempfile.gettempdir()
44 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
44 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 pref, self.new_path_prefix, self.ini_file_path))
45 pref, self.new_path_prefix, self.ini_file_path))
46
46
47 # copy ini file and modify according to the params, if we re-use a file
47 # copy ini file and modify according to the params, if we re-use a file
48 if os.path.isfile(self.ini_file_path):
48 if os.path.isfile(self.ini_file_path):
49 shutil.copy(self.ini_file_path, self.new_path)
49 shutil.copy(self.ini_file_path, self.new_path)
50 else:
50 else:
51 # create new dump file for configObj to write to.
51 # create new dump file for configObj to write to.
52 with open(self.new_path, 'wb'):
52 with open(self.new_path, 'wb'):
53 pass
53 pass
54
54
55 parser = configparser.ConfigParser()
55 parser = configparser.ConfigParser()
56 parser.read(self.ini_file_path)
56 parser.read(self.ini_file_path)
57
57
58 for data in self.ini_params:
58 for data in self.ini_params:
59 section, ini_params = list(data.items())[0]
59 section, ini_params = list(data.items())[0]
60 key, val = list(ini_params.items())[0]
60 key, val = list(ini_params.items())[0]
61 if section not in parser:
61 if section not in parser:
62 parser[section] = {}
62 parser[section] = {}
63 parser[section][key] = val
63 parser[section][key] = val
64 with open(self.ini_file_path, 'w') as f:
64 with open(self.ini_file_path, 'w') as f:
65 parser.write(f)
65 parser.write(f)
66 return self.new_path
66 return self.new_path
67
67
68 def __exit__(self, exc_type, exc_val, exc_tb):
68 def __exit__(self, exc_type, exc_val, exc_tb):
69 if self.destroy:
69 if self.destroy:
70 os.remove(self.new_path)
70 os.remove(self.new_path)
71
71
72
72
73 def no_newline_id_generator(test_name):
73 def no_newline_id_generator(test_name):
74 """
74 """
75 Generates a test name without spaces or newlines characters. Used for
75 Generates a test name without spaces or newlines characters. Used for
76 nicer output of progress of test
76 nicer output of progress of test
77 """
77 """
78 org_name = test_name
78 org_name = test_name
79 test_name = str(test_name)\
79 test_name = str(test_name)\
80 .replace('\n', '_N') \
80 .replace('\n', '_N') \
81 .replace('\r', '_N') \
81 .replace('\r', '_N') \
82 .replace('\t', '_T') \
82 .replace('\t', '_T') \
83 .replace(' ', '_S')
83 .replace(' ', '_S')
84
84
85 return test_name or 'test-with-empty-name'
85 return test_name or 'test-with-empty-name'
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver.remote import git
24 from vcsserver.remote import git
25
25
26 SAMPLE_REFS = {
26 SAMPLE_REFS = {
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 }
32 }
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture
36 def git_remote():
36 def git_remote():
37 """
37 """
38 A GitRemote instance with a mock factory.
38 A GitRemote instance with a mock factory.
39 """
39 """
40 factory = Mock()
40 factory = Mock()
41 remote = git.GitRemote(factory)
41 remote = git.GitRemote(factory)
42 return remote
42 return remote
43
43
44
44
45 def test_discover_git_version(git_remote):
45 def test_discover_git_version(git_remote):
46 version = git_remote.discover_git_version()
46 version = git_remote.discover_git_version()
47 assert version
47 assert version
48
48
49
49
50 class TestGitFetch(object):
50 class TestGitFetch(object):
51 def setup_method(self):
51 def setup_method(self):
52 self.mock_repo = Mock()
52 self.mock_repo = Mock()
53 factory = Mock()
53 factory = Mock()
54 factory.repo = Mock(return_value=self.mock_repo)
54 factory.repo = Mock(return_value=self.mock_repo)
55 self.remote_git = git.GitRemote(factory)
55 self.remote_git = git.GitRemote(factory)
56
56
57 def test_fetches_all_when_no_commit_ids_specified(self):
57 def test_fetches_all_when_no_commit_ids_specified(self):
58 def side_effect(determine_wants, *args, **kwargs):
58 def side_effect(determine_wants, *args, **kwargs):
59 determine_wants(SAMPLE_REFS)
59 determine_wants(SAMPLE_REFS)
60
60
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 mock_fetch.side_effect = side_effect
62 mock_fetch.side_effect = side_effect
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 determine_wants = self.mock_repo.object_store.determine_wants_all
64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66
66
67 def test_fetches_specified_commits(self):
67 def test_fetches_specified_commits(self):
68 selected_refs = {
68 selected_refs = {
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 }
71 }
72
72
73 def side_effect(determine_wants, *args, **kwargs):
73 def side_effect(determine_wants, *args, **kwargs):
74 result = determine_wants(SAMPLE_REFS)
74 result = determine_wants(SAMPLE_REFS)
75 assert sorted(result) == sorted(selected_refs.values())
75 assert sorted(result) == sorted(selected_refs.values())
76 return result
76 return result
77
77
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 mock_fetch.side_effect = side_effect
79 mock_fetch.side_effect = side_effect
80 self.remote_git.pull(
80 self.remote_git.pull(
81 wire={}, url='/tmp/', apply_refs=False,
81 wire={}, url='/tmp/', apply_refs=False,
82 refs=list(selected_refs.keys()))
82 refs=list(selected_refs.keys()))
83 determine_wants = self.mock_repo.object_store.determine_wants_all
83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 assert determine_wants.call_count == 0
84 assert determine_wants.call_count == 0
85
85
86 def test_get_remote_refs(self):
86 def test_get_remote_refs(self):
87 factory = Mock()
87 factory = Mock()
88 remote_git = git.GitRemote(factory)
88 remote_git = git.GitRemote(factory)
89 url = 'http://example.com/test/test.git'
89 url = 'http://example.com/test/test.git'
90 sample_refs = {
90 sample_refs = {
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 }
93 }
94
94
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
96 mock_repo().get_refs.return_value = sample_refs
96 mock_repo().get_refs.return_value = sample_refs
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 mock_repo().get_refs.assert_called_once_with()
98 mock_repo().get_refs.assert_called_once_with()
99 assert remote_refs == sample_refs
99 assert remote_refs == sample_refs
100
100
101
101
102 class TestReraiseSafeExceptions(object):
102 class TestReraiseSafeExceptions(object):
103
103
104 def test_method_decorated_with_reraise_safe_exceptions(self):
104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 factory = Mock()
105 factory = Mock()
106 git_remote = git.GitRemote(factory)
106 git_remote = git.GitRemote(factory)
107
107
108 def fake_function():
108 def fake_function():
109 return None
109 return None
110
110
111 decorator = git.reraise_safe_exceptions(fake_function)
111 decorator = git.reraise_safe_exceptions(fake_function)
112
112
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 for method_name, method in methods:
114 for method_name, method in methods:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 assert method.__func__.__code__ == decorator.__code__
116 assert method.__func__.__code__ == decorator.__code__
117
117
118 @pytest.mark.parametrize('side_effect, expected_type', [
118 @pytest.mark.parametrize('side_effect, expected_type', [
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.HangupException(), 'error'),
123 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 ])
125 ])
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 @git.reraise_safe_exceptions
127 @git.reraise_safe_exceptions
128 def fake_method():
128 def fake_method():
129 raise side_effect
129 raise side_effect
130
130
131 with pytest.raises(Exception) as exc_info:
131 with pytest.raises(Exception) as exc_info:
132 fake_method()
132 fake_method()
133 assert type(exc_info.value) == Exception
133 assert type(exc_info.value) == Exception
134 assert exc_info.value._vcs_kind == expected_type
134 assert exc_info.value._vcs_kind == expected_type
135
135
136
136
137 class TestDulwichRepoWrapper(object):
137 class TestDulwichRepoWrapper(object):
138 def test_calls_close_on_delete(self):
138 def test_calls_close_on_delete(self):
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
140 with patch.object(git.Repo, 'close') as close_mock:
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 assert repo is not None
143 assert repo is not None
144 repo.__del__()
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
146
147 close_mock.assert_called_once_with()
147 close_mock.assert_called_once_with()
148
148
149
149
150 class TestGitFactory(object):
150 class TestGitFactory(object):
151 def test_create_repo_returns_dulwich_wrapper(self):
151 def test_create_repo_returns_dulwich_wrapper(self):
152
152
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 mock.side_effect = {'repo_objects': ''}
154 mock.side_effect = {'repo_objects': ''}
155 factory = git.GitFactory()
155 factory = git.GitFactory()
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git.Repo)
@@ -1,108 +1,108 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, patch
24 from mock import Mock, patch
25
25
26 from vcsserver import exceptions, hgcompat
26 from vcsserver import exceptions, hgcompat
27 from vcsserver.remote import hg
27 from vcsserver.remote import hg
28
28
29
29
30 class TestDiff(object):
30 class TestDiff(object):
31 def test_raising_safe_exception_when_lookup_failed(self):
31 def test_raising_safe_exception_when_lookup_failed(self):
32
32
33 factory = Mock()
33 factory = Mock()
34 hg_remote = hg.HgRemote(factory)
34 hg_remote = hg.HgRemote(factory)
35 with patch('mercurial.patch.diff') as diff_mock:
35 with patch('mercurial.patch.diff') as diff_mock:
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
37
37
38 with pytest.raises(Exception) as exc_info:
38 with pytest.raises(Exception) as exc_info:
39 hg_remote.diff(
39 hg_remote.diff(
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
41 file_filter=None, opt_git=True, opt_ignorews=True,
41 file_filter=None, opt_git=True, opt_ignorews=True,
42 context=3)
42 context=3)
43 assert type(exc_info.value) == Exception
43 assert type(exc_info.value) == Exception
44 assert exc_info.value._vcs_kind == 'lookup'
44 assert exc_info.value._vcs_kind == 'lookup'
45
45
46
46
47 class TestReraiseSafeExceptions(object):
47 class TestReraiseSafeExceptions(object):
48 def test_method_decorated_with_reraise_safe_exceptions(self):
48 def test_method_decorated_with_reraise_safe_exceptions(self):
49 factory = Mock()
49 factory = Mock()
50 hg_remote = hg.HgRemote(factory)
50 hg_remote = hg.HgRemote(factory)
51 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
51 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
52 decorator = hg.reraise_safe_exceptions(None)
52 decorator = hg.reraise_safe_exceptions(None)
53 for method_name, method in methods:
53 for method_name, method in methods:
54 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
54 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
55 assert method.__func__.__code__ == decorator.__code__
55 assert method.__func__.__code__ == decorator.__code__
56
56
57 @pytest.mark.parametrize('side_effect, expected_type', [
57 @pytest.mark.parametrize('side_effect, expected_type', [
58 (hgcompat.Abort('failed-abort'), 'abort'),
58 (hgcompat.Abort('failed-abort'), 'abort'),
59 (hgcompat.InterventionRequired('intervention-required'), 'abort'),
59 (hgcompat.InterventionRequired('intervention-required'), 'abort'),
60 (hgcompat.RepoLookupError(), 'lookup'),
60 (hgcompat.RepoLookupError(), 'lookup'),
61 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
61 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
62 (hgcompat.RepoError(), 'error'),
62 (hgcompat.RepoError(), 'error'),
63 (hgcompat.RequirementError(), 'requirement'),
63 (hgcompat.RequirementError(), 'requirement'),
64 ])
64 ])
65 def test_safe_exceptions_reraised(self, side_effect, expected_type):
65 def test_safe_exceptions_reraised(self, side_effect, expected_type):
66 @hg.reraise_safe_exceptions
66 @hg.reraise_safe_exceptions
67 def fake_method():
67 def fake_method():
68 raise side_effect
68 raise side_effect
69
69
70 with pytest.raises(Exception) as exc_info:
70 with pytest.raises(Exception) as exc_info:
71 fake_method()
71 fake_method()
72 assert type(exc_info.value) == Exception
72 assert type(exc_info.value) == Exception
73 assert exc_info.value._vcs_kind == expected_type
73 assert exc_info.value._vcs_kind == expected_type
74
74
75 def test_keeps_original_traceback(self):
75 def test_keeps_original_traceback(self):
76 @hg.reraise_safe_exceptions
76 @hg.reraise_safe_exceptions
77 def fake_method():
77 def fake_method():
78 try:
78 try:
79 raise hgcompat.Abort('test-abort')
79 raise hgcompat.Abort('test-abort')
80 except:
80 except:
81 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
81 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
82 raise
82 raise
83
83
84 try:
84 try:
85 fake_method()
85 fake_method()
86 except Exception:
86 except Exception:
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
88
88
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 assert new_traceback_tail == self.original_traceback
90 assert new_traceback_tail == self.original_traceback
91
91
92 def test_maps_unknow_exceptions_to_unhandled(self):
92 def test_maps_unknow_exceptions_to_unhandled(self):
93 @hg.reraise_safe_exceptions
93 @hg.reraise_safe_exceptions
94 def stub_method():
94 def stub_method():
95 raise ValueError('stub')
95 raise ValueError('stub')
96
96
97 with pytest.raises(Exception) as exc_info:
97 with pytest.raises(Exception) as exc_info:
98 stub_method()
98 stub_method()
99 assert exc_info.value._vcs_kind == 'unhandled'
99 assert exc_info.value._vcs_kind == 'unhandled'
100
100
101 def test_does_not_map_known_exceptions(self):
101 def test_does_not_map_known_exceptions(self):
102 @hg.reraise_safe_exceptions
102 @hg.reraise_safe_exceptions
103 def stub_method():
103 def stub_method():
104 raise exceptions.LookupException()('stub')
104 raise exceptions.LookupException()('stub')
105
105
106 with pytest.raises(Exception) as exc_info:
106 with pytest.raises(Exception) as exc_info:
107 stub_method()
107 stub_method()
108 assert exc_info.value._vcs_kind == 'lookup'
108 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,119 +1,119 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = b'largefiles=serve'
24 LARGEFILES_CAPABILITY = b'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto._capabilities.__name__ == '_dynamic_capabilities'
31 assert lfproto._capabilities.__name__ == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 orig_capabilities):
36 orig_capabilities):
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
38 hgcompat.largefiles.proto, stub_extensions)
38 hgcompat.largefiles.proto, stub_extensions)
39
39
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
41
41
42 stub_extensions.assert_called_once_with(stub_ui)
42 stub_extensions.assert_called_once_with(stub_ui)
43 assert LARGEFILES_CAPABILITY not in caps
43 assert LARGEFILES_CAPABILITY not in caps
44
44
45
45
46 def test_dynamic_capabilities_ignores_updated_capabilities(
46 def test_dynamic_capabilities_ignores_updated_capabilities(
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 orig_capabilities):
48 orig_capabilities):
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
51 hgcompat.largefiles.proto, stub_extensions)
51 hgcompat.largefiles.proto, stub_extensions)
52
52
53 # This happens when the extension is loaded for the first time, important
53 # This happens when the extension is loaded for the first time, important
54 # to ensure that an updated function is correctly picked up.
54 # to ensure that an updated function is correctly picked up.
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
56 side_effect=Exception('Must not be called'))
56 side_effect=Exception('Must not be called'))
57
57
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
59
59
60
60
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 orig_capabilities):
63 orig_capabilities):
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
65
65
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
67 hgcompat.largefiles.proto, stub_extensions)
67 hgcompat.largefiles.proto, stub_extensions)
68
68
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
70
70
71 stub_extensions.assert_called_once_with(stub_ui)
71 stub_extensions.assert_called_once_with(stub_ui)
72 assert LARGEFILES_CAPABILITY in caps
72 assert LARGEFILES_CAPABILITY in caps
73
73
74
74
75 @pytest.fixture
75 @pytest.fixture
76 def patched_capabilities(request):
76 def patched_capabilities(request):
77 """
77 """
78 Patch in `capabilitiesorig` and restore both capability functions.
78 Patch in `capabilitiesorig` and restore both capability functions.
79 """
79 """
80 lfproto = hgcompat.largefiles.proto
80 lfproto = hgcompat.largefiles.proto
81 orig_capabilities = lfproto._capabilities
81 orig_capabilities = lfproto._capabilities
82
82
83 @request.addfinalizer
83 @request.addfinalizer
84 def restore():
84 def restore():
85 lfproto._capabilities = orig_capabilities
85 lfproto._capabilities = orig_capabilities
86
86
87
87
88 @pytest.fixture
88 @pytest.fixture
89 def stub_repo(stub_ui):
89 def stub_repo(stub_ui):
90 repo = mock.Mock()
90 repo = mock.Mock()
91 repo.ui = stub_ui
91 repo.ui = stub_ui
92 return repo
92 return repo
93
93
94
94
95 @pytest.fixture
95 @pytest.fixture
96 def stub_proto(stub_ui):
96 def stub_proto(stub_ui):
97 proto = mock.Mock()
97 proto = mock.Mock()
98 proto.ui = stub_ui
98 proto.ui = stub_ui
99 return proto
99 return proto
100
100
101
101
102 @pytest.fixture
102 @pytest.fixture
103 def orig_capabilities():
103 def orig_capabilities():
104 from mercurial.wireprotov1server import wireprotocaps
104 from mercurial.wireprotov1server import wireprotocaps
105
105
106 def _capabilities(repo, proto):
106 def _capabilities(repo, proto):
107 return wireprotocaps
107 return wireprotocaps
108 return _capabilities
108 return _capabilities
109
109
110
110
111 @pytest.fixture
111 @pytest.fixture
112 def stub_ui():
112 def stub_ui():
113 return hgcompat.ui.ui()
113 return hgcompat.ui.ui()
114
114
115
115
116 @pytest.fixture
116 @pytest.fixture
117 def stub_extensions():
117 def stub_extensions():
118 extensions = mock.Mock(return_value=tuple())
118 extensions = mock.Mock(return_value=tuple())
119 return extensions
119 return extensions
@@ -1,286 +1,286 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import threading
18 import threading
19 import msgpack
19 import msgpack
20
20
21 from http.server import BaseHTTPRequestHandler
21 from http.server import BaseHTTPRequestHandler
22 from socketserver import TCPServer
22 from socketserver import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27
27
28 from vcsserver.hooks import HooksHttpClient
28 from vcsserver.hooks import HooksHttpClient
29 from vcsserver.lib.rc_json import json
29 from vcsserver.lib.rc_json import json
30 from vcsserver import hooks
30 from vcsserver import hooks
31
31
32
32
33 def get_hg_ui(extras=None):
33 def get_hg_ui(extras=None):
34 """Create a Config object with a valid RC_SCM_DATA entry."""
34 """Create a Config object with a valid RC_SCM_DATA entry."""
35 extras = extras or {}
35 extras = extras or {}
36 required_extras = {
36 required_extras = {
37 'username': '',
37 'username': '',
38 'repository': '',
38 'repository': '',
39 'locked_by': '',
39 'locked_by': '',
40 'scm': '',
40 'scm': '',
41 'make_lock': '',
41 'make_lock': '',
42 'action': '',
42 'action': '',
43 'ip': '',
43 'ip': '',
44 'hooks_uri': 'fake_hooks_uri',
44 'hooks_uri': 'fake_hooks_uri',
45 }
45 }
46 required_extras.update(extras)
46 required_extras.update(extras)
47 hg_ui = mercurial.ui.ui()
47 hg_ui = mercurial.ui.ui()
48 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
48 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
49
49
50 return hg_ui
50 return hg_ui
51
51
52
52
53 def test_git_pre_receive_is_disabled():
53 def test_git_pre_receive_is_disabled():
54 extras = {'hooks': ['pull']}
54 extras = {'hooks': ['pull']}
55 response = hooks.git_pre_receive(None, None,
55 response = hooks.git_pre_receive(None, None,
56 {'RC_SCM_DATA': json.dumps(extras)})
56 {'RC_SCM_DATA': json.dumps(extras)})
57
57
58 assert response == 0
58 assert response == 0
59
59
60
60
61 def test_git_post_receive_is_disabled():
61 def test_git_post_receive_is_disabled():
62 extras = {'hooks': ['pull']}
62 extras = {'hooks': ['pull']}
63 response = hooks.git_post_receive(None, '',
63 response = hooks.git_post_receive(None, '',
64 {'RC_SCM_DATA': json.dumps(extras)})
64 {'RC_SCM_DATA': json.dumps(extras)})
65
65
66 assert response == 0
66 assert response == 0
67
67
68
68
69 def test_git_post_receive_calls_repo_size():
69 def test_git_post_receive_calls_repo_size():
70 extras = {'hooks': ['push', 'repo_size']}
70 extras = {'hooks': ['push', 'repo_size']}
71
71
72 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
72 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
73 hooks.git_post_receive(
73 hooks.git_post_receive(
74 None, '', {'RC_SCM_DATA': json.dumps(extras)})
74 None, '', {'RC_SCM_DATA': json.dumps(extras)})
75 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
75 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
76 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
76 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
77 expected_calls = [
77 expected_calls = [
78 mock.call('repo_size', extras, mock.ANY),
78 mock.call('repo_size', extras, mock.ANY),
79 mock.call('post_push', extras, mock.ANY),
79 mock.call('post_push', extras, mock.ANY),
80 ]
80 ]
81 assert call_hook_mock.call_args_list == expected_calls
81 assert call_hook_mock.call_args_list == expected_calls
82
82
83
83
84 def test_git_post_receive_does_not_call_disabled_repo_size():
84 def test_git_post_receive_does_not_call_disabled_repo_size():
85 extras = {'hooks': ['push']}
85 extras = {'hooks': ['push']}
86
86
87 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
87 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
88 hooks.git_post_receive(
88 hooks.git_post_receive(
89 None, '', {'RC_SCM_DATA': json.dumps(extras)})
89 None, '', {'RC_SCM_DATA': json.dumps(extras)})
90 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
90 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
91 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
91 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
92 expected_calls = [
92 expected_calls = [
93 mock.call('post_push', extras, mock.ANY)
93 mock.call('post_push', extras, mock.ANY)
94 ]
94 ]
95 assert call_hook_mock.call_args_list == expected_calls
95 assert call_hook_mock.call_args_list == expected_calls
96
96
97
97
98 def test_repo_size_exception_does_not_affect_git_post_receive():
98 def test_repo_size_exception_does_not_affect_git_post_receive():
99 extras = {'hooks': ['push', 'repo_size']}
99 extras = {'hooks': ['push', 'repo_size']}
100 status = 0
100 status = 0
101
101
102 def side_effect(name, *args, **kwargs):
102 def side_effect(name, *args, **kwargs):
103 if name == 'repo_size':
103 if name == 'repo_size':
104 raise Exception('Fake exception')
104 raise Exception('Fake exception')
105 else:
105 else:
106 return status
106 return status
107
107
108 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
108 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
109 call_hook_mock.side_effect = side_effect
109 call_hook_mock.side_effect = side_effect
110 result = hooks.git_post_receive(
110 result = hooks.git_post_receive(
111 None, '', {'RC_SCM_DATA': json.dumps(extras)})
111 None, '', {'RC_SCM_DATA': json.dumps(extras)})
112 assert result == status
112 assert result == status
113
113
114
114
115 def test_git_pre_pull_is_disabled():
115 def test_git_pre_pull_is_disabled():
116 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
116 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
117
117
118
118
119 def test_git_post_pull_is_disabled():
119 def test_git_post_pull_is_disabled():
120 assert (
120 assert (
121 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
121 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
122
122
123
123
124 class TestGetHooksClient(object):
124 class TestGetHooksClient(object):
125
125
126 def test_returns_http_client_when_protocol_matches(self):
126 def test_returns_http_client_when_protocol_matches(self):
127 hooks_uri = 'localhost:8000'
127 hooks_uri = 'localhost:8000'
128 result = hooks._get_hooks_client({
128 result = hooks._get_hooks_client({
129 'hooks_uri': hooks_uri,
129 'hooks_uri': hooks_uri,
130 'hooks_protocol': 'http'
130 'hooks_protocol': 'http'
131 })
131 })
132 assert isinstance(result, hooks.HooksHttpClient)
132 assert isinstance(result, hooks.HooksHttpClient)
133 assert result.hooks_uri == hooks_uri
133 assert result.hooks_uri == hooks_uri
134
134
135 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
135 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
136 fake_module = mock.Mock()
136 fake_module = mock.Mock()
137 import_patcher = mock.patch.object(
137 import_patcher = mock.patch.object(
138 hooks.importlib, 'import_module', return_value=fake_module)
138 hooks.importlib, 'import_module', return_value=fake_module)
139 fake_module_name = 'fake.module'
139 fake_module_name = 'fake.module'
140 with import_patcher as import_mock:
140 with import_patcher as import_mock:
141 result = hooks._get_hooks_client(
141 result = hooks._get_hooks_client(
142 {'hooks_module': fake_module_name})
142 {'hooks_module': fake_module_name})
143
143
144 import_mock.assert_called_once_with(fake_module_name)
144 import_mock.assert_called_once_with(fake_module_name)
145 assert isinstance(result, hooks.HooksDummyClient)
145 assert isinstance(result, hooks.HooksDummyClient)
146 assert result._hooks_module == fake_module
146 assert result._hooks_module == fake_module
147
147
148
148
149 class TestHooksHttpClient(object):
149 class TestHooksHttpClient(object):
150 def test_init_sets_hooks_uri(self):
150 def test_init_sets_hooks_uri(self):
151 uri = 'localhost:3000'
151 uri = 'localhost:3000'
152 client = hooks.HooksHttpClient(uri)
152 client = hooks.HooksHttpClient(uri)
153 assert client.hooks_uri == uri
153 assert client.hooks_uri == uri
154
154
155 def test_serialize_returns_serialized_string(self):
155 def test_serialize_returns_serialized_string(self):
156 client = hooks.HooksHttpClient('localhost:3000')
156 client = hooks.HooksHttpClient('localhost:3000')
157 hook_name = 'test'
157 hook_name = 'test'
158 extras = {
158 extras = {
159 'first': 1,
159 'first': 1,
160 'second': 'two'
160 'second': 'two'
161 }
161 }
162 hooks_proto, result = client._serialize(hook_name, extras)
162 hooks_proto, result = client._serialize(hook_name, extras)
163 expected_result = msgpack.packb({
163 expected_result = msgpack.packb({
164 'method': hook_name,
164 'method': hook_name,
165 'extras': extras,
165 'extras': extras,
166 })
166 })
167 assert hooks_proto == {'rc-hooks-protocol': 'msgpack.v1', 'Connection': 'keep-alive'}
167 assert hooks_proto == {'rc-hooks-protocol': 'msgpack.v1', 'Connection': 'keep-alive'}
168 assert result == expected_result
168 assert result == expected_result
169
169
170 def test_call_queries_http_server(self, http_mirror):
170 def test_call_queries_http_server(self, http_mirror):
171 client = hooks.HooksHttpClient(http_mirror.uri)
171 client = hooks.HooksHttpClient(http_mirror.uri)
172 hook_name = 'test'
172 hook_name = 'test'
173 extras = {
173 extras = {
174 'first': 1,
174 'first': 1,
175 'second': 'two'
175 'second': 'two'
176 }
176 }
177 result = client(hook_name, extras)
177 result = client(hook_name, extras)
178 expected_result = msgpack.unpackb(msgpack.packb({
178 expected_result = msgpack.unpackb(msgpack.packb({
179 'method': hook_name,
179 'method': hook_name,
180 'extras': extras
180 'extras': extras
181 }), raw=False)
181 }), raw=False)
182 assert result == expected_result
182 assert result == expected_result
183
183
184
184
185 class TestHooksDummyClient(object):
185 class TestHooksDummyClient(object):
186 def test_init_imports_hooks_module(self):
186 def test_init_imports_hooks_module(self):
187 hooks_module_name = 'rhodecode.fake.module'
187 hooks_module_name = 'rhodecode.fake.module'
188 hooks_module = mock.MagicMock()
188 hooks_module = mock.MagicMock()
189
189
190 import_patcher = mock.patch.object(
190 import_patcher = mock.patch.object(
191 hooks.importlib, 'import_module', return_value=hooks_module)
191 hooks.importlib, 'import_module', return_value=hooks_module)
192 with import_patcher as import_mock:
192 with import_patcher as import_mock:
193 client = hooks.HooksDummyClient(hooks_module_name)
193 client = hooks.HooksDummyClient(hooks_module_name)
194 import_mock.assert_called_once_with(hooks_module_name)
194 import_mock.assert_called_once_with(hooks_module_name)
195 assert client._hooks_module == hooks_module
195 assert client._hooks_module == hooks_module
196
196
197 def test_call_returns_hook_result(self):
197 def test_call_returns_hook_result(self):
198 hooks_module_name = 'rhodecode.fake.module'
198 hooks_module_name = 'rhodecode.fake.module'
199 hooks_module = mock.MagicMock()
199 hooks_module = mock.MagicMock()
200 import_patcher = mock.patch.object(
200 import_patcher = mock.patch.object(
201 hooks.importlib, 'import_module', return_value=hooks_module)
201 hooks.importlib, 'import_module', return_value=hooks_module)
202 with import_patcher:
202 with import_patcher:
203 client = hooks.HooksDummyClient(hooks_module_name)
203 client = hooks.HooksDummyClient(hooks_module_name)
204
204
205 result = client('post_push', {})
205 result = client('post_push', {})
206 hooks_module.Hooks.assert_called_once_with()
206 hooks_module.Hooks.assert_called_once_with()
207 assert result == hooks_module.Hooks().__enter__().post_push()
207 assert result == hooks_module.Hooks().__enter__().post_push()
208
208
209
209
210 @pytest.fixture
210 @pytest.fixture
211 def http_mirror(request):
211 def http_mirror(request):
212 server = MirrorHttpServer()
212 server = MirrorHttpServer()
213 request.addfinalizer(server.stop)
213 request.addfinalizer(server.stop)
214 return server
214 return server
215
215
216
216
217 class MirrorHttpHandler(BaseHTTPRequestHandler):
217 class MirrorHttpHandler(BaseHTTPRequestHandler):
218
218
219 def do_POST(self):
219 def do_POST(self):
220 length = int(self.headers['Content-Length'])
220 length = int(self.headers['Content-Length'])
221 body = self.rfile.read(length)
221 body = self.rfile.read(length)
222 self.send_response(200)
222 self.send_response(200)
223 self.end_headers()
223 self.end_headers()
224 self.wfile.write(body)
224 self.wfile.write(body)
225
225
226
226
227 class MirrorHttpServer(object):
227 class MirrorHttpServer(object):
228 ip_address = '127.0.0.1'
228 ip_address = '127.0.0.1'
229 port = 0
229 port = 0
230
230
231 def __init__(self):
231 def __init__(self):
232 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
232 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
233 _, self.port = self._daemon.server_address
233 _, self.port = self._daemon.server_address
234 self._thread = threading.Thread(target=self._daemon.serve_forever)
234 self._thread = threading.Thread(target=self._daemon.serve_forever)
235 self._thread.daemon = True
235 self._thread.daemon = True
236 self._thread.start()
236 self._thread.start()
237
237
238 def stop(self):
238 def stop(self):
239 self._daemon.shutdown()
239 self._daemon.shutdown()
240 self._thread.join()
240 self._thread.join()
241 self._daemon = None
241 self._daemon = None
242 self._thread = None
242 self._thread = None
243
243
244 @property
244 @property
245 def uri(self):
245 def uri(self):
246 return '{}:{}'.format(self.ip_address, self.port)
246 return '{}:{}'.format(self.ip_address, self.port)
247
247
248
248
249 def test_hooks_http_client_init():
249 def test_hooks_http_client_init():
250 hooks_uri = 'http://localhost:8000'
250 hooks_uri = 'http://localhost:8000'
251 client = HooksHttpClient(hooks_uri)
251 client = HooksHttpClient(hooks_uri)
252 assert client.hooks_uri == hooks_uri
252 assert client.hooks_uri == hooks_uri
253
253
254
254
255 def test_hooks_http_client_call():
255 def test_hooks_http_client_call():
256 hooks_uri = 'http://localhost:8000'
256 hooks_uri = 'http://localhost:8000'
257
257
258 method = 'test_method'
258 method = 'test_method'
259 extras = {'key': 'value'}
259 extras = {'key': 'value'}
260
260
261 with \
261 with \
262 mock.patch('http.client.HTTPConnection') as mock_connection,\
262 mock.patch('http.client.HTTPConnection') as mock_connection,\
263 mock.patch('msgpack.load') as mock_load:
263 mock.patch('msgpack.load') as mock_load:
264
264
265 client = HooksHttpClient(hooks_uri)
265 client = HooksHttpClient(hooks_uri)
266
266
267 mock_load.return_value = {'result': 'success'}
267 mock_load.return_value = {'result': 'success'}
268 response = mock.MagicMock()
268 response = mock.MagicMock()
269 response.status = 200
269 response.status = 200
270 mock_connection.request.side_effect = None
270 mock_connection.request.side_effect = None
271 mock_connection.getresponse.return_value = response
271 mock_connection.getresponse.return_value = response
272
272
273 result = client(method, extras)
273 result = client(method, extras)
274
274
275 mock_connection.assert_called_with(hooks_uri)
275 mock_connection.assert_called_with(hooks_uri)
276 mock_connection.return_value.request.assert_called_once()
276 mock_connection.return_value.request.assert_called_once()
277 assert result == {'result': 'success'}
277 assert result == {'result': 'success'}
278
278
279
279
280 def test_hooks_http_client_serialize():
280 def test_hooks_http_client_serialize():
281 method = 'test_method'
281 method = 'test_method'
282 extras = {'key': 'value'}
282 extras = {'key': 'value'}
283 headers, body = HooksHttpClient._serialize(method, extras)
283 headers, body = HooksHttpClient._serialize(method, extras)
284
284
285 assert headers == {'rc-hooks-protocol': HooksHttpClient.proto, 'Connection': 'keep-alive'}
285 assert headers == {'rc-hooks-protocol': HooksHttpClient.proto, 'Connection': 'keep-alive'}
286 assert msgpack.unpackb(body) == {'method': method, 'extras': extras}
286 assert msgpack.unpackb(body) == {'method': method, 'extras': extras}
@@ -1,206 +1,206 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import stat
20 import stat
21 import pytest
21 import pytest
22 import vcsserver
22 import vcsserver
23 import tempfile
23 import tempfile
24 from vcsserver import hook_utils
24 from vcsserver import hook_utils
25 from vcsserver.tests.fixture import no_newline_id_generator
25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.str_utils import safe_bytes, safe_str
26 from vcsserver.str_utils import safe_bytes, safe_str
27 from vcsserver.utils import AttributeDict
27 from vcsserver.utils import AttributeDict
28
28
29
29
30 class TestCheckRhodecodeHook(object):
30 class TestCheckRhodecodeHook(object):
31
31
32 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
33 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
34 with open(hook, 'wb') as f:
34 with open(hook, 'wb') as f:
35 f.write(b'dummy test')
35 f.write(b'dummy test')
36 result = hook_utils.check_rhodecode_hook(hook)
36 result = hook_utils.check_rhodecode_hook(hook)
37 assert result is False
37 assert result is False
38
38
39 def test_returns_true_when_no_hook_file_found(self, tmpdir):
39 def test_returns_true_when_no_hook_file_found(self, tmpdir):
40 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
40 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
41 result = hook_utils.check_rhodecode_hook(hook)
41 result = hook_utils.check_rhodecode_hook(hook)
42 assert result
42 assert result
43
43
44 @pytest.mark.parametrize("file_content, expected_result", [
44 @pytest.mark.parametrize("file_content, expected_result", [
45 ("RC_HOOK_VER = '3.3.3'\n", True),
45 ("RC_HOOK_VER = '3.3.3'\n", True),
46 ("RC_HOOK = '3.3.3'\n", False),
46 ("RC_HOOK = '3.3.3'\n", False),
47 ], ids=no_newline_id_generator)
47 ], ids=no_newline_id_generator)
48 def test_signatures(self, file_content, expected_result, tmpdir):
48 def test_signatures(self, file_content, expected_result, tmpdir):
49 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
50 with open(hook, 'wb') as f:
50 with open(hook, 'wb') as f:
51 f.write(safe_bytes(file_content))
51 f.write(safe_bytes(file_content))
52
52
53 result = hook_utils.check_rhodecode_hook(hook)
53 result = hook_utils.check_rhodecode_hook(hook)
54
54
55 assert result is expected_result
55 assert result is expected_result
56
56
57
57
58 class BaseInstallHooks(object):
58 class BaseInstallHooks(object):
59 HOOK_FILES = ()
59 HOOK_FILES = ()
60
60
61 def _check_hook_file_mode(self, file_path):
61 def _check_hook_file_mode(self, file_path):
62 assert os.path.exists(file_path), 'path %s missing' % file_path
62 assert os.path.exists(file_path), 'path %s missing' % file_path
63 stat_info = os.stat(file_path)
63 stat_info = os.stat(file_path)
64
64
65 file_mode = stat.S_IMODE(stat_info.st_mode)
65 file_mode = stat.S_IMODE(stat_info.st_mode)
66 expected_mode = int('755', 8)
66 expected_mode = int('755', 8)
67 assert expected_mode == file_mode
67 assert expected_mode == file_mode
68
68
69 def _check_hook_file_content(self, file_path, executable):
69 def _check_hook_file_content(self, file_path, executable):
70 executable = executable or sys.executable
70 executable = executable or sys.executable
71 with open(file_path, 'rt') as hook_file:
71 with open(file_path, 'rt') as hook_file:
72 content = hook_file.read()
72 content = hook_file.read()
73
73
74 expected_env = '#!{}'.format(executable)
74 expected_env = '#!{}'.format(executable)
75 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(vcsserver.__version__)
75 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(vcsserver.__version__)
76 assert content.strip().startswith(expected_env)
76 assert content.strip().startswith(expected_env)
77 assert expected_rc_version in content
77 assert expected_rc_version in content
78
78
79 def _create_fake_hook(self, file_path, content):
79 def _create_fake_hook(self, file_path, content):
80 with open(file_path, 'w') as hook_file:
80 with open(file_path, 'w') as hook_file:
81 hook_file.write(content)
81 hook_file.write(content)
82
82
83 def create_dummy_repo(self, repo_type):
83 def create_dummy_repo(self, repo_type):
84 tmpdir = tempfile.mkdtemp()
84 tmpdir = tempfile.mkdtemp()
85 repo = AttributeDict()
85 repo = AttributeDict()
86 if repo_type == 'git':
86 if repo_type == 'git':
87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 os.makedirs(repo.path)
88 os.makedirs(repo.path)
89 os.makedirs(os.path.join(repo.path, 'hooks'))
89 os.makedirs(os.path.join(repo.path, 'hooks'))
90 repo.bare = True
90 repo.bare = True
91
91
92 elif repo_type == 'svn':
92 elif repo_type == 'svn':
93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 os.makedirs(repo.path)
94 os.makedirs(repo.path)
95 os.makedirs(os.path.join(repo.path, 'hooks'))
95 os.makedirs(os.path.join(repo.path, 'hooks'))
96
96
97 return repo
97 return repo
98
98
99 def check_hooks(self, repo_path, repo_bare=True):
99 def check_hooks(self, repo_path, repo_bare=True):
100 for file_name in self.HOOK_FILES:
100 for file_name in self.HOOK_FILES:
101 if repo_bare:
101 if repo_bare:
102 file_path = os.path.join(repo_path, 'hooks', file_name)
102 file_path = os.path.join(repo_path, 'hooks', file_name)
103 else:
103 else:
104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 self._check_hook_file_mode(file_path)
105 self._check_hook_file_mode(file_path)
106 self._check_hook_file_content(file_path, sys.executable)
106 self._check_hook_file_content(file_path, sys.executable)
107
107
108
108
109 class TestInstallGitHooks(BaseInstallHooks):
109 class TestInstallGitHooks(BaseInstallHooks):
110 HOOK_FILES = ('pre-receive', 'post-receive')
110 HOOK_FILES = ('pre-receive', 'post-receive')
111
111
112 def test_hooks_are_installed(self):
112 def test_hooks_are_installed(self):
113 repo = self.create_dummy_repo('git')
113 repo = self.create_dummy_repo('git')
114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 assert result
115 assert result
116 self.check_hooks(repo.path, repo.bare)
116 self.check_hooks(repo.path, repo.bare)
117
117
118 def test_hooks_are_replaced(self):
118 def test_hooks_are_replaced(self):
119 repo = self.create_dummy_repo('git')
119 repo = self.create_dummy_repo('git')
120 hooks_path = os.path.join(repo.path, 'hooks')
120 hooks_path = os.path.join(repo.path, 'hooks')
121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 self._create_fake_hook(
122 self._create_fake_hook(
123 file_path, content="RC_HOOK_VER = 'abcde'\n")
123 file_path, content="RC_HOOK_VER = 'abcde'\n")
124
124
125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 assert result
126 assert result
127 self.check_hooks(repo.path, repo.bare)
127 self.check_hooks(repo.path, repo.bare)
128
128
129 def test_non_rc_hooks_are_not_replaced(self):
129 def test_non_rc_hooks_are_not_replaced(self):
130 repo = self.create_dummy_repo('git')
130 repo = self.create_dummy_repo('git')
131 hooks_path = os.path.join(repo.path, 'hooks')
131 hooks_path = os.path.join(repo.path, 'hooks')
132 non_rc_content = 'echo "non rc hook"\n'
132 non_rc_content = 'echo "non rc hook"\n'
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 self._create_fake_hook(
134 self._create_fake_hook(
135 file_path, content=non_rc_content)
135 file_path, content=non_rc_content)
136
136
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 assert result
138 assert result
139
139
140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 with open(file_path, 'rt') as hook_file:
141 with open(file_path, 'rt') as hook_file:
142 content = hook_file.read()
142 content = hook_file.read()
143 assert content == non_rc_content
143 assert content == non_rc_content
144
144
145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 repo = self.create_dummy_repo('git')
146 repo = self.create_dummy_repo('git')
147 hooks_path = os.path.join(repo.path, 'hooks')
147 hooks_path = os.path.join(repo.path, 'hooks')
148 non_rc_content = 'echo "non rc hook"\n'
148 non_rc_content = 'echo "non rc hook"\n'
149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 self._create_fake_hook(
150 self._create_fake_hook(
151 file_path, content=non_rc_content)
151 file_path, content=non_rc_content)
152
152
153 result = hook_utils.install_git_hooks(
153 result = hook_utils.install_git_hooks(
154 repo.path, repo.bare, force_create=True)
154 repo.path, repo.bare, force_create=True)
155 assert result
155 assert result
156 self.check_hooks(repo.path, repo.bare)
156 self.check_hooks(repo.path, repo.bare)
157
157
158
158
159 class TestInstallSvnHooks(BaseInstallHooks):
159 class TestInstallSvnHooks(BaseInstallHooks):
160 HOOK_FILES = ('pre-commit', 'post-commit')
160 HOOK_FILES = ('pre-commit', 'post-commit')
161
161
162 def test_hooks_are_installed(self):
162 def test_hooks_are_installed(self):
163 repo = self.create_dummy_repo('svn')
163 repo = self.create_dummy_repo('svn')
164 result = hook_utils.install_svn_hooks(repo.path)
164 result = hook_utils.install_svn_hooks(repo.path)
165 assert result
165 assert result
166 self.check_hooks(repo.path)
166 self.check_hooks(repo.path)
167
167
168 def test_hooks_are_replaced(self):
168 def test_hooks_are_replaced(self):
169 repo = self.create_dummy_repo('svn')
169 repo = self.create_dummy_repo('svn')
170 hooks_path = os.path.join(repo.path, 'hooks')
170 hooks_path = os.path.join(repo.path, 'hooks')
171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 self._create_fake_hook(
172 self._create_fake_hook(
173 file_path, content="RC_HOOK_VER = 'abcde'\n")
173 file_path, content="RC_HOOK_VER = 'abcde'\n")
174
174
175 result = hook_utils.install_svn_hooks(repo.path)
175 result = hook_utils.install_svn_hooks(repo.path)
176 assert result
176 assert result
177 self.check_hooks(repo.path)
177 self.check_hooks(repo.path)
178
178
179 def test_non_rc_hooks_are_not_replaced(self):
179 def test_non_rc_hooks_are_not_replaced(self):
180 repo = self.create_dummy_repo('svn')
180 repo = self.create_dummy_repo('svn')
181 hooks_path = os.path.join(repo.path, 'hooks')
181 hooks_path = os.path.join(repo.path, 'hooks')
182 non_rc_content = 'echo "non rc hook"\n'
182 non_rc_content = 'echo "non rc hook"\n'
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 self._create_fake_hook(
184 self._create_fake_hook(
185 file_path, content=non_rc_content)
185 file_path, content=non_rc_content)
186
186
187 result = hook_utils.install_svn_hooks(repo.path)
187 result = hook_utils.install_svn_hooks(repo.path)
188 assert result
188 assert result
189
189
190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 with open(file_path, 'rt') as hook_file:
191 with open(file_path, 'rt') as hook_file:
192 content = hook_file.read()
192 content = hook_file.read()
193 assert content == non_rc_content
193 assert content == non_rc_content
194
194
195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 repo = self.create_dummy_repo('svn')
196 repo = self.create_dummy_repo('svn')
197 hooks_path = os.path.join(repo.path, 'hooks')
197 hooks_path = os.path.join(repo.path, 'hooks')
198 non_rc_content = 'echo "non rc hook"\n'
198 non_rc_content = 'echo "non rc hook"\n'
199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 self._create_fake_hook(
200 self._create_fake_hook(
201 file_path, content=non_rc_content)
201 file_path, content=non_rc_content)
202
202
203 result = hook_utils.install_svn_hooks(
203 result = hook_utils.install_svn_hooks(
204 repo.path, force_create=True)
204 repo.path, force_create=True)
205 assert result
205 assert result
206 self.check_hooks(repo.path, )
206 self.check_hooks(repo.path, )
@@ -1,56 +1,56 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import http_main
21 from vcsserver import http_main
22 from vcsserver.base import obfuscate_qs
22 from vcsserver.base import obfuscate_qs
23
23
24
24
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 http_main.main({'__file__': ''})
28 http_main.main({'__file__': ''})
29 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
30
30
31
31
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 @mock.patch(
34 @mock.patch(
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 http_main.main({'__file__': ''})
38 http_main.main({'__file__': ''})
39
39
40
40
41 @pytest.mark.parametrize('given, expected', [
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret', 'a;b;c;query&foo=bar&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret', 'a;b;c;query&foo=bar&auth_token=*****'),
46 ('', ''),
46 ('', ''),
47 (None, None),
47 (None, None),
48 ('foo=bar', 'foo=bar'),
48 ('foo=bar', 'foo=bar'),
49 ('auth_token=secret', 'auth_token=*****'),
49 ('auth_token=secret', 'auth_token=*****'),
50 ('auth_token=secret&api_key=secret2',
50 ('auth_token=secret&api_key=secret2',
51 'auth_token=*****&api_key=*****'),
51 'auth_token=*****&api_key=*****'),
52 ('auth_token=secret&api_key=secret2&param=value',
52 ('auth_token=secret&api_key=secret2&param=value',
53 'auth_token=*****&api_key=*****&param=value'),
53 'auth_token=*****&api_key=*****&param=value'),
54 ])
54 ])
55 def test_obfuscate_qs(given, expected):
55 def test_obfuscate_qs(given, expected):
56 assert expected == obfuscate_qs(given)
56 assert expected == obfuscate_qs(given)
@@ -1,295 +1,295 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import more_itertools
19 import more_itertools
20
20
21 import dulwich.protocol
21 import dulwich.protocol
22 import mock
22 import mock
23 import pytest
23 import pytest
24 import webob
24 import webob
25 import webtest
25 import webtest
26
26
27 from vcsserver import hooks, pygrack
27 from vcsserver import hooks, pygrack
28
28
29 from vcsserver.str_utils import ascii_bytes
29 from vcsserver.str_utils import ascii_bytes
30
30
31
31
32 @pytest.fixture()
32 @pytest.fixture()
33 def pygrack_instance(tmpdir):
33 def pygrack_instance(tmpdir):
34 """
34 """
35 Creates a pygrack app instance.
35 Creates a pygrack app instance.
36
36
37 Right now, it does not much helpful regarding the passed directory.
37 Right now, it does not much helpful regarding the passed directory.
38 It just contains the required folders to pass the signature test.
38 It just contains the required folders to pass the signature test.
39 """
39 """
40 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
41 tmpdir.mkdir(dir_name)
41 tmpdir.mkdir(dir_name)
42
42
43 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
44
44
45
45
46 @pytest.fixture()
46 @pytest.fixture()
47 def pygrack_app(pygrack_instance):
47 def pygrack_app(pygrack_instance):
48 """
48 """
49 Creates a pygrack app wrapped in webtest.TestApp.
49 Creates a pygrack app wrapped in webtest.TestApp.
50 """
50 """
51 return webtest.TestApp(pygrack_instance)
51 return webtest.TestApp(pygrack_instance)
52
52
53
53
54 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 def test_invalid_service_info_refs_returns_403(pygrack_app):
55 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 response = pygrack_app.get('/info/refs?service=git-upload-packs',
56 expect_errors=True)
56 expect_errors=True)
57
57
58 assert response.status_int == 403
58 assert response.status_int == 403
59
59
60
60
61 def test_invalid_endpoint_returns_403(pygrack_app):
61 def test_invalid_endpoint_returns_403(pygrack_app):
62 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
63
63
64 assert response.status_int == 403
64 assert response.status_int == 403
65
65
66
66
67 @pytest.mark.parametrize('sideband', [
67 @pytest.mark.parametrize('sideband', [
68 'side-band-64k',
68 'side-band-64k',
69 'side-band',
69 'side-band',
70 'side-band no-progress',
70 'side-band no-progress',
71 ])
71 ])
72 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
73 request = ''.join([
73 request = ''.join([
74 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
75 'multi_ack %s ofs-delta\n' % sideband,
75 'multi_ack %s ofs-delta\n' % sideband,
76 '0000',
76 '0000',
77 '0009done\n',
77 '0009done\n',
78 ])
78 ])
79 with mock.patch('vcsserver.hooks.git_pre_pull', return_value=hooks.HookResponse(1, 'foo')):
79 with mock.patch('vcsserver.hooks.git_pre_pull', return_value=hooks.HookResponse(1, 'foo')):
80 response = pygrack_app.post(
80 response = pygrack_app.post(
81 '/git-upload-pack', params=request,
81 '/git-upload-pack', params=request,
82 content_type='application/x-git-upload-pack')
82 content_type='application/x-git-upload-pack')
83
83
84 data = io.BytesIO(response.body)
84 data = io.BytesIO(response.body)
85 proto = dulwich.protocol.Protocol(data.read, None)
85 proto = dulwich.protocol.Protocol(data.read, None)
86 packets = list(proto.read_pkt_seq())
86 packets = list(proto.read_pkt_seq())
87
87
88 expected_packets = [
88 expected_packets = [
89 b'NAK\n', b'\x02foo', b'\x02Pre pull hook failed: aborting\n',
89 b'NAK\n', b'\x02foo', b'\x02Pre pull hook failed: aborting\n',
90 b'\x01' + pygrack.GitRepository.EMPTY_PACK,
90 b'\x01' + pygrack.GitRepository.EMPTY_PACK,
91 ]
91 ]
92 assert packets == expected_packets
92 assert packets == expected_packets
93
93
94
94
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 request = ''.join([
96 request = ''.join([
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 'multi_ack ofs-delta\n'
98 'multi_ack ofs-delta\n'
99 '0000',
99 '0000',
100 '0009done\n',
100 '0009done\n',
101 ])
101 ])
102 with mock.patch('vcsserver.hooks.git_pre_pull',
102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 return_value=hooks.HookResponse(1, 'foo')):
103 return_value=hooks.HookResponse(1, 'foo')):
104 response = pygrack_app.post(
104 response = pygrack_app.post(
105 '/git-upload-pack', params=request,
105 '/git-upload-pack', params=request,
106 content_type='application/x-git-upload-pack')
106 content_type='application/x-git-upload-pack')
107
107
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109
109
110
110
111 def test_pull_has_hook_messages(pygrack_app):
111 def test_pull_has_hook_messages(pygrack_app):
112 request = ''.join([
112 request = ''.join([
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 'multi_ack side-band-64k ofs-delta\n'
114 'multi_ack side-band-64k ofs-delta\n'
115 '0000',
115 '0000',
116 '0009done\n',
116 '0009done\n',
117 ])
117 ])
118
118
119 pre_pull = 'pre_pull_output'
119 pre_pull = 'pre_pull_output'
120 post_pull = 'post_pull_output'
120 post_pull = 'post_pull_output'
121
121
122 with mock.patch('vcsserver.hooks.git_pre_pull',
122 with mock.patch('vcsserver.hooks.git_pre_pull',
123 return_value=hooks.HookResponse(0, pre_pull)):
123 return_value=hooks.HookResponse(0, pre_pull)):
124 with mock.patch('vcsserver.hooks.git_post_pull',
124 with mock.patch('vcsserver.hooks.git_post_pull',
125 return_value=hooks.HookResponse(1, post_pull)):
125 return_value=hooks.HookResponse(1, post_pull)):
126 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
126 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
127 return_value=more_itertools.always_iterable([b'0008NAK\n0009subp\n0000'])):
127 return_value=more_itertools.always_iterable([b'0008NAK\n0009subp\n0000'])):
128 response = pygrack_app.post(
128 response = pygrack_app.post(
129 '/git-upload-pack', params=request,
129 '/git-upload-pack', params=request,
130 content_type='application/x-git-upload-pack')
130 content_type='application/x-git-upload-pack')
131
131
132 data = io.BytesIO(response.body)
132 data = io.BytesIO(response.body)
133 proto = dulwich.protocol.Protocol(data.read, None)
133 proto = dulwich.protocol.Protocol(data.read, None)
134 packets = list(proto.read_pkt_seq())
134 packets = list(proto.read_pkt_seq())
135
135
136 assert packets == [b'NAK\n',
136 assert packets == [b'NAK\n',
137 # pre-pull only outputs if IT FAILS as in != 0 ret code
137 # pre-pull only outputs if IT FAILS as in != 0 ret code
138 #b'\x02pre_pull_output',
138 #b'\x02pre_pull_output',
139 b'subp\n',
139 b'subp\n',
140 b'\x02post_pull_output']
140 b'\x02post_pull_output']
141
141
142
142
143 def test_get_want_capabilities(pygrack_instance):
143 def test_get_want_capabilities(pygrack_instance):
144 data = io.BytesIO(
144 data = io.BytesIO(
145 b'0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
145 b'0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
146 b'multi_ack side-band-64k ofs-delta\n00000009done\n')
146 b'multi_ack side-band-64k ofs-delta\n00000009done\n')
147
147
148 request = webob.Request({
148 request = webob.Request({
149 'wsgi.input': data,
149 'wsgi.input': data,
150 'REQUEST_METHOD': 'POST',
150 'REQUEST_METHOD': 'POST',
151 'webob.is_body_seekable': True
151 'webob.is_body_seekable': True
152 })
152 })
153
153
154 capabilities = pygrack_instance._get_want_capabilities(request)
154 capabilities = pygrack_instance._get_want_capabilities(request)
155
155
156 assert capabilities == frozenset(
156 assert capabilities == frozenset(
157 (b'ofs-delta', b'multi_ack', b'side-band-64k'))
157 (b'ofs-delta', b'multi_ack', b'side-band-64k'))
158 assert data.tell() == 0
158 assert data.tell() == 0
159
159
160
160
161 @pytest.mark.parametrize('data,capabilities,expected', [
161 @pytest.mark.parametrize('data,capabilities,expected', [
162 ('foo', [], []),
162 ('foo', [], []),
163 ('', [pygrack.CAPABILITY_SIDE_BAND_64K], []),
163 ('', [pygrack.CAPABILITY_SIDE_BAND_64K], []),
164 ('', [pygrack.CAPABILITY_SIDE_BAND], []),
164 ('', [pygrack.CAPABILITY_SIDE_BAND], []),
165 ('foo', [pygrack.CAPABILITY_SIDE_BAND_64K], [b'0008\x02foo']),
165 ('foo', [pygrack.CAPABILITY_SIDE_BAND_64K], [b'0008\x02foo']),
166 ('foo', [pygrack.CAPABILITY_SIDE_BAND], [b'0008\x02foo']),
166 ('foo', [pygrack.CAPABILITY_SIDE_BAND], [b'0008\x02foo']),
167 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'03ed\x02' + b'f' * 1000]),
167 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'03ed\x02' + b'f' * 1000]),
168 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995, b'000a\x02fffff']),
168 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995, b'000a\x02fffff']),
169 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'fff0\x02' + b'f' * 65515, b'000a\x02fffff']),
169 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'fff0\x02' + b'f' * 65515, b'000a\x02fffff']),
170 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995] * 65 + [b'0352\x02' + b'f' * 845]),
170 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995] * 65 + [b'0352\x02' + b'f' * 845]),
171 ], ids=[
171 ], ids=[
172 'foo-empty',
172 'foo-empty',
173 'empty-64k', 'empty',
173 'empty-64k', 'empty',
174 'foo-64k', 'foo',
174 'foo-64k', 'foo',
175 'f-1000-64k', 'f-1000',
175 'f-1000-64k', 'f-1000',
176 'f-65520-64k', 'f-65520'])
176 'f-65520-64k', 'f-65520'])
177 def test_get_messages(pygrack_instance, data, capabilities, expected):
177 def test_get_messages(pygrack_instance, data, capabilities, expected):
178 messages = pygrack_instance._get_messages(data, capabilities)
178 messages = pygrack_instance._get_messages(data, capabilities)
179
179
180 assert messages == expected
180 assert messages == expected
181
181
182
182
183 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
183 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
184 # Unexpected response
184 # Unexpected response
185 ([b'unexpected_response[no_initial_header]'], [pygrack.CAPABILITY_SIDE_BAND_64K], 'foo', 'bar'),
185 ([b'unexpected_response[no_initial_header]'], [pygrack.CAPABILITY_SIDE_BAND_64K], 'foo', 'bar'),
186 # No sideband
186 # No sideband
187 ([b'no-sideband'], [], 'foo', 'bar'),
187 ([b'no-sideband'], [], 'foo', 'bar'),
188 # No messages
188 # No messages
189 ([b'no-messages'], [pygrack.CAPABILITY_SIDE_BAND_64K], '', ''),
189 ([b'no-messages'], [pygrack.CAPABILITY_SIDE_BAND_64K], '', ''),
190 ])
190 ])
191 def test_inject_messages_to_response_nothing_to_do(
191 def test_inject_messages_to_response_nothing_to_do(
192 pygrack_instance, response, capabilities, pre_pull_messages, post_pull_messages):
192 pygrack_instance, response, capabilities, pre_pull_messages, post_pull_messages):
193
193
194 new_response = pygrack_instance._build_post_pull_response(
194 new_response = pygrack_instance._build_post_pull_response(
195 more_itertools.always_iterable(response), capabilities, pre_pull_messages, post_pull_messages)
195 more_itertools.always_iterable(response), capabilities, pre_pull_messages, post_pull_messages)
196
196
197 assert list(new_response) == response
197 assert list(new_response) == response
198
198
199
199
200 @pytest.mark.parametrize('capabilities', [
200 @pytest.mark.parametrize('capabilities', [
201 [pygrack.CAPABILITY_SIDE_BAND],
201 [pygrack.CAPABILITY_SIDE_BAND],
202 [pygrack.CAPABILITY_SIDE_BAND_64K],
202 [pygrack.CAPABILITY_SIDE_BAND_64K],
203 ])
203 ])
204 def test_inject_messages_to_response_single_element(pygrack_instance, capabilities):
204 def test_inject_messages_to_response_single_element(pygrack_instance, capabilities):
205 response = [b'0008NAK\n0009subp\n0000']
205 response = [b'0008NAK\n0009subp\n0000']
206 new_response = pygrack_instance._build_post_pull_response(
206 new_response = pygrack_instance._build_post_pull_response(
207 more_itertools.always_iterable(response), capabilities, 'foo', 'bar')
207 more_itertools.always_iterable(response), capabilities, 'foo', 'bar')
208
208
209 expected_response = b''.join([
209 expected_response = b''.join([
210 b'0008NAK\n',
210 b'0008NAK\n',
211 b'0008\x02foo',
211 b'0008\x02foo',
212 b'0009subp\n',
212 b'0009subp\n',
213 b'0008\x02bar',
213 b'0008\x02bar',
214 b'0000'])
214 b'0000'])
215
215
216 assert b''.join(new_response) == expected_response
216 assert b''.join(new_response) == expected_response
217
217
218
218
219 @pytest.mark.parametrize('capabilities', [
219 @pytest.mark.parametrize('capabilities', [
220 [pygrack.CAPABILITY_SIDE_BAND],
220 [pygrack.CAPABILITY_SIDE_BAND],
221 [pygrack.CAPABILITY_SIDE_BAND_64K],
221 [pygrack.CAPABILITY_SIDE_BAND_64K],
222 ])
222 ])
223 def test_inject_messages_to_response_multi_element(pygrack_instance, capabilities):
223 def test_inject_messages_to_response_multi_element(pygrack_instance, capabilities):
224 response = more_itertools.always_iterable([
224 response = more_itertools.always_iterable([
225 b'0008NAK\n000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n0000'
225 b'0008NAK\n000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n0000'
226 ])
226 ])
227 new_response = pygrack_instance._build_post_pull_response(response, capabilities, 'foo', 'bar')
227 new_response = pygrack_instance._build_post_pull_response(response, capabilities, 'foo', 'bar')
228
228
229 expected_response = b''.join([
229 expected_response = b''.join([
230 b'0008NAK\n',
230 b'0008NAK\n',
231 b'0008\x02foo',
231 b'0008\x02foo',
232 b'000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n',
232 b'000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n',
233 b'0008\x02bar',
233 b'0008\x02bar',
234 b'0000'
234 b'0000'
235 ])
235 ])
236
236
237 assert b''.join(new_response) == expected_response
237 assert b''.join(new_response) == expected_response
238
238
239
239
240 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
240 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
241 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
241 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
242
242
243 assert response == [pygrack.GitRepository.EMPTY_PACK]
243 assert response == [pygrack.GitRepository.EMPTY_PACK]
244
244
245
245
246 @pytest.mark.parametrize('capabilities', [
246 @pytest.mark.parametrize('capabilities', [
247 [pygrack.CAPABILITY_SIDE_BAND],
247 [pygrack.CAPABILITY_SIDE_BAND],
248 [pygrack.CAPABILITY_SIDE_BAND_64K],
248 [pygrack.CAPABILITY_SIDE_BAND_64K],
249 [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'],
249 [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'],
250 ])
250 ])
251 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
251 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
252 response = pygrack_instance._build_failed_pre_pull_response(capabilities, 'foo')
252 response = pygrack_instance._build_failed_pre_pull_response(capabilities, 'foo')
253
253
254 expected_response = [
254 expected_response = [
255 b'0008NAK\n', b'0008\x02foo', b'0024\x02Pre pull hook failed: aborting\n',
255 b'0008NAK\n', b'0008\x02foo', b'0024\x02Pre pull hook failed: aborting\n',
256 b'%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5, pygrack.GitRepository.EMPTY_PACK),
256 b'%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5, pygrack.GitRepository.EMPTY_PACK),
257 pygrack.GitRepository.FLUSH_PACKET,
257 pygrack.GitRepository.FLUSH_PACKET,
258 ]
258 ]
259
259
260 assert response == expected_response
260 assert response == expected_response
261
261
262
262
263 def test_inject_messages_to_response_generator(pygrack_instance):
263 def test_inject_messages_to_response_generator(pygrack_instance):
264
264
265 def response_generator():
265 def response_generator():
266 response = [
266 response = [
267 # protocol start
267 # protocol start
268 b'0008NAK\n',
268 b'0008NAK\n',
269 ]
269 ]
270 response += [ascii_bytes(f'000asubp{x}\n') for x in range(1000)]
270 response += [ascii_bytes(f'000asubp{x}\n') for x in range(1000)]
271 response += [
271 response += [
272 # protocol end
272 # protocol end
273 pygrack.GitRepository.FLUSH_PACKET
273 pygrack.GitRepository.FLUSH_PACKET
274 ]
274 ]
275 for elem in response:
275 for elem in response:
276 yield elem
276 yield elem
277
277
278 new_response = pygrack_instance._build_post_pull_response(
278 new_response = pygrack_instance._build_post_pull_response(
279 response_generator(), [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'], 'PRE_PULL_MSG\n', 'POST_PULL_MSG\n')
279 response_generator(), [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'], 'PRE_PULL_MSG\n', 'POST_PULL_MSG\n')
280
280
281 assert iter(new_response)
281 assert iter(new_response)
282
282
283 expected_response = b''.join([
283 expected_response = b''.join([
284 # start
284 # start
285 b'0008NAK\n0012\x02PRE_PULL_MSG\n',
285 b'0008NAK\n0012\x02PRE_PULL_MSG\n',
286 ] + [
286 ] + [
287 # ... rest
287 # ... rest
288 ascii_bytes(f'000asubp{x}\n') for x in range(1000)
288 ascii_bytes(f'000asubp{x}\n') for x in range(1000)
289 ] + [
289 ] + [
290 # final message,
290 # final message,
291 b'0013\x02POST_PULL_MSG\n0000',
291 b'0013\x02POST_PULL_MSG\n0000',
292
292
293 ])
293 ])
294
294
295 assert b''.join(new_response) == expected_response
295 assert b''.join(new_response) == expected_response
@@ -1,87 +1,87 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mercurial.hg
20 import mercurial.hg
21 import mercurial.ui
21 import mercurial.ui
22 import mercurial.error
22 import mercurial.error
23 import mock
23 import mock
24 import pytest
24 import pytest
25 import webtest
25 import webtest
26
26
27 from vcsserver import scm_app
27 from vcsserver import scm_app
28 from vcsserver.str_utils import ascii_bytes
28 from vcsserver.str_utils import ascii_bytes
29
29
30
30
31 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 def test_hg_does_not_accept_invalid_cmd(tmpdir):
32 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
32 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
33 app = webtest.TestApp(scm_app.HgWeb(repo))
33 app = webtest.TestApp(scm_app.HgWeb(repo))
34
34
35 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
36
36
37 assert response.status_int == 400
37 assert response.status_int == 400
38
38
39
39
40 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 def test_create_hg_wsgi_app_requirement_error(tmpdir):
41 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
41 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
42 config = (
42 config = (
43 ('paths', 'default', ''),
43 ('paths', 'default', ''),
44 )
44 )
45 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
46 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 hgweb_mock.side_effect = mercurial.error.RequirementError()
47 with pytest.raises(Exception):
47 with pytest.raises(Exception):
48 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
49
49
50
50
51 def test_git_returns_not_found(tmpdir):
51 def test_git_returns_not_found(tmpdir):
52 app = webtest.TestApp(
52 app = webtest.TestApp(
53 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
54
54
55 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 response = app.get('/repo_name/inforefs?service=git-upload-pack',
56 expect_errors=True)
56 expect_errors=True)
57
57
58 assert response.status_int == 404
58 assert response.status_int == 404
59
59
60
60
61 def test_git(tmpdir):
61 def test_git(tmpdir):
62 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
63 tmpdir.mkdir(dir_name)
63 tmpdir.mkdir(dir_name)
64
64
65 app = webtest.TestApp(
65 app = webtest.TestApp(
66 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
67
67
68 # We set service to git-upload-packs to trigger a 403
68 # We set service to git-upload-packs to trigger a 403
69 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 response = app.get('/repo_name/inforefs?service=git-upload-packs',
70 expect_errors=True)
70 expect_errors=True)
71
71
72 assert response.status_int == 403
72 assert response.status_int == 403
73
73
74
74
75 def test_git_fallbacks_to_git_folder(tmpdir):
75 def test_git_fallbacks_to_git_folder(tmpdir):
76 tmpdir.mkdir('.git')
76 tmpdir.mkdir('.git')
77 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
78 tmpdir.mkdir(os.path.join('.git', dir_name))
78 tmpdir.mkdir(os.path.join('.git', dir_name))
79
79
80 app = webtest.TestApp(
80 app = webtest.TestApp(
81 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
82
82
83 # We set service to git-upload-packs to trigger a 403
83 # We set service to git-upload-packs to trigger a 403
84 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 response = app.get('/repo_name/inforefs?service=git-upload-packs',
85 expect_errors=True)
85 expect_errors=True)
86
86
87 assert response.status_int == 403
87 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 from vcsserver.server import VcsServer
23 from vcsserver.server import VcsServer
24
24
25
25
26 def test_provides_the_pid(server):
26 def test_provides_the_pid(server):
27 pid = server.get_pid()
27 pid = server.get_pid()
28 assert pid == os.getpid()
28 assert pid == os.getpid()
29
29
30
30
31 def test_allows_to_trigger_the_garbage_collector(server):
31 def test_allows_to_trigger_the_garbage_collector(server):
32 with mock.patch('gc.collect') as collect:
32 with mock.patch('gc.collect') as collect:
33 server.run_gc()
33 server.run_gc()
34 assert collect.called
34 assert collect.called
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def server():
38 def server():
39 return VcsServer()
39 return VcsServer()
@@ -1,155 +1,155 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21
21
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25 from vcsserver.str_utils import ascii_bytes
25 from vcsserver.str_utils import ascii_bytes
26
26
27
27
28 class FileLikeObj(object): # pragma: no cover
28 class FileLikeObj(object): # pragma: no cover
29
29
30 def __init__(self, data: bytes, size):
30 def __init__(self, data: bytes, size):
31 chunks = size // len(data)
31 chunks = size // len(data)
32
32
33 self.stream = self._get_stream(data, chunks)
33 self.stream = self._get_stream(data, chunks)
34
34
35 def _get_stream(self, data, chunks):
35 def _get_stream(self, data, chunks):
36 for x in range(chunks):
36 for x in range(chunks):
37 yield data
37 yield data
38
38
39 def read(self, n):
39 def read(self, n):
40
40
41 buffer_stream = b''
41 buffer_stream = b''
42 for chunk in self.stream:
42 for chunk in self.stream:
43 buffer_stream += chunk
43 buffer_stream += chunk
44 if len(buffer_stream) >= n:
44 if len(buffer_stream) >= n:
45 break
45 break
46
46
47 # self.stream = self.bytes[n:]
47 # self.stream = self.bytes[n:]
48 return buffer_stream
48 return buffer_stream
49
49
50
50
51 @pytest.fixture(scope='module')
51 @pytest.fixture(scope='module')
52 def environ():
52 def environ():
53 """Delete coverage variables, as they make the tests fail."""
53 """Delete coverage variables, as they make the tests fail."""
54 env = dict(os.environ)
54 env = dict(os.environ)
55 for key in list(env.keys()):
55 for key in list(env.keys()):
56 if key.startswith('COV_CORE_'):
56 if key.startswith('COV_CORE_'):
57 del env[key]
57 del env[key]
58
58
59 return env
59 return env
60
60
61
61
62 def _get_python_args(script):
62 def _get_python_args(script):
63 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
63 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
64
64
65
65
66 def test_raise_exception_on_non_zero_return_code(environ):
66 def test_raise_exception_on_non_zero_return_code(environ):
67 call_args = _get_python_args('raise ValueError("fail")')
67 call_args = _get_python_args('raise ValueError("fail")')
68 with pytest.raises(OSError):
68 with pytest.raises(OSError):
69 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
69 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
70
70
71
71
72 def test_does_not_fail_on_non_zero_return_code(environ):
72 def test_does_not_fail_on_non_zero_return_code(environ):
73 call_args = _get_python_args('sys.stdout.write("hello"); sys.exit(1)')
73 call_args = _get_python_args('sys.stdout.write("hello"); sys.exit(1)')
74 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
74 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
75 output = b''.join(proc)
75 output = b''.join(proc)
76
76
77 assert output == b'hello'
77 assert output == b'hello'
78
78
79
79
80 def test_raise_exception_on_stderr(environ):
80 def test_raise_exception_on_stderr(environ):
81 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); time.sleep(1);')
81 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); time.sleep(1);')
82
82
83 with pytest.raises(OSError) as excinfo:
83 with pytest.raises(OSError) as excinfo:
84 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
84 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
85
85
86 assert 'exited due to an error:\nWRITE_TO_STDERR' in str(excinfo.value)
86 assert 'exited due to an error:\nWRITE_TO_STDERR' in str(excinfo.value)
87
87
88
88
89 def test_does_not_fail_on_stderr(environ):
89 def test_does_not_fail_on_stderr(environ):
90 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); sys.stderr.flush; time.sleep(2);')
90 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); sys.stderr.flush; time.sleep(2);')
91 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_stderr=False, env=environ)
91 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_stderr=False, env=environ)
92 output = b''.join(proc)
92 output = b''.join(proc)
93
93
94 assert output == b''
94 assert output == b''
95
95
96
96
97 @pytest.mark.parametrize('size', [
97 @pytest.mark.parametrize('size', [
98 1,
98 1,
99 10 ** 5
99 10 ** 5
100 ])
100 ])
101 def test_output_with_no_input(size, environ):
101 def test_output_with_no_input(size, environ):
102 call_args = _get_python_args(f'sys.stdout.write("X" * {size});')
102 call_args = _get_python_args(f'sys.stdout.write("X" * {size});')
103 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ)
103 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ)
104 output = b''.join(proc)
104 output = b''.join(proc)
105
105
106 assert output == ascii_bytes("X" * size)
106 assert output == ascii_bytes("X" * size)
107
107
108
108
109 @pytest.mark.parametrize('size', [
109 @pytest.mark.parametrize('size', [
110 1,
110 1,
111 10 ** 5
111 10 ** 5
112 ])
112 ])
113 def test_output_with_no_input_does_not_fail(size, environ):
113 def test_output_with_no_input_does_not_fail(size, environ):
114
114
115 call_args = _get_python_args(f'sys.stdout.write("X" * {size}); sys.exit(1)')
115 call_args = _get_python_args(f'sys.stdout.write("X" * {size}); sys.exit(1)')
116 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
116 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
117 output = b''.join(proc)
117 output = b''.join(proc)
118
118
119 assert output == ascii_bytes("X" * size)
119 assert output == ascii_bytes("X" * size)
120
120
121
121
122 @pytest.mark.parametrize('size', [
122 @pytest.mark.parametrize('size', [
123 1,
123 1,
124 10 ** 5
124 10 ** 5
125 ])
125 ])
126 def test_output_with_input(size, environ):
126 def test_output_with_input(size, environ):
127 data_len = size
127 data_len = size
128 inputstream = FileLikeObj(b'X', size)
128 inputstream = FileLikeObj(b'X', size)
129
129
130 # This acts like the cat command.
130 # This acts like the cat command.
131 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
131 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 # note: in this tests we explicitly don't assign chunker to a variable and let it stream directly
132 # note: in this tests we explicitly don't assign chunker to a variable and let it stream directly
133 output = b''.join(
133 output = b''.join(
134 subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
134 subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
135 )
135 )
136
136
137 assert len(output) == data_len
137 assert len(output) == data_len
138
138
139
139
140 @pytest.mark.parametrize('size', [
140 @pytest.mark.parametrize('size', [
141 1,
141 1,
142 10 ** 5
142 10 ** 5
143 ])
143 ])
144 def test_output_with_input_skipping_iterator(size, environ):
144 def test_output_with_input_skipping_iterator(size, environ):
145 data_len = size
145 data_len = size
146 inputstream = FileLikeObj(b'X', size)
146 inputstream = FileLikeObj(b'X', size)
147
147
148 # This acts like the cat command.
148 # This acts like the cat command.
149 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
149 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
150
150
151 # Note: assigning the chunker makes sure that it is not deleted too early
151 # Note: assigning the chunker makes sure that it is not deleted too early
152 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
152 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
153 output = b''.join(proc.stdout)
153 output = b''.join(proc.stdout)
154
154
155 assert len(output) == data_len
155 assert len(output) == data_len
@@ -1,103 +1,103 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import mock
19 import mock
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23 from vcsserver.str_utils import ascii_bytes
23 from vcsserver.str_utils import ascii_bytes
24
24
25
25
26 class MockPopen(object):
26 class MockPopen(object):
27 def __init__(self, stderr):
27 def __init__(self, stderr):
28 self.stdout = io.BytesIO(b'')
28 self.stdout = io.BytesIO(b'')
29 self.stderr = io.BytesIO(stderr)
29 self.stderr = io.BytesIO(stderr)
30 self.returncode = 1
30 self.returncode = 1
31
31
32 def wait(self):
32 def wait(self):
33 pass
33 pass
34
34
35
35
36 INVALID_CERTIFICATE_STDERR = '\n'.join([
36 INVALID_CERTIFICATE_STDERR = '\n'.join([
37 'svnrdump: E230001: Unable to connect to a repository at URL url',
37 'svnrdump: E230001: Unable to connect to a repository at URL url',
38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
39 ])
39 ])
40
40
41
41
42 @pytest.mark.parametrize('stderr,expected_reason', [
42 @pytest.mark.parametrize('stderr,expected_reason', [
43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
46 @pytest.mark.xfail(sys.platform == "cygwin",
46 @pytest.mark.xfail(sys.platform == "cygwin",
47 reason="SVN not packaged for Cygwin")
47 reason="SVN not packaged for Cygwin")
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
49 from vcsserver.remote import svn
49 from vcsserver.remote import svn
50 factory = mock.Mock()
50 factory = mock.Mock()
51 factory.repo = mock.Mock(return_value=mock.Mock())
51 factory.repo = mock.Mock(return_value=mock.Mock())
52
52
53 remote = svn.SvnRemote(factory)
53 remote = svn.SvnRemote(factory)
54 remote.is_path_valid_repository = lambda wire, path: True
54 remote.is_path_valid_repository = lambda wire, path: True
55
55
56 with mock.patch('subprocess.Popen',
56 with mock.patch('subprocess.Popen',
57 return_value=MockPopen(ascii_bytes(stderr))):
57 return_value=MockPopen(ascii_bytes(stderr))):
58 with pytest.raises(Exception) as excinfo:
58 with pytest.raises(Exception) as excinfo:
59 remote.import_remote_repository({'path': 'path'}, 'url')
59 remote.import_remote_repository({'path': 'path'}, 'url')
60
60
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
62
62
63 assert excinfo.value.args[0] == expected_error_args
63 assert excinfo.value.args[0] == expected_error_args
64
64
65
65
66 def test_svn_libraries_can_be_imported():
66 def test_svn_libraries_can_be_imported():
67 import svn.client
67 import svn.client
68 assert svn.client is not None
68 assert svn.client is not None
69
69
70
70
71 @pytest.mark.parametrize('example_url, parts', [
71 @pytest.mark.parametrize('example_url, parts', [
72 ('http://server.com', ('', '', 'http://server.com')),
72 ('http://server.com', ('', '', 'http://server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 ('<script>', ('', '', '<script>')),
75 ('<script>', ('', '', '<script>')),
76 ('http://', ('', '', 'http://')),
76 ('http://', ('', '', 'http://')),
77 ])
77 ])
78 def test_username_password_extraction_from_url(example_url, parts):
78 def test_username_password_extraction_from_url(example_url, parts):
79 from vcsserver.remote import svn
79 from vcsserver.remote import svn
80
80
81 factory = mock.Mock()
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
83
84 remote = svn.SvnRemote(factory)
84 remote = svn.SvnRemote(factory)
85 remote.is_path_valid_repository = lambda wire, path: True
85 remote.is_path_valid_repository = lambda wire, path: True
86
86
87 assert remote.get_url_and_credentials(example_url) == parts
87 assert remote.get_url_and_credentials(example_url) == parts
88
88
89
89
90 @pytest.mark.parametrize('call_url', [
90 @pytest.mark.parametrize('call_url', [
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
94 ])
94 ])
95 def test_check_url(call_url):
95 def test_check_url(call_url):
96 from vcsserver.remote import svn
96 from vcsserver.remote import svn
97 factory = mock.Mock()
97 factory = mock.Mock()
98 factory.repo = mock.Mock(return_value=mock.Mock())
98 factory.repo = mock.Mock(return_value=mock.Mock())
99
99
100 remote = svn.SvnRemote(factory)
100 remote = svn.SvnRemote(factory)
101 remote.is_path_valid_repository = lambda wire, path: True
101 remote.is_path_valid_repository = lambda wire, path: True
102 assert remote.check_url(call_url, {'dummy': 'config'})
102 assert remote.check_url(call_url, {'dummy': 'config'})
103
103
@@ -1,53 +1,53 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import pytest
18 import pytest
19 from vcsserver.str_utils import ascii_bytes, ascii_str
19 from vcsserver.str_utils import ascii_bytes, ascii_str
20
20
21
21
22 @pytest.mark.parametrize('given, expected', [
22 @pytest.mark.parametrize('given, expected', [
23 ('a', b'a'),
23 ('a', b'a'),
24 ('a', b'a'),
24 ('a', b'a'),
25 ])
25 ])
26 def test_ascii_bytes(given, expected):
26 def test_ascii_bytes(given, expected):
27 assert ascii_bytes(given) == expected
27 assert ascii_bytes(given) == expected
28
28
29
29
30 @pytest.mark.parametrize('given', [
30 @pytest.mark.parametrize('given', [
31 'Ã¥',
31 'Ã¥',
32 'Ã¥'.encode('utf8')
32 'Ã¥'.encode('utf8')
33 ])
33 ])
34 def test_ascii_bytes_raises(given):
34 def test_ascii_bytes_raises(given):
35 with pytest.raises(ValueError):
35 with pytest.raises(ValueError):
36 ascii_bytes(given)
36 ascii_bytes(given)
37
37
38
38
39 @pytest.mark.parametrize('given, expected', [
39 @pytest.mark.parametrize('given, expected', [
40 (b'a', 'a'),
40 (b'a', 'a'),
41 ])
41 ])
42 def test_ascii_str(given, expected):
42 def test_ascii_str(given, expected):
43 assert ascii_str(given) == expected
43 assert ascii_str(given) == expected
44
44
45
45
46 @pytest.mark.parametrize('given', [
46 @pytest.mark.parametrize('given', [
47 'a',
47 'a',
48 'Ã¥'.encode('utf8'),
48 'Ã¥'.encode('utf8'),
49 'Ã¥'
49 'Ã¥'
50 ])
50 ])
51 def test_ascii_str_raises(given):
51 def test_ascii_str_raises(given):
52 with pytest.raises(ValueError):
52 with pytest.raises(ValueError):
53 ascii_str(given)
53 ascii_str(given)
@@ -1,98 +1,98 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import wsgiref.simple_server
18 import wsgiref.simple_server
19 import wsgiref.validate
19 import wsgiref.validate
20
20
21 from vcsserver import wsgi_app_caller
21 from vcsserver import wsgi_app_caller
22 from vcsserver.str_utils import ascii_bytes, safe_str
22 from vcsserver.str_utils import ascii_bytes, safe_str
23
23
24
24
25 @wsgiref.validate.validator
25 @wsgiref.validate.validator
26 def demo_app(environ, start_response):
26 def demo_app(environ, start_response):
27 """WSGI app used for testing."""
27 """WSGI app used for testing."""
28
28
29 input_data = safe_str(environ['wsgi.input'].read(1024))
29 input_data = safe_str(environ['wsgi.input'].read(1024))
30
30
31 data = [
31 data = [
32 'Hello World!\n',
32 'Hello World!\n',
33 f'input_data={input_data}\n',
33 f'input_data={input_data}\n',
34 ]
34 ]
35 for key, value in sorted(environ.items()):
35 for key, value in sorted(environ.items()):
36 data.append(f'{key}={value}\n')
36 data.append(f'{key}={value}\n')
37
37
38 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write = start_response("200 OK", [('Content-Type', 'text/plain')])
39 write(b'Old school write method\n')
39 write(b'Old school write method\n')
40 write(b'***********************\n')
40 write(b'***********************\n')
41 return list(map(ascii_bytes, data))
41 return list(map(ascii_bytes, data))
42
42
43
43
44 BASE_ENVIRON = {
44 BASE_ENVIRON = {
45 'REQUEST_METHOD': 'GET',
45 'REQUEST_METHOD': 'GET',
46 'SERVER_NAME': 'localhost',
46 'SERVER_NAME': 'localhost',
47 'SERVER_PORT': '80',
47 'SERVER_PORT': '80',
48 'SCRIPT_NAME': '',
48 'SCRIPT_NAME': '',
49 'PATH_INFO': '/',
49 'PATH_INFO': '/',
50 'QUERY_STRING': '',
50 'QUERY_STRING': '',
51 'foo.var': 'bla',
51 'foo.var': 'bla',
52 }
52 }
53
53
54
54
55 def test_complete_environ():
55 def test_complete_environ():
56 environ = dict(BASE_ENVIRON)
56 environ = dict(BASE_ENVIRON)
57 data = b"data"
57 data = b"data"
58 wsgi_app_caller._complete_environ(environ, data)
58 wsgi_app_caller._complete_environ(environ, data)
59 wsgiref.validate.check_environ(environ)
59 wsgiref.validate.check_environ(environ)
60
60
61 assert data == environ['wsgi.input'].read(1024)
61 assert data == environ['wsgi.input'].read(1024)
62
62
63
63
64 def test_start_response():
64 def test_start_response():
65 start_response = wsgi_app_caller._StartResponse()
65 start_response = wsgi_app_caller._StartResponse()
66 status = '200 OK'
66 status = '200 OK'
67 headers = [('Content-Type', 'text/plain')]
67 headers = [('Content-Type', 'text/plain')]
68 start_response(status, headers)
68 start_response(status, headers)
69
69
70 assert status == start_response.status
70 assert status == start_response.status
71 assert headers == start_response.headers
71 assert headers == start_response.headers
72
72
73
73
74 def test_start_response_with_error():
74 def test_start_response_with_error():
75 start_response = wsgi_app_caller._StartResponse()
75 start_response = wsgi_app_caller._StartResponse()
76 status = '500 Internal Server Error'
76 status = '500 Internal Server Error'
77 headers = [('Content-Type', 'text/plain')]
77 headers = [('Content-Type', 'text/plain')]
78 start_response(status, headers, (None, None, None))
78 start_response(status, headers, (None, None, None))
79
79
80 assert status == start_response.status
80 assert status == start_response.status
81 assert headers == start_response.headers
81 assert headers == start_response.headers
82
82
83
83
84 def test_wsgi_app_caller():
84 def test_wsgi_app_caller():
85 environ = dict(BASE_ENVIRON)
85 environ = dict(BASE_ENVIRON)
86 input_data = 'some text'
86 input_data = 'some text'
87
87
88 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
88 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
89 responses, status, headers = caller.handle(environ, input_data)
89 responses, status, headers = caller.handle(environ, input_data)
90 response = b''.join(responses)
90 response = b''.join(responses)
91
91
92 assert status == '200 OK'
92 assert status == '200 OK'
93 assert headers == [('Content-Type', 'text/plain')]
93 assert headers == [('Content-Type', 'text/plain')]
94 assert response.startswith(b'Old school write method\n***********************\n')
94 assert response.startswith(b'Old school write method\n***********************\n')
95 assert b'Hello World!\n' in response
95 assert b'Hello World!\n' in response
96 assert b'foo.var=bla\n' in response
96 assert b'foo.var=bla\n' in response
97
97
98 assert ascii_bytes(f'input_data={input_data}\n') in response
98 assert ascii_bytes(f'input_data={input_data}\n') in response
@@ -1,17 +1,17 b''
1 # Copyright (C) 2016-2020 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -1,123 +1,123 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import base64
17 import base64
18 import time
18 import time
19 import logging
19 import logging
20
20
21 import msgpack
21 import msgpack
22
22
23 import vcsserver
23 import vcsserver
24 from vcsserver.str_utils import safe_str, ascii_str
24 from vcsserver.str_utils import safe_str, ascii_str
25
25
26 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
27
27
28
28
29 def get_access_path(environ):
29 def get_access_path(environ):
30 path = environ.get('PATH_INFO')
30 path = environ.get('PATH_INFO')
31 return path
31 return path
32
32
33
33
34 def get_user_agent(environ):
34 def get_user_agent(environ):
35 return environ.get('HTTP_USER_AGENT')
35 return environ.get('HTTP_USER_AGENT')
36
36
37
37
38 def get_call_context(request) -> dict:
38 def get_call_context(request) -> dict:
39 cc = {}
39 cc = {}
40 registry = request.registry
40 registry = request.registry
41 if hasattr(registry, 'vcs_call_context'):
41 if hasattr(registry, 'vcs_call_context'):
42 cc.update({
42 cc.update({
43 'X-RC-Method': registry.vcs_call_context.get('method'),
43 'X-RC-Method': registry.vcs_call_context.get('method'),
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
45 })
45 })
46
46
47 return cc
47 return cc
48
48
49
49
50 def get_headers_call_context(environ, strict=True):
50 def get_headers_call_context(environ, strict=True):
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
53 return msgpack.unpackb(packed_cc)
53 return msgpack.unpackb(packed_cc)
54 elif strict:
54 elif strict:
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
56
56
57
57
58 class RequestWrapperTween(object):
58 class RequestWrapperTween(object):
59 def __init__(self, handler, registry):
59 def __init__(self, handler, registry):
60 self.handler = handler
60 self.handler = handler
61 self.registry = registry
61 self.registry = registry
62
62
63 # one-time configuration code goes here
63 # one-time configuration code goes here
64
64
65 def __call__(self, request):
65 def __call__(self, request):
66 start = time.time()
66 start = time.time()
67 log.debug('Starting request time measurement')
67 log.debug('Starting request time measurement')
68 response = None
68 response = None
69
69
70 try:
70 try:
71 response = self.handler(request)
71 response = self.handler(request)
72 finally:
72 finally:
73 ua = get_user_agent(request.environ)
73 ua = get_user_agent(request.environ)
74 call_context = get_call_context(request)
74 call_context = get_call_context(request)
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
77
77
78 count = request.request_count()
78 count = request.request_count()
79 _ver_ = vcsserver.__version__
79 _ver_ = vcsserver.__version__
80 _path = safe_str(get_access_path(request.environ))
80 _path = safe_str(get_access_path(request.environ))
81
81
82 ip = '127.0.0.1'
82 ip = '127.0.0.1'
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
85
85
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
87
87
88 total = time.time() - start
88 total = time.time() - start
89
89
90 log.info(
90 log.info(
91 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
91 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
92 count, ip, request.environ.get('REQUEST_METHOD'),
92 count, ip, request.environ.get('REQUEST_METHOD'),
93 _view_path, total, ua, _ver_,
93 _view_path, total, ua, _ver_,
94 extra={"time": total, "ver": _ver_, "code": resp_code,
94 extra={"time": total, "ver": _ver_, "code": resp_code,
95 "path": _path, "view_name": match_route, "user_agent": ua,
95 "path": _path, "view_name": match_route, "user_agent": ua,
96 "vcs_method": vcs_method, "repo_name": repo_name}
96 "vcs_method": vcs_method, "repo_name": repo_name}
97 )
97 )
98
98
99 statsd = request.registry.statsd
99 statsd = request.registry.statsd
100 if statsd:
100 if statsd:
101 match_route = request.matched_route.name if request.matched_route else _path
101 match_route = request.matched_route.name if request.matched_route else _path
102 elapsed_time_ms = round(1000.0 * total) # use ms only
102 elapsed_time_ms = round(1000.0 * total) # use ms only
103 statsd.timing(
103 statsd.timing(
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
105 tags=[
105 tags=[
106 "view_name:{}".format(match_route),
106 "view_name:{}".format(match_route),
107 "code:{}".format(resp_code)
107 "code:{}".format(resp_code)
108 ],
108 ],
109 use_decimals=False
109 use_decimals=False
110 )
110 )
111 statsd.incr(
111 statsd.incr(
112 "vcsserver_req_total", tags=[
112 "vcsserver_req_total", tags=[
113 "view_name:{}".format(match_route),
113 "view_name:{}".format(match_route),
114 "code:{}".format(resp_code)
114 "code:{}".format(resp_code)
115 ])
115 ])
116
116
117 return response
117 return response
118
118
119
119
120 def includeme(config):
120 def includeme(config):
121 config.add_tween(
121 config.add_tween(
122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
123 )
123 )
@@ -1,67 +1,67 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import logging
19 import logging
20
20
21 log = logging.getLogger(__name__)
21 log = logging.getLogger(__name__)
22
22
23
23
24 def str2bool(str_):
24 def str2bool(str_):
25 """
25 """
26 returns True/False value from given string, it tries to translate the
26 returns True/False value from given string, it tries to translate the
27 string into boolean
27 string into boolean
28
28
29 :param str_: string value to translate into boolean
29 :param str_: string value to translate into boolean
30 :rtype: boolean
30 :rtype: boolean
31 :returns: boolean from given string
31 :returns: boolean from given string
32 """
32 """
33 if str_ is None:
33 if str_ is None:
34 return False
34 return False
35 if str_ in (True, False):
35 if str_ in (True, False):
36 return str_
36 return str_
37 str_ = str(str_).strip().lower()
37 str_ = str(str_).strip().lower()
38 return str_ in ('t', 'true', 'y', 'yes', 'on', '1')
38 return str_ in ('t', 'true', 'y', 'yes', 'on', '1')
39
39
40
40
41 def aslist(obj, sep=None, strip=True) -> list:
41 def aslist(obj, sep=None, strip=True) -> list:
42 """
42 """
43 Returns given string separated by sep as list
43 Returns given string separated by sep as list
44
44
45 :param obj:
45 :param obj:
46 :param sep:
46 :param sep:
47 :param strip:
47 :param strip:
48 """
48 """
49 if isinstance(obj, str):
49 if isinstance(obj, str):
50 if obj in ['', ""]:
50 if obj in ['', ""]:
51 return []
51 return []
52
52
53 lst = obj.split(sep)
53 lst = obj.split(sep)
54 if strip:
54 if strip:
55 lst = [v.strip() for v in lst]
55 lst = [v.strip() for v in lst]
56 return lst
56 return lst
57 elif isinstance(obj, (list, tuple)):
57 elif isinstance(obj, (list, tuple)):
58 return obj
58 return obj
59 elif obj is None:
59 elif obj is None:
60 return []
60 return []
61 else:
61 else:
62 return [obj]
62 return [obj]
63
63
64
64
65 def assert_bytes(input_type, expected_types=(bytes,)):
65 def assert_bytes(input_type, expected_types=(bytes,)):
66 if not isinstance(input_type, expected_types):
66 if not isinstance(input_type, expected_types):
67 raise ValueError(f'input_types should be one of {expected_types} got {type(input_type)} instead')
67 raise ValueError(f'input_types should be one of {expected_types} got {type(input_type)} instead')
@@ -1,54 +1,54 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18 import hashlib
19
19
20 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
21
21
22
22
23 class AttributeDictBase(dict):
23 class AttributeDictBase(dict):
24 def __getstate__(self):
24 def __getstate__(self):
25 odict = self.__dict__ # get attribute dictionary
25 odict = self.__dict__ # get attribute dictionary
26 return odict
26 return odict
27
27
28 def __setstate__(self, dict):
28 def __setstate__(self, dict):
29 self.__dict__ = dict
29 self.__dict__ = dict
30
30
31 __setattr__ = dict.__setitem__
31 __setattr__ = dict.__setitem__
32 __delattr__ = dict.__delitem__
32 __delattr__ = dict.__delitem__
33
33
34
34
35 class StrictAttributeDict(AttributeDictBase):
35 class StrictAttributeDict(AttributeDictBase):
36 """
36 """
37 Strict Version of Attribute dict which raises an Attribute error when
37 Strict Version of Attribute dict which raises an Attribute error when
38 requested attribute is not set
38 requested attribute is not set
39 """
39 """
40 def __getattr__(self, attr):
40 def __getattr__(self, attr):
41 try:
41 try:
42 return self[attr]
42 return self[attr]
43 except KeyError:
43 except KeyError:
44 raise AttributeError('{} object has no attribute {}'.format(
44 raise AttributeError('{} object has no attribute {}'.format(
45 self.__class__, attr))
45 self.__class__, attr))
46
46
47
47
48 class AttributeDict(AttributeDictBase):
48 class AttributeDict(AttributeDictBase):
49 def __getattr__(self, attr):
49 def __getattr__(self, attr):
50 return self.get(attr, None)
50 return self.get(attr, None)
51
51
52
52
53 def sha1(val):
53 def sha1(val):
54 return hashlib.sha1(val).hexdigest()
54 return hashlib.sha1(val).hexdigest()
@@ -1,47 +1,47 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib import rc_cache
18 from vcsserver.lib import rc_cache
19
19
20
20
21 class RemoteBase(object):
21 class RemoteBase(object):
22 EMPTY_COMMIT = '0' * 40
22 EMPTY_COMMIT = '0' * 40
23
23
24 def _region(self, wire):
24 def _region(self, wire):
25 cache_repo_id = wire.get('cache_repo_id', '')
25 cache_repo_id = wire.get('cache_repo_id', '')
26 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
26 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
27 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
27 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
28
28
29 def _cache_on(self, wire):
29 def _cache_on(self, wire):
30 context = wire.get('context', '')
30 context = wire.get('context', '')
31 context_uid = f'{context}'
31 context_uid = f'{context}'
32 repo_id = wire.get('repo_id', '')
32 repo_id = wire.get('repo_id', '')
33 cache = wire.get('cache', True)
33 cache = wire.get('cache', True)
34 cache_on = context and cache
34 cache_on = context and cache
35 return cache_on, context_uid, repo_id
35 return cache_on, context_uid, repo_id
36
36
37 def vcsserver_invalidate_cache(self, wire, delete):
37 def vcsserver_invalidate_cache(self, wire, delete):
38 from vcsserver.lib import rc_cache
38 from vcsserver.lib import rc_cache
39 repo_id = wire.get('repo_id', '')
39 repo_id = wire.get('repo_id', '')
40 cache_repo_id = wire.get('cache_repo_id', '')
40 cache_repo_id = wire.get('cache_repo_id', '')
41 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
41 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
42
42
43 if delete:
43 if delete:
44 rc_cache.clear_cache_namespace(
44 rc_cache.clear_cache_namespace(
45 'repo_object', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
45 'repo_object', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
46
46
47 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
47 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
@@ -1,116 +1,116 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Extract the responses of a WSGI app."""
18 """Extract the responses of a WSGI app."""
19
19
20 __all__ = ('WSGIAppCaller',)
20 __all__ = ('WSGIAppCaller',)
21
21
22 import io
22 import io
23 import logging
23 import logging
24 import os
24 import os
25
25
26 from vcsserver.str_utils import ascii_bytes
26 from vcsserver.str_utils import ascii_bytes
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30 DEV_NULL = open(os.devnull)
30 DEV_NULL = open(os.devnull)
31
31
32
32
def _complete_environ(environ, input_data: bytes):
    """Fill in the standard ``wsgi.*`` keys missing from *environ*.

    :param environ: WSGI environment to update
    :type environ: dict
    :param input_data: data to be read by the app
    :type input_data: bytes
    """
    wsgi_defaults = {
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': 'http',
        'wsgi.multithread': True,
        'wsgi.multiprocess': True,
        'wsgi.run_once': False,
        'wsgi.input': io.BytesIO(input_data),
        'wsgi.errors': DEV_NULL,
    }
    environ.update(wsgi_defaults)
50
50
51
51
52 # pylint: disable=too-few-public-methods
52 # pylint: disable=too-few-public-methods
53 class _StartResponse(object):
53 class _StartResponse(object):
54 """Save the arguments of a start_response call."""
54 """Save the arguments of a start_response call."""
55
55
56 __slots__ = ['status', 'headers', 'content']
56 __slots__ = ['status', 'headers', 'content']
57
57
58 def __init__(self):
58 def __init__(self):
59 self.status = None
59 self.status = None
60 self.headers = None
60 self.headers = None
61 self.content = []
61 self.content = []
62
62
63 def __call__(self, status, headers, exc_info=None):
63 def __call__(self, status, headers, exc_info=None):
64 # TODO(skreft): do something meaningful with the exc_info
64 # TODO(skreft): do something meaningful with the exc_info
65 exc_info = None # avoid dangling circular reference
65 exc_info = None # avoid dangling circular reference
66 self.status = status
66 self.status = status
67 self.headers = headers
67 self.headers = headers
68
68
69 return self.write
69 return self.write
70
70
71 def write(self, content):
71 def write(self, content):
72 """Write method returning when calling this object.
72 """Write method returning when calling this object.
73
73
74 All the data written is then available in content.
74 All the data written is then available in content.
75 """
75 """
76 self.content.append(content)
76 self.content.append(content)
77
77
78
78
class WSGIAppCaller(object):
    """Calls a WSGI app and collects its complete response."""

    def __init__(self, app):
        """
        :param app: WSGI app to call
        """
        self.app = app

    def handle(self, environ, input_data):
        """Process a request with the WSGI app.

        The returned data of the app is fully consumed into a list; data
        emitted through the legacy ``write()`` callable is prepended to it.

        :param environ: WSGI environment to update
        :type environ: dict
        :param input_data: data to be read by the app
        :type input_data: str/bytes

        :returns: a tuple with the contents, status and headers
        :rtype: (list<str>, str, list<(str, str)>)
        """
        _complete_environ(environ, ascii_bytes(input_data, allow_bytes=True))
        start_response = _StartResponse()
        log.debug("Calling wrapped WSGI application")
        app_iter = self.app(environ, start_response)
        body = list(app_iter)
        written = start_response.content
        if written:
            log.debug("Adding returned response to response written via write()")
            body = written + body
        if hasattr(app_iter, 'close'):
            log.debug("Closing iterator from WSGI application")
            app_iter.close()

        log.debug("Handling of WSGI request done, returning response")
        return body, start_response.status, start_response.headers
General Comments 0
You need to be logged in to leave comments. Login now