##// END OF EJS Templates
core: updated copyrights to 2024
super-admin -
r1327:278da2b3 default
parent child Browse files
Show More
@@ -1,56 +1,56 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19 import pytest
19 import pytest
20
20
21
21
22 def pytest_addoption(parser):
22 def pytest_addoption(parser):
23 parser.addoption(
23 parser.addoption(
24 '--perf-repeat-vcs', type=int, default=100,
24 '--perf-repeat-vcs', type=int, default=100,
25 help="Number of repetitions in performance tests.")
25 help="Number of repetitions in performance tests.")
26
26
27
27
28 @pytest.fixture(scope='session')
28 @pytest.fixture(scope='session')
29 def repeat(request):
29 def repeat(request):
30 """
30 """
31 The number of repetitions is based on this fixture.
31 The number of repetitions is based on this fixture.
32
32
33 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
33 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
34 tests are not too slow in our default test suite.
34 tests are not too slow in our default test suite.
35 """
35 """
36 return request.config.getoption('--perf-repeat-vcs')
36 return request.config.getoption('--perf-repeat-vcs')
37
37
38
38
39 @pytest.fixture(scope='session')
39 @pytest.fixture(scope='session')
40 def vcsserver_port(request):
40 def vcsserver_port(request):
41 port = get_available_port()
41 port = get_available_port()
42 print(f'Using vcsserver port {port}')
42 print(f'Using vcsserver port {port}')
43 return port
43 return port
44
44
45
45
46 def get_available_port():
46 def get_available_port():
47 family = socket.AF_INET
47 family = socket.AF_INET
48 socktype = socket.SOCK_STREAM
48 socktype = socket.SOCK_STREAM
49 host = '127.0.0.1'
49 host = '127.0.0.1'
50
50
51 mysocket = socket.socket(family, socktype)
51 mysocket = socket.socket(family, socktype)
52 mysocket.bind((host, 0))
52 mysocket.bind((host, 0))
53 port = mysocket.getsockname()[1]
53 port = mysocket.getsockname()[1]
54 mysocket.close()
54 mysocket.close()
55 del mysocket
55 del mysocket
56 return port
56 return port
@@ -1,70 +1,70 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 __version__ = ''
20 __version__ = ''
21
21
22
22
23 def get_version():
23 def get_version():
24 global __version__
24 global __version__
25 if __version__:
25 if __version__:
26 return __version__
26 return __version__
27
27
28 here = os.path.abspath(os.path.dirname(__file__))
28 here = os.path.abspath(os.path.dirname(__file__))
29 ver_file = os.path.join(here, "VERSION")
29 ver_file = os.path.join(here, "VERSION")
30 with open(ver_file, "rt") as f:
30 with open(ver_file, "rt") as f:
31 version = f.read().strip()
31 version = f.read().strip()
32
32
33 __version__ = version
33 __version__ = version
34 return version
34 return version
35
35
36 # link to config for pyramid
36 # link to config for pyramid
37 CONFIG = {}
37 CONFIG = {}
38
38
39
39
40 class ConfigGet:
40 class ConfigGet:
41 NotGiven = object()
41 NotGiven = object()
42
42
43 def _get_val_or_missing(self, key, missing):
43 def _get_val_or_missing(self, key, missing):
44 if key not in CONFIG:
44 if key not in CONFIG:
45 if missing == self.NotGiven:
45 if missing == self.NotGiven:
46 return missing
46 return missing
47 # we don't get key, we don't get missing value, return nothing similar as config.get(key)
47 # we don't get key, we don't get missing value, return nothing similar as config.get(key)
48 return None
48 return None
49 else:
49 else:
50 val = CONFIG[key]
50 val = CONFIG[key]
51 return val
51 return val
52
52
53 def get_str(self, key, missing=NotGiven):
53 def get_str(self, key, missing=NotGiven):
54 from vcsserver.lib.str_utils import safe_str
54 from vcsserver.lib.str_utils import safe_str
55 val = self._get_val_or_missing(key, missing)
55 val = self._get_val_or_missing(key, missing)
56 return safe_str(val)
56 return safe_str(val)
57
57
58 def get_int(self, key, missing=NotGiven):
58 def get_int(self, key, missing=NotGiven):
59 from vcsserver.lib.str_utils import safe_int
59 from vcsserver.lib.str_utils import safe_int
60 val = self._get_val_or_missing(key, missing)
60 val = self._get_val_or_missing(key, missing)
61 return safe_int(val)
61 return safe_int(val)
62
62
63 def get_bool(self, key, missing=NotGiven):
63 def get_bool(self, key, missing=NotGiven):
64 from vcsserver.lib.type_utils import str2bool
64 from vcsserver.lib.type_utils import str2bool
65 val = self._get_val_or_missing(key, missing)
65 val = self._get_val_or_missing(key, missing)
66 return str2bool(val)
66 return str2bool(val)
67
67
68 # Populated with the settings dictionary from application init in
68 # Populated with the settings dictionary from application init in
69 #
69 #
70 PYRAMID_SETTINGS = {}
70 PYRAMID_SETTINGS = {}
@@ -1,187 +1,187 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import logging
20 import logging
21 import urllib.parse
21 import urllib.parse
22
22
23 from vcsserver.lib.archive_cache import get_archival_cache_store
23 from vcsserver.lib.archive_cache import get_archival_cache_store
24
24
25 from vcsserver import exceptions
25 from vcsserver import exceptions
26 from vcsserver.exceptions import NoContentException
26 from vcsserver.exceptions import NoContentException
27 from vcsserver.hgcompat import archival
27 from vcsserver.hgcompat import archival
28 from vcsserver.lib.str_utils import safe_bytes
28 from vcsserver.lib.str_utils import safe_bytes
29 from vcsserver.lib.exc_tracking import format_exc
29 from vcsserver.lib.exc_tracking import format_exc
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 class RepoFactory:
33 class RepoFactory:
34 """
34 """
35 Utility to create instances of repository
35 Utility to create instances of repository
36
36
37 It provides internal caching of the `repo` object based on
37 It provides internal caching of the `repo` object based on
38 the :term:`call context`.
38 the :term:`call context`.
39 """
39 """
40 repo_type = None
40 repo_type = None
41
41
42 def __init__(self):
42 def __init__(self):
43 pass
43 pass
44
44
45 def _create_config(self, path, config):
45 def _create_config(self, path, config):
46 config = {}
46 config = {}
47 return config
47 return config
48
48
49 def _create_repo(self, wire, create):
49 def _create_repo(self, wire, create):
50 raise NotImplementedError()
50 raise NotImplementedError()
51
51
52 def repo(self, wire, create=False):
52 def repo(self, wire, create=False):
53 raise NotImplementedError()
53 raise NotImplementedError()
54
54
55
55
56 def obfuscate_qs(query_string):
56 def obfuscate_qs(query_string):
57 if query_string is None:
57 if query_string is None:
58 return None
58 return None
59
59
60 parsed = []
60 parsed = []
61 for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
61 for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
62 if k in ['auth_token', 'api_key']:
62 if k in ['auth_token', 'api_key']:
63 v = "*****"
63 v = "*****"
64 parsed.append((k, v))
64 parsed.append((k, v))
65
65
66 return '&'.join('{}{}'.format(
66 return '&'.join('{}{}'.format(
67 k, f'={v}' if v else '') for k, v in parsed)
67 k, f'={v}' if v else '') for k, v in parsed)
68
68
69
69
70 def raise_from_original(new_type, org_exc: Exception):
70 def raise_from_original(new_type, org_exc: Exception):
71 """
71 """
72 Raise a new exception type with original args and traceback.
72 Raise a new exception type with original args and traceback.
73 """
73 """
74 exc_info = sys.exc_info()
74 exc_info = sys.exc_info()
75 exc_type, exc_value, exc_traceback = exc_info
75 exc_type, exc_value, exc_traceback = exc_info
76 new_exc = new_type(*exc_value.args)
76 new_exc = new_type(*exc_value.args)
77
77
78 # store the original traceback into the new exc
78 # store the original traceback into the new exc
79 new_exc._org_exc_tb = format_exc(exc_info)
79 new_exc._org_exc_tb = format_exc(exc_info)
80
80
81 try:
81 try:
82 raise new_exc.with_traceback(exc_traceback)
82 raise new_exc.with_traceback(exc_traceback)
83 finally:
83 finally:
84 del exc_traceback
84 del exc_traceback
85
85
86
86
87 class ArchiveNode:
87 class ArchiveNode:
88 def __init__(self, path, mode, is_link, raw_bytes):
88 def __init__(self, path, mode, is_link, raw_bytes):
89 self.path = path
89 self.path = path
90 self.mode = mode
90 self.mode = mode
91 self.is_link = is_link
91 self.is_link = is_link
92 self.raw_bytes = raw_bytes
92 self.raw_bytes = raw_bytes
93
93
94
94
95 def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
95 def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
96 commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
96 commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
97 """
97 """
98 Function that would store generate archive and send it to a dedicated backend store
98 Function that would store generate archive and send it to a dedicated backend store
99 In here we use diskcache
99 In here we use diskcache
100
100
101 :param node_walker: a generator returning nodes to add to archive
101 :param node_walker: a generator returning nodes to add to archive
102 :param archive_key: key used to store the path
102 :param archive_key: key used to store the path
103 :param kind: archive kind
103 :param kind: archive kind
104 :param mtime: time of creation
104 :param mtime: time of creation
105 :param archive_at_path: default '/' the path at archive was started.
105 :param archive_at_path: default '/' the path at archive was started.
106 If this is not '/' it means it's a partial archive
106 If this is not '/' it means it's a partial archive
107 :param archive_dir_name: inside dir name when creating an archive
107 :param archive_dir_name: inside dir name when creating an archive
108 :param commit_id: commit sha of revision archive was created at
108 :param commit_id: commit sha of revision archive was created at
109 :param write_metadata:
109 :param write_metadata:
110 :param extra_metadata:
110 :param extra_metadata:
111 :param cache_config:
111 :param cache_config:
112
112
113 walker should be a file walker, for example,
113 walker should be a file walker, for example,
114 def node_walker():
114 def node_walker():
115 for file_info in files:
115 for file_info in files:
116 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
116 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
117 """
117 """
118 extra_metadata = extra_metadata or {}
118 extra_metadata = extra_metadata or {}
119
119
120 d_cache = get_archival_cache_store(config=cache_config)
120 d_cache = get_archival_cache_store(config=cache_config)
121
121
122 if archive_key in d_cache:
122 if archive_key in d_cache:
123 reader, metadata = d_cache.fetch(archive_key)
123 reader, metadata = d_cache.fetch(archive_key)
124 return reader.name
124 return reader.name
125
125
126 archive_tmp_path = safe_bytes(tempfile.mkstemp()[1])
126 archive_tmp_path = safe_bytes(tempfile.mkstemp()[1])
127 log.debug('Creating new temp archive in %s', archive_tmp_path)
127 log.debug('Creating new temp archive in %s', archive_tmp_path)
128
128
129 if kind == "tgz":
129 if kind == "tgz":
130 archiver = archival.tarit(archive_tmp_path, mtime, b"gz")
130 archiver = archival.tarit(archive_tmp_path, mtime, b"gz")
131 elif kind == "tbz2":
131 elif kind == "tbz2":
132 archiver = archival.tarit(archive_tmp_path, mtime, b"bz2")
132 archiver = archival.tarit(archive_tmp_path, mtime, b"bz2")
133 elif kind == 'zip':
133 elif kind == 'zip':
134 archiver = archival.zipit(archive_tmp_path, mtime)
134 archiver = archival.zipit(archive_tmp_path, mtime)
135 else:
135 else:
136 raise exceptions.ArchiveException()(
136 raise exceptions.ArchiveException()(
137 f'Remote does not support: "{kind}" archive type.')
137 f'Remote does not support: "{kind}" archive type.')
138
138
139 for f in node_walker(commit_id, archive_at_path):
139 for f in node_walker(commit_id, archive_at_path):
140 f_path = os.path.join(safe_bytes(archive_dir_name), safe_bytes(f.path).lstrip(b'/'))
140 f_path = os.path.join(safe_bytes(archive_dir_name), safe_bytes(f.path).lstrip(b'/'))
141
141
142 try:
142 try:
143 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
143 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
144 except NoContentException:
144 except NoContentException:
145 # NOTE(marcink): this is a special case for SVN so we can create "empty"
145 # NOTE(marcink): this is a special case for SVN so we can create "empty"
146 # directories which are not supported by archiver
146 # directories which are not supported by archiver
147 archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')
147 archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')
148
148
149 metadata = dict([
149 metadata = dict([
150 ('commit_id', commit_id),
150 ('commit_id', commit_id),
151 ('mtime', mtime),
151 ('mtime', mtime),
152 ])
152 ])
153 metadata.update(extra_metadata)
153 metadata.update(extra_metadata)
154 if write_metadata:
154 if write_metadata:
155 meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
155 meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
156 f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
156 f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
157 archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))
157 archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))
158
158
159 archiver.done()
159 archiver.done()
160
160
161 with open(archive_tmp_path, 'rb') as archive_file:
161 with open(archive_tmp_path, 'rb') as archive_file:
162 add_result = d_cache.store(archive_key, archive_file, metadata=metadata)
162 add_result = d_cache.store(archive_key, archive_file, metadata=metadata)
163 if not add_result:
163 if not add_result:
164 log.error('Failed to store cache for key=%s', archive_key)
164 log.error('Failed to store cache for key=%s', archive_key)
165
165
166 os.remove(archive_tmp_path)
166 os.remove(archive_tmp_path)
167
167
168 reader, metadata = d_cache.fetch(archive_key)
168 reader, metadata = d_cache.fetch(archive_key)
169
169
170 return reader.name
170 return reader.name
171
171
172
172
173 class BinaryEnvelope:
173 class BinaryEnvelope:
174 def __init__(self, val):
174 def __init__(self, val):
175 self.val = val
175 self.val = val
176
176
177
177
178 class BytesEnvelope(bytes):
178 class BytesEnvelope(bytes):
179 def __new__(cls, content):
179 def __new__(cls, content):
180 if isinstance(content, bytes):
180 if isinstance(content, bytes):
181 return super().__new__(cls, content)
181 return super().__new__(cls, content)
182 else:
182 else:
183 raise TypeError('BytesEnvelope content= param must be bytes. Use BinaryEnvelope to wrap other types')
183 raise TypeError('BytesEnvelope content= param must be bytes. Use BinaryEnvelope to wrap other types')
184
184
185
185
186 class BinaryBytesEnvelope(BytesEnvelope):
186 class BinaryBytesEnvelope(BytesEnvelope):
187 pass
187 pass
@@ -1,1 +1,1 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
@@ -1,27 +1,27 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 HOOK_REPO_SIZE = 'changegroup.repo_size'
19 HOOK_REPO_SIZE = 'changegroup.repo_size'
20
20
21 # HG
21 # HG
22 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
22 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
23 HOOK_PULL = 'outgoing.pull_logger'
23 HOOK_PULL = 'outgoing.pull_logger'
24 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
24 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
25 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
25 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
26 HOOK_PUSH = 'changegroup.push_logger'
26 HOOK_PUSH = 'changegroup.push_logger'
27 HOOK_PUSH_KEY = 'pushkey.key_push'
27 HOOK_PUSH_KEY = 'pushkey.key_push'
@@ -1,185 +1,185 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import textwrap
20 import textwrap
21 import string
21 import string
22 import functools
22 import functools
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import logging.config
25 import logging.config
26
26
27 from vcsserver.lib.type_utils import str2bool, aslist
27 from vcsserver.lib.type_utils import str2bool, aslist
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 # skip keys, that are set here, so we don't double process those
32 # skip keys, that are set here, so we don't double process those
33 set_keys = {
33 set_keys = {
34 '__file__': ''
34 '__file__': ''
35 }
35 }
36
36
37
37
38 class SettingsMaker:
38 class SettingsMaker:
39
39
40 def __init__(self, app_settings):
40 def __init__(self, app_settings):
41 self.settings = app_settings
41 self.settings = app_settings
42
42
43 @classmethod
43 @classmethod
44 def _bool_func(cls, input_val):
44 def _bool_func(cls, input_val):
45 if isinstance(input_val, bytes):
45 if isinstance(input_val, bytes):
46 # decode to str
46 # decode to str
47 input_val = input_val.decode('utf8')
47 input_val = input_val.decode('utf8')
48 return str2bool(input_val)
48 return str2bool(input_val)
49
49
50 @classmethod
50 @classmethod
51 def _int_func(cls, input_val):
51 def _int_func(cls, input_val):
52 return int(input_val)
52 return int(input_val)
53
53
54 @classmethod
54 @classmethod
55 def _float_func(cls, input_val):
55 def _float_func(cls, input_val):
56 return float(input_val)
56 return float(input_val)
57
57
58 @classmethod
58 @classmethod
59 def _list_func(cls, input_val, sep=','):
59 def _list_func(cls, input_val, sep=','):
60 return aslist(input_val, sep=sep)
60 return aslist(input_val, sep=sep)
61
61
62 @classmethod
62 @classmethod
63 def _string_func(cls, input_val, lower=True):
63 def _string_func(cls, input_val, lower=True):
64 if lower:
64 if lower:
65 input_val = input_val.lower()
65 input_val = input_val.lower()
66 return input_val
66 return input_val
67
67
68 @classmethod
68 @classmethod
69 def _string_no_quote_func(cls, input_val, lower=True):
69 def _string_no_quote_func(cls, input_val, lower=True):
70 """
70 """
71 Special case string function that detects if value is set to empty quote string
71 Special case string function that detects if value is set to empty quote string
72 e.g.
72 e.g.
73
73
74 core.binary_dir = ""
74 core.binary_dir = ""
75 """
75 """
76
76
77 input_val = cls._string_func(input_val, lower=lower)
77 input_val = cls._string_func(input_val, lower=lower)
78 if input_val in ['""', "''"]:
78 if input_val in ['""', "''"]:
79 return ''
79 return ''
80 return input_val
80 return input_val
81
81
82 @classmethod
82 @classmethod
83 def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
83 def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
84
84
85 # ensure we have our dir created
85 # ensure we have our dir created
86 if not os.path.isdir(input_val) and ensure_dir:
86 if not os.path.isdir(input_val) and ensure_dir:
87 os.makedirs(input_val, mode=mode, exist_ok=True)
87 os.makedirs(input_val, mode=mode, exist_ok=True)
88
88
89 if not os.path.isdir(input_val):
89 if not os.path.isdir(input_val):
90 raise Exception(f'Dir at {input_val} does not exist')
90 raise Exception(f'Dir at {input_val} does not exist')
91 return input_val
91 return input_val
92
92
93 @classmethod
93 @classmethod
94 def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755):
94 def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755):
95 dirname = os.path.dirname(input_val)
95 dirname = os.path.dirname(input_val)
96 cls._dir_func(dirname, ensure_dir=ensure_dir)
96 cls._dir_func(dirname, ensure_dir=ensure_dir)
97 return input_val
97 return input_val
98
98
99 @classmethod
99 @classmethod
100 def _key_transformator(cls, key):
100 def _key_transformator(cls, key):
101 return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_'))
101 return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_'))
102
102
103 def maybe_env_key(self, key):
103 def maybe_env_key(self, key):
104 # now maybe we have this KEY in env, search and use the value with higher priority.
104 # now maybe we have this KEY in env, search and use the value with higher priority.
105 transformed_key = self._key_transformator(key)
105 transformed_key = self._key_transformator(key)
106 envvar_value = os.environ.get(transformed_key)
106 envvar_value = os.environ.get(transformed_key)
107 if envvar_value:
107 if envvar_value:
108 log.debug('using `%s` key instead of `%s` key for config', transformed_key, key)
108 log.debug('using `%s` key instead of `%s` key for config', transformed_key, key)
109
109
110 return envvar_value
110 return envvar_value
111
111
112 def env_expand(self):
112 def env_expand(self):
113 replaced = {}
113 replaced = {}
114 for k, v in self.settings.items():
114 for k, v in self.settings.items():
115 if k not in set_keys:
115 if k not in set_keys:
116 envvar_value = self.maybe_env_key(k)
116 envvar_value = self.maybe_env_key(k)
117 if envvar_value:
117 if envvar_value:
118 replaced[k] = envvar_value
118 replaced[k] = envvar_value
119 set_keys[k] = envvar_value
119 set_keys[k] = envvar_value
120
120
121 # replace ALL keys updated
121 # replace ALL keys updated
122 self.settings.update(replaced)
122 self.settings.update(replaced)
123
123
124 def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'):
124 def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'):
125 """
125 """
126 Helper to enable debug on running instance
126 Helper to enable debug on running instance
127 :return:
127 :return:
128 """
128 """
129
129
130 if not str2bool(self.settings.get('logging.autoconfigure')):
130 if not str2bool(self.settings.get('logging.autoconfigure')):
131 log.info('logging configuration based on main .ini file')
131 log.info('logging configuration based on main .ini file')
132 return
132 return
133
133
134 if logging_conf is None:
134 if logging_conf is None:
135 logging_conf = self.settings.get('logging.logging_conf_file') or ''
135 logging_conf = self.settings.get('logging.logging_conf_file') or ''
136
136
137 if not os.path.isfile(logging_conf):
137 if not os.path.isfile(logging_conf):
138 log.error('Unable to setup logging based on %s, '
138 log.error('Unable to setup logging based on %s, '
139 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
139 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
140 return
140 return
141
141
142 with open(logging_conf, 'rt') as f:
142 with open(logging_conf, 'rt') as f:
143 ini_template = textwrap.dedent(f.read())
143 ini_template = textwrap.dedent(f.read())
144 ini_template = string.Template(ini_template).safe_substitute(
144 ini_template = string.Template(ini_template).safe_substitute(
145 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
145 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
146 RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter
146 RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter
147 )
147 )
148
148
149 with tempfile.NamedTemporaryFile(prefix='rc_logging_', suffix='.ini', delete=False) as f:
149 with tempfile.NamedTemporaryFile(prefix='rc_logging_', suffix='.ini', delete=False) as f:
150 log.info('Saved Temporary LOGGING config at %s', f.name)
150 log.info('Saved Temporary LOGGING config at %s', f.name)
151 f.write(ini_template)
151 f.write(ini_template)
152
152
153 logging.config.fileConfig(f.name)
153 logging.config.fileConfig(f.name)
154 os.remove(f.name)
154 os.remove(f.name)
155
155
156 def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None):
156 def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None):
157 input_val = self.settings.get(key, default)
157 input_val = self.settings.get(key, default)
158
158
159 if default_when_empty and not input_val:
159 if default_when_empty and not input_val:
160 # use default value when value is set in the config but it is empty
160 # use default value when value is set in the config but it is empty
161 input_val = default
161 input_val = default
162
162
163 parser_func = {
163 parser_func = {
164 'bool': self._bool_func,
164 'bool': self._bool_func,
165 'int': self._int_func,
165 'int': self._int_func,
166 'float': self._float_func,
166 'float': self._float_func,
167 'list': self._list_func,
167 'list': self._list_func,
168 'list:newline': functools.partial(self._list_func, sep='/n'),
168 'list:newline': functools.partial(self._list_func, sep='/n'),
169 'list:spacesep': functools.partial(self._list_func, sep=' '),
169 'list:spacesep': functools.partial(self._list_func, sep=' '),
170 'string': functools.partial(self._string_func, lower=lower),
170 'string': functools.partial(self._string_func, lower=lower),
171 'string:noquote': functools.partial(self._string_no_quote_func, lower=lower),
171 'string:noquote': functools.partial(self._string_no_quote_func, lower=lower),
172 'dir': self._dir_func,
172 'dir': self._dir_func,
173 'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
173 'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
174 'file': self._file_path_func,
174 'file': self._file_path_func,
175 'file:ensured': functools.partial(self._file_path_func, ensure_dir=True),
175 'file:ensured': functools.partial(self._file_path_func, ensure_dir=True),
176 None: lambda i: i
176 None: lambda i: i
177 }[parser]
177 }[parser]
178
178
179 envvar_value = self.maybe_env_key(key)
179 envvar_value = self.maybe_env_key(key)
180 if envvar_value:
180 if envvar_value:
181 input_val = envvar_value
181 input_val = envvar_value
182 set_keys[key] = input_val
182 set_keys[key] = input_val
183
183
184 self.settings[key] = parser_func(input_val)
184 self.settings[key] = parser_func(input_val)
185 return self.settings[key]
185 return self.settings[key]
@@ -1,10 +1,10 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
2
2
3 """
3 """
4 Provides a stub implementation for VCS operations.
4 Provides a stub implementation for VCS operations.
5
5
6 Intended usage is to help in performance measurements. The basic idea is to
6 Intended usage is to help in performance measurements. The basic idea is to
7 implement an `EchoApp` which sends back what it gets. Based on a configuration
7 implement an `EchoApp` which sends back what it gets. Based on a configuration
8 parameter this app can be activated, so that it replaced the endpoints for Git
8 parameter this app can be activated, so that it replaced the endpoints for Git
9 and Mercurial.
9 and Mercurial.
10 """
10 """
@@ -1,56 +1,56 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
2
2
3 """
3 """
4 Implementation of :class:`EchoApp`.
4 Implementation of :class:`EchoApp`.
5
5
6 This WSGI application will just echo back the data which it recieves.
6 This WSGI application will just echo back the data which it recieves.
7 """
7 """
8
8
9 import logging
9 import logging
10
10
11
11
12 log = logging.getLogger(__name__)
12 log = logging.getLogger(__name__)
13
13
14
14
15 class EchoApp:
15 class EchoApp:
16
16
17 def __init__(self, repo_path, repo_name, config):
17 def __init__(self, repo_path, repo_name, config):
18 self._repo_path = repo_path
18 self._repo_path = repo_path
19 log.info("EchoApp initialized for %s", repo_path)
19 log.info("EchoApp initialized for %s", repo_path)
20
20
21 def __call__(self, environ, start_response):
21 def __call__(self, environ, start_response):
22 log.debug("EchoApp called for %s", self._repo_path)
22 log.debug("EchoApp called for %s", self._repo_path)
23 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
23 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
24 environ['wsgi.input'].read()
24 environ['wsgi.input'].read()
25 status = '200 OK'
25 status = '200 OK'
26 headers = [('Content-Type', 'text/plain')]
26 headers = [('Content-Type', 'text/plain')]
27 start_response(status, headers)
27 start_response(status, headers)
28 return [b"ECHO"]
28 return [b"ECHO"]
29
29
30
30
31 class EchoAppStream:
31 class EchoAppStream:
32
32
33 def __init__(self, repo_path, repo_name, config):
33 def __init__(self, repo_path, repo_name, config):
34 self._repo_path = repo_path
34 self._repo_path = repo_path
35 log.info("EchoApp initialized for %s", repo_path)
35 log.info("EchoApp initialized for %s", repo_path)
36
36
37 def __call__(self, environ, start_response):
37 def __call__(self, environ, start_response):
38 log.debug("EchoApp called for %s", self._repo_path)
38 log.debug("EchoApp called for %s", self._repo_path)
39 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
39 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
40 environ['wsgi.input'].read()
40 environ['wsgi.input'].read()
41 status = '200 OK'
41 status = '200 OK'
42 headers = [('Content-Type', 'text/plain')]
42 headers = [('Content-Type', 'text/plain')]
43 start_response(status, headers)
43 start_response(status, headers)
44
44
45 def generator():
45 def generator():
46 for _ in range(1000000):
46 for _ in range(1000000):
47 yield b"ECHO_STREAM"
47 yield b"ECHO_STREAM"
48 return generator()
48 return generator()
49
49
50
50
51 def create_app():
51 def create_app():
52 """
52 """
53 Allows to run this app directly in a WSGI server.
53 Allows to run this app directly in a WSGI server.
54 """
54 """
55 stub_config = {}
55 stub_config = {}
56 return EchoApp('stub_path', 'stub_name', stub_config)
56 return EchoApp('stub_path', 'stub_name', stub_config)
@@ -1,47 +1,47 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
2
2
3 """
3 """
4 Provides the same API as :mod:`remote_wsgi`.
4 Provides the same API as :mod:`remote_wsgi`.
5
5
6 Uses the `EchoApp` instead of real implementations.
6 Uses the `EchoApp` instead of real implementations.
7 """
7 """
8
8
9 import logging
9 import logging
10
10
11 from .echo_app import EchoApp
11 from .echo_app import EchoApp
12 from vcsserver import wsgi_app_caller
12 from vcsserver import wsgi_app_caller
13
13
14
14
15 log = logging.getLogger(__name__)
15 log = logging.getLogger(__name__)
16
16
17
17
18 class GitRemoteWsgi:
18 class GitRemoteWsgi:
19 def handle(self, environ, input_data, *args, **kwargs):
19 def handle(self, environ, input_data, *args, **kwargs):
20 app = wsgi_app_caller.WSGIAppCaller(
20 app = wsgi_app_caller.WSGIAppCaller(
21 create_echo_wsgi_app(*args, **kwargs))
21 create_echo_wsgi_app(*args, **kwargs))
22
22
23 return app.handle(environ, input_data)
23 return app.handle(environ, input_data)
24
24
25
25
26 class HgRemoteWsgi:
26 class HgRemoteWsgi:
27 def handle(self, environ, input_data, *args, **kwargs):
27 def handle(self, environ, input_data, *args, **kwargs):
28 app = wsgi_app_caller.WSGIAppCaller(
28 app = wsgi_app_caller.WSGIAppCaller(
29 create_echo_wsgi_app(*args, **kwargs))
29 create_echo_wsgi_app(*args, **kwargs))
30
30
31 return app.handle(environ, input_data)
31 return app.handle(environ, input_data)
32
32
33
33
34 def create_echo_wsgi_app(repo_path, repo_name, config):
34 def create_echo_wsgi_app(repo_path, repo_name, config):
35 log.debug("Creating EchoApp WSGI application")
35 log.debug("Creating EchoApp WSGI application")
36
36
37 _assert_valid_config(config)
37 _assert_valid_config(config)
38
38
39 # Remaining items are forwarded to have the extras available
39 # Remaining items are forwarded to have the extras available
40 return EchoApp(repo_path, repo_name, config=config)
40 return EchoApp(repo_path, repo_name, config=config)
41
41
42
42
43 def _assert_valid_config(config):
43 def _assert_valid_config(config):
44 config = config.copy()
44 config = config.copy()
45
45
46 # This is what git needs from config at this stage
46 # This is what git needs from config at this stage
47 config.pop('git_update_server_info')
47 config.pop('git_update_server_info')
@@ -1,138 +1,138 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28
28
29
29
30 def _make_exception(kind, org_exc, *args):
30 def _make_exception(kind, org_exc, *args):
31 """
31 """
32 Prepares a base `Exception` instance to be sent over the wire.
32 Prepares a base `Exception` instance to be sent over the wire.
33
33
34 To give our caller a hint what this is about, it will attach an attribute
34 To give our caller a hint what this is about, it will attach an attribute
35 `_vcs_kind` to the exception.
35 `_vcs_kind` to the exception.
36 """
36 """
37 exc = Exception(*args)
37 exc = Exception(*args)
38 exc._vcs_kind = kind
38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
39 exc._org_exc = org_exc
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 return exc
41 return exc
42
42
43
43
44 def AbortException(org_exc=None):
44 def AbortException(org_exc=None):
45 def _make_exception_wrapper(*args):
45 def _make_exception_wrapper(*args):
46 return _make_exception('abort', org_exc, *args)
46 return _make_exception('abort', org_exc, *args)
47 return _make_exception_wrapper
47 return _make_exception_wrapper
48
48
49
49
50 def ArchiveException(org_exc=None):
50 def ArchiveException(org_exc=None):
51 def _make_exception_wrapper(*args):
51 def _make_exception_wrapper(*args):
52 return _make_exception('archive', org_exc, *args)
52 return _make_exception('archive', org_exc, *args)
53 return _make_exception_wrapper
53 return _make_exception_wrapper
54
54
55
55
56 def LookupException(org_exc=None):
56 def LookupException(org_exc=None):
57 def _make_exception_wrapper(*args):
57 def _make_exception_wrapper(*args):
58 return _make_exception('lookup', org_exc, *args)
58 return _make_exception('lookup', org_exc, *args)
59 return _make_exception_wrapper
59 return _make_exception_wrapper
60
60
61
61
62 def VcsException(org_exc=None):
62 def VcsException(org_exc=None):
63 def _make_exception_wrapper(*args):
63 def _make_exception_wrapper(*args):
64 return _make_exception('error', org_exc, *args)
64 return _make_exception('error', org_exc, *args)
65 return _make_exception_wrapper
65 return _make_exception_wrapper
66
66
67
67
68 def LockedRepoException(org_exc=None):
68 def LockedRepoException(org_exc=None):
69 def _make_exception_wrapper(*args):
69 def _make_exception_wrapper(*args):
70 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception('repo_locked', org_exc, *args)
71 return _make_exception_wrapper
71 return _make_exception_wrapper
72
72
73
73
74 def RepositoryBranchProtectedException(org_exc=None):
74 def RepositoryBranchProtectedException(org_exc=None):
75 def _make_exception_wrapper(*args):
75 def _make_exception_wrapper(*args):
76 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception('repo_branch_protected', org_exc, *args)
77 return _make_exception_wrapper
77 return _make_exception_wrapper
78
78
79 def ClientNotSupportedException(org_exc=None):
79 def ClientNotSupportedException(org_exc=None):
80 def _make_exception_wrapper(*args):
80 def _make_exception_wrapper(*args):
81 return _make_exception('client_not_supported', org_exc, *args)
81 return _make_exception('client_not_supported', org_exc, *args)
82 return _make_exception_wrapper
82 return _make_exception_wrapper
83
83
84 def RequirementException(org_exc=None):
84 def RequirementException(org_exc=None):
85 def _make_exception_wrapper(*args):
85 def _make_exception_wrapper(*args):
86 return _make_exception('requirement', org_exc, *args)
86 return _make_exception('requirement', org_exc, *args)
87 return _make_exception_wrapper
87 return _make_exception_wrapper
88
88
89
89
90 def UnhandledException(org_exc=None):
90 def UnhandledException(org_exc=None):
91 def _make_exception_wrapper(*args):
91 def _make_exception_wrapper(*args):
92 return _make_exception('unhandled', org_exc, *args)
92 return _make_exception('unhandled', org_exc, *args)
93 return _make_exception_wrapper
93 return _make_exception_wrapper
94
94
95
95
96 def URLError(org_exc=None):
96 def URLError(org_exc=None):
97 def _make_exception_wrapper(*args):
97 def _make_exception_wrapper(*args):
98 return _make_exception('url_error', org_exc, *args)
98 return _make_exception('url_error', org_exc, *args)
99 return _make_exception_wrapper
99 return _make_exception_wrapper
100
100
101
101
102 def SubrepoMergeException(org_exc=None):
102 def SubrepoMergeException(org_exc=None):
103 def _make_exception_wrapper(*args):
103 def _make_exception_wrapper(*args):
104 return _make_exception('subrepo_merge_error', org_exc, *args)
104 return _make_exception('subrepo_merge_error', org_exc, *args)
105 return _make_exception_wrapper
105 return _make_exception_wrapper
106
106
107
107
108 class HTTPRepoLocked(HTTPLocked):
108 class HTTPRepoLocked(HTTPLocked):
109 """
109 """
110 Subclass of HTTPLocked response that allows to set the title and status
110 Subclass of HTTPLocked response that allows to set the title and status
111 code via constructor arguments.
111 code via constructor arguments.
112 """
112 """
113 def __init__(self, title, status_code=None, **kwargs):
113 def __init__(self, title, status_code=None, **kwargs):
114 self.code = status_code or HTTPLocked.code
114 self.code = status_code or HTTPLocked.code
115 self.title = title
115 self.title = title
116 super().__init__(**kwargs)
116 super().__init__(**kwargs)
117
117
118
118
119 class HTTPRepoBranchProtected(HTTPLocked):
119 class HTTPRepoBranchProtected(HTTPLocked):
120 def __init__(self, title, status_code=None, **kwargs):
120 def __init__(self, title, status_code=None, **kwargs):
121 self.code = status_code or HTTPLocked.code
121 self.code = status_code or HTTPLocked.code
122 self.title = title
122 self.title = title
123 super().__init__(**kwargs)
123 super().__init__(**kwargs)
124
124
125
125
126 class HTTPClientNotSupported(HTTPLocked):
126 class HTTPClientNotSupported(HTTPLocked):
127 def __init__(self, title, status_code=None, **kwargs):
127 def __init__(self, title, status_code=None, **kwargs):
128 self.code = status_code or HTTPLocked.code
128 self.code = status_code or HTTPLocked.code
129 self.title = title
129 self.title = title
130 super().__init__(**kwargs)
130 super().__init__(**kwargs)
131
131
132
132
133 class RefNotFoundException(KeyError):
133 class RefNotFoundException(KeyError):
134 pass
134 pass
135
135
136
136
137 class NoContentException(ValueError):
137 class NoContentException(ValueError):
138 pass
138 pass
@@ -1,19 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from .app import create_app # noqa
19 from .app import create_app # noqa
@@ -1,314 +1,314 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import hashlib
17 import hashlib
18 import re
18 import re
19 import logging
19 import logging
20
20
21 from gunicorn.http.errors import NoMoreData
21 from gunicorn.http.errors import NoMoreData
22 from pyramid.config import Configurator
22 from pyramid.config import Configurator
23 from pyramid.response import Response, FileIter
23 from pyramid.response import Response, FileIter
24 from pyramid.httpexceptions import (
24 from pyramid.httpexceptions import (
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPUnprocessableEntity)
26 HTTPUnprocessableEntity)
27
27
28 from vcsserver.lib.ext_json import json
28 from vcsserver.lib.ext_json import json
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.lib.str_utils import safe_int
31 from vcsserver.lib.str_utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
38
39
39
40 def write_response_error(http_exception, text=None):
40 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
42 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
43 _exception.content_type = content_type
44 if text:
44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
46 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
47 http_exception, text)
48 return _exception
48 return _exception
49
49
50
50
51 class AuthHeaderRequired:
51 class AuthHeaderRequired:
52 """
52 """
53 Decorator to check if request has proper auth-header
53 Decorator to check if request has proper auth-header
54 """
54 """
55
55
56 def __call__(self, func):
56 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
57 return get_cython_compat_decorator(self.__wrapper, func)
58
58
59 def __wrapper(self, func, *fargs, **fkwargs):
59 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
60 request = fargs[1]
61 auth = request.authorization
61 auth = request.authorization
62 if not auth:
62 if not auth:
63 log.debug('No auth header found, returning 403')
63 log.debug('No auth header found, returning 403')
64 return write_response_error(HTTPForbidden)
64 return write_response_error(HTTPForbidden)
65 return func(*fargs[1:], **fkwargs)
65 return func(*fargs[1:], **fkwargs)
66
66
67
67
68 # views
68 # views
69
69
70 def lfs_objects(request):
70 def lfs_objects(request):
71 # indicate not supported, V1 API
71 # indicate not supported, V1 API
72 log.warning('LFS: v1 api not supported, reporting it back to client')
72 log.warning('LFS: v1 api not supported, reporting it back to client')
73 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
74
74
75
75
76 @AuthHeaderRequired()
76 @AuthHeaderRequired()
77 def lfs_objects_batch(request):
77 def lfs_objects_batch(request):
78 """
78 """
79 The client sends the following information to the Batch endpoint to transfer some objects:
79 The client sends the following information to the Batch endpoint to transfer some objects:
80
80
81 operation - Should be download or upload.
81 operation - Should be download or upload.
82 transfers - An optional Array of String identifiers for transfer
82 transfers - An optional Array of String identifiers for transfer
83 adapters that the client has configured. If omitted, the basic
83 adapters that the client has configured. If omitted, the basic
84 transfer adapter MUST be assumed by the server.
84 transfer adapter MUST be assumed by the server.
85 objects - An Array of objects to download.
85 objects - An Array of objects to download.
86 oid - String OID of the LFS object.
86 oid - String OID of the LFS object.
87 size - Integer byte size of the LFS object. Must be at least zero.
87 size - Integer byte size of the LFS object. Must be at least zero.
88 """
88 """
89 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
90 auth = request.authorization
90 auth = request.authorization
91 repo = request.matchdict.get('repo')
91 repo = request.matchdict.get('repo')
92 data = request.json
92 data = request.json
93 operation = data.get('operation')
93 operation = data.get('operation')
94 http_scheme = request.registry.git_lfs_http_scheme
94 http_scheme = request.registry.git_lfs_http_scheme
95
95
96 if operation not in ('download', 'upload'):
96 if operation not in ('download', 'upload'):
97 log.debug('LFS: unsupported operation:%s', operation)
97 log.debug('LFS: unsupported operation:%s', operation)
98 return write_response_error(
98 return write_response_error(
99 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
99 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
100
100
101 if 'objects' not in data:
101 if 'objects' not in data:
102 log.debug('LFS: missing objects data')
102 log.debug('LFS: missing objects data')
103 return write_response_error(
103 return write_response_error(
104 HTTPBadRequest, 'missing objects data')
104 HTTPBadRequest, 'missing objects data')
105
105
106 log.debug('LFS: handling operation of type: %s', operation)
106 log.debug('LFS: handling operation of type: %s', operation)
107
107
108 objects = []
108 objects = []
109 for o in data['objects']:
109 for o in data['objects']:
110 try:
110 try:
111 oid = o['oid']
111 oid = o['oid']
112 obj_size = o['size']
112 obj_size = o['size']
113 except KeyError:
113 except KeyError:
114 log.exception('LFS, failed to extract data')
114 log.exception('LFS, failed to extract data')
115 return write_response_error(
115 return write_response_error(
116 HTTPBadRequest, 'unsupported data in objects')
116 HTTPBadRequest, 'unsupported data in objects')
117
117
118 obj_data = {'oid': oid}
118 obj_data = {'oid': oid}
119 if http_scheme == 'http':
119 if http_scheme == 'http':
120 # Note(marcink): when using http, we might have a custom port
120 # Note(marcink): when using http, we might have a custom port
121 # so we skip setting it to http, url dispatch then wont generate a port in URL
121 # so we skip setting it to http, url dispatch then wont generate a port in URL
122 # for development we need this
122 # for development we need this
123 http_scheme = None
123 http_scheme = None
124
124
125 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
125 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
126 _scheme=http_scheme)
126 _scheme=http_scheme)
127 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
127 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
128 _scheme=http_scheme)
128 _scheme=http_scheme)
129 store = LFSOidStore(
129 store = LFSOidStore(
130 oid, repo, store_location=request.registry.git_lfs_store_path)
130 oid, repo, store_location=request.registry.git_lfs_store_path)
131 handler = OidHandler(
131 handler = OidHandler(
132 store, repo, auth, oid, obj_size, obj_data,
132 store, repo, auth, oid, obj_size, obj_data,
133 obj_href, obj_verify_href)
133 obj_href, obj_verify_href)
134
134
135 # this verifies also OIDs
135 # this verifies also OIDs
136 actions, errors = handler.exec_operation(operation)
136 actions, errors = handler.exec_operation(operation)
137 if errors:
137 if errors:
138 log.warning('LFS: got following errors: %s', errors)
138 log.warning('LFS: got following errors: %s', errors)
139 obj_data['errors'] = errors
139 obj_data['errors'] = errors
140
140
141 if actions:
141 if actions:
142 obj_data['actions'] = actions
142 obj_data['actions'] = actions
143
143
144 obj_data['size'] = obj_size
144 obj_data['size'] = obj_size
145 obj_data['authenticated'] = True
145 obj_data['authenticated'] = True
146 objects.append(obj_data)
146 objects.append(obj_data)
147
147
148 result = {'objects': objects, 'transfer': 'basic'}
148 result = {'objects': objects, 'transfer': 'basic'}
149 log.debug('LFS Response %s', safe_result(result))
149 log.debug('LFS Response %s', safe_result(result))
150
150
151 return result
151 return result
152
152
153
153
154 def lfs_objects_oid_upload(request):
154 def lfs_objects_oid_upload(request):
155 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
155 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
156 repo = request.matchdict.get('repo')
156 repo = request.matchdict.get('repo')
157 oid = request.matchdict.get('oid')
157 oid = request.matchdict.get('oid')
158 store = LFSOidStore(
158 store = LFSOidStore(
159 oid, repo, store_location=request.registry.git_lfs_store_path)
159 oid, repo, store_location=request.registry.git_lfs_store_path)
160 engine = store.get_engine(mode='wb')
160 engine = store.get_engine(mode='wb')
161 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
161 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
162
162
163 # validate if OID is not by any chance already in the store
163 # validate if OID is not by any chance already in the store
164 if store.has_oid():
164 if store.has_oid():
165 log.debug('LFS: oid %s exists in store', oid)
165 log.debug('LFS: oid %s exists in store', oid)
166 return {'upload': 'ok', 'state': 'in-store'}
166 return {'upload': 'ok', 'state': 'in-store'}
167
167
168 body = request.environ['wsgi.input']
168 body = request.environ['wsgi.input']
169
169
170 digest = hashlib.sha256()
170 digest = hashlib.sha256()
171 with engine as f:
171 with engine as f:
172 blksize = 64 * 1024 # 64kb
172 blksize = 64 * 1024 # 64kb
173 while True:
173 while True:
174 # read in chunks as stream comes in from Gunicorn
174 # read in chunks as stream comes in from Gunicorn
175 # this is a specific Gunicorn support function.
175 # this is a specific Gunicorn support function.
176 # might work differently on waitress
176 # might work differently on waitress
177 try:
177 try:
178 chunk = body.read(blksize)
178 chunk = body.read(blksize)
179 except NoMoreData:
179 except NoMoreData:
180 chunk = None
180 chunk = None
181
181
182 if not chunk:
182 if not chunk:
183 break
183 break
184 f.write(chunk)
184 f.write(chunk)
185 digest.update(chunk)
185 digest.update(chunk)
186
186
187 hex_digest = digest.hexdigest()
187 hex_digest = digest.hexdigest()
188 digest_check = hex_digest == oid
188 digest_check = hex_digest == oid
189 if not digest_check:
189 if not digest_check:
190 engine.cleanup() # trigger cleanup so we don't save mismatch OID into the store
190 engine.cleanup() # trigger cleanup so we don't save mismatch OID into the store
191 return write_response_error(
191 return write_response_error(
192 HTTPBadRequest, f'oid {oid} does not match expected sha {hex_digest}')
192 HTTPBadRequest, f'oid {oid} does not match expected sha {hex_digest}')
193
193
194 return {'upload': 'ok', 'state': 'written'}
194 return {'upload': 'ok', 'state': 'written'}
195
195
196
196
197 def lfs_objects_oid_download(request):
197 def lfs_objects_oid_download(request):
198 repo = request.matchdict.get('repo')
198 repo = request.matchdict.get('repo')
199 oid = request.matchdict.get('oid')
199 oid = request.matchdict.get('oid')
200
200
201 store = LFSOidStore(
201 store = LFSOidStore(
202 oid, repo, store_location=request.registry.git_lfs_store_path)
202 oid, repo, store_location=request.registry.git_lfs_store_path)
203 if not store.has_oid():
203 if not store.has_oid():
204 log.debug('LFS: oid %s does not exists in store', oid)
204 log.debug('LFS: oid %s does not exists in store', oid)
205 return write_response_error(
205 return write_response_error(
206 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
206 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
207
207
208 # TODO(marcink): support range header ?
208 # TODO(marcink): support range header ?
209 # Range: bytes=0-, `bytes=(\d+)\-.*`
209 # Range: bytes=0-, `bytes=(\d+)\-.*`
210
210
211 f = open(store.oid_path, 'rb')
211 f = open(store.oid_path, 'rb')
212 response = Response(
212 response = Response(
213 content_type='application/octet-stream', app_iter=FileIter(f))
213 content_type='application/octet-stream', app_iter=FileIter(f))
214 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
214 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
215 return response
215 return response
216
216
217
217
def lfs_objects_verify(request):
    """LFS verification endpoint.

    Checks that the oid named in the JSON request body exists in the store
    and that its on-disk size matches the size the client declared.
    Replies with 400/404/422 JSON errors accordingly, or a success message
    echoing size and oid.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    repo = request.matchdict.get('repo')

    payload = request.json
    oid = payload.get('oid')
    size = safe_int(payload.get('size'))

    # both fields are mandatory; a size of 0/None is rejected as well
    if not (oid and size):
        return write_response_error(
            HTTPBadRequest, 'missing oid and size in request data')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, f'oid `{oid}` does not exists in store')

    store_size = store.size_oid()
    if store_size != size:
        msg = f'requested file size mismatch store size:{store_size} requested:{size}'
        return write_response_error(HTTPUnprocessableEntity, msg)

    return {'message': {'size': store_size, 'oid': oid}}
243
243
244
244
def lfs_objects_lock(request):
    """Locking API stub — always reports 501, locking is unsupported."""
    return write_response_error(
        HTTPNotImplemented, 'GIT LFS locking api not supported')
248
248
249
249
def not_found(request):
    """Catch-all 404 JSON handler for unknown LFS API paths."""
    return write_response_error(
        HTTPNotFound, 'request path not found')
253
253
254
254
def lfs_disabled(request):
    """Handler used when LFS support is switched off for the repository."""
    return write_response_error(
        HTTPNotImplemented, 'GIT LFS disabled for this repo')
258
258
259
259
def git_lfs_app(config):
    """Register all GIT LFS routes and views on the Pyramid configurator.

    Endpoints: deprecated v1 objects API, locking stubs, batch API,
    per-oid upload/download, verification, and a JSON 404 fallback.
    """

    def _register(route_name, pattern, view, request_method):
        # one route + one JSON view per endpoint; keeps registrations uniform
        config.add_route(route_name, pattern)
        config.add_view(view, route_name=route_name,
                        request_method=request_method, renderer='json')

    # v1 API deprecation endpoint
    _register('lfs_objects',
              '/{repo:.*?[^/]}/info/lfs/objects',
              lfs_objects, 'POST')

    # locking API (both routes answer with "not supported")
    _register('lfs_objects_lock',
              '/{repo:.*?[^/]}/info/lfs/locks',
              lfs_objects_lock, ('POST', 'GET'))
    _register('lfs_objects_lock_verify',
              '/{repo:.*?[^/]}/info/lfs/locks/verify',
              lfs_objects_lock, ('POST', 'GET'))

    # batch API
    _register('lfs_objects_batch',
              '/{repo:.*?[^/]}/info/lfs/objects/batch',
              lfs_objects_batch, 'POST')

    # oid upload/download API — one route, two method-dispatched views
    config.add_route('lfs_objects_oid',
                     '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
    config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
                    request_method='PUT', renderer='json')
    config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
                    request_method='GET', renderer='json')

    # verification API
    _register('lfs_objects_verify',
              '/{repo:.*?[^/]}/info/lfs/verify',
              lfs_objects_verify, 'POST')

    # not found handler for API
    config.add_notfound_view(not_found, renderer='json')
301
301
302
302
def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
    """Build the LFS WSGI application.

    When ``git_lfs_enabled`` is true, wires up the full LFS route set and
    stashes the store path/scheme on the registry for the views; otherwise
    every request is answered by the ``lfs_disabled`` 501 handler.
    """
    config = Configurator()
    if git_lfs_enabled:
        config.include(git_lfs_app)
        config.registry.git_lfs_store_path = git_lfs_store_path
        config.registry.git_lfs_http_scheme = git_lfs_http_scheme
    else:
        # not found handler for API, reporting disabled LFS support
        config.add_notfound_view(lfs_disabled, renderer='json')

    return config.make_wsgi_app()
@@ -1,185 +1,185 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import logging
20 import logging
21 from collections import OrderedDict
21 from collections import OrderedDict
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
class OidHandler:
    """Builds LFS batch-API action descriptors (download/upload/verify)
    for a single object id backed by ``store``.

    ``download``/``upload`` return ``(response, has_errors)`` tuples, where
    exactly one side is populated.
    """

    def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
                 obj_verify_href=None):
        self.current_store = store
        self.repo_name = repo_name
        self.auth = auth              # e.g. ('Basic', '<token>') split header
        self.oid = oid
        self.obj_size = obj_size
        self.obj_data = obj_data
        self.obj_href = obj_href
        self.obj_verify_href = obj_verify_href

    def get_store(self, mode=None):
        # ``mode`` is accepted for interface compatibility but unused
        return self.current_store

    def get_auth(self):
        """returns auth header for re-use in upload/download"""
        return " ".join(self.auth)

    def download(self):
        """Build the download action, or a 404 error if the oid is absent."""
        store = self.get_store()
        response = None
        has_errors = None

        if not store.has_oid():
            # error reply back to client that something is wrong with dl
            has_errors = OrderedDict(
                error=OrderedDict(
                    code=404,
                    message=f'object: {store.oid} does not exist in store'
                )
            )

        download_action = OrderedDict(
            href=self.obj_href,
            header=OrderedDict([("Authorization", self.get_auth())])
        )
        if not has_errors:
            response = OrderedDict(download=download_action)
        return response, has_errors

    def upload(self, skip_existing=True):
        """
        Write upload action for git-lfs server
        """
        store = self.get_store()
        response = None
        has_errors = None

        # verify if we have the OID before, if we do, reply with empty
        if store.has_oid():
            log.debug('LFS: store already has oid %s', store.oid)

            # validate size: a mismatch forces a fresh upload action
            store_size = store.size_oid()
            if store_size != self.obj_size:
                log.warning(
                    'LFS: size mismatch for oid:%s, in store:%s expected: %s',
                    self.oid, store_size, self.obj_size)
            elif skip_existing:
                log.debug('LFS: skipping further action as oid is existing')
                return response, has_errors

        chunked = ("Transfer-Encoding", "chunked")
        upload_action = OrderedDict(
            href=self.obj_href,
            header=OrderedDict([("Authorization", self.get_auth()), chunked])
        )
        if not has_errors:
            response = OrderedDict(upload=upload_action)
            # if specified in handler, return the verification endpoint
            if self.obj_verify_href:
                response['verify'] = OrderedDict(
                    href=self.obj_verify_href,
                    header=OrderedDict([("Authorization", self.get_auth())])
                )
        return response, has_errors

    def exec_operation(self, operation, *args, **kwargs):
        """Dispatch to ``download``/``upload`` by operation name."""
        handler = getattr(self, operation)
        log.debug('LFS: handling request using %s handler', handler)
        return handler(*args, **kwargs)
114
114
115
115
class LFSOidStore:
    """Filesystem store for a single LFS object, sharded by oid prefix."""

    def __init__(self, oid, repo, store_location=None):
        self.oid = oid
        self.repo = repo
        defined_store_path = store_location or self.get_default_store()
        # shard layout: <store>/objects/<oid[0:2]>/<oid[2:4]>/<oid>
        self.store_suffix = f"/objects/{oid[:2]}/{oid[2:4]}"
        self.store_path = f"{defined_store_path.rstrip('/')}{self.store_suffix}"
        self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
        self.oid_path = os.path.join(self.store_path, oid)
        self.fd = None

    def get_engine(self, mode):
        """
        engine = .get_engine(mode='wb')
        with engine as f:
            f.write('...')
        """

        class StoreEngine:
            # Context manager writing to a ``.tmp`` file that is promoted to
            # the final oid path on exit; ``cleanup()`` aborts the promotion
            # and removes the tmp file instead.
            _cleanup = None

            def __init__(self, mode, store_path, oid_path, tmp_oid_path):
                self.mode = mode
                self.store_path = store_path
                self.oid_path = oid_path
                self.tmp_oid_path = tmp_oid_path

            def cleanup(self):
                self._cleanup = True

            def __enter__(self):
                if not os.path.isdir(self.store_path):
                    os.makedirs(self.store_path)

                # TODO(marcink): maybe write metadata here with size/oid ?
                fd = open(self.tmp_oid_path, self.mode)
                self.fd = fd
                return fd

            def __exit__(self, exc_type, exc_value, traceback):
                self.fd.close()

                # NOTE(review): the tmp file is promoted even when an
                # exception escaped the ``with`` body (exc_type is ignored);
                # callers must invoke cleanup() to discard a bad write.
                if self._cleanup is None:
                    # close tmp file, and rename to final destination
                    shutil.move(self.tmp_oid_path, self.oid_path)
                else:
                    os.remove(self.tmp_oid_path)

        return StoreEngine(
            mode, self.store_path, self.oid_path, self.tmp_oid_path)

    def get_default_store(self):
        """
        Default store, consistent with defaults of Mercurial large files store
        which is /home/username/.cache/largefiles
        """
        user_home = os.path.expanduser("~")
        return os.path.join(user_home, '.cache', 'lfs-store')

    def has_oid(self):
        """True when the final (non-tmp) oid file exists on disk."""
        return os.path.exists(os.path.join(self.store_path, self.oid))

    def size_oid(self):
        """Size in bytes of the stored object, or -1 when absent."""
        size = -1

        if self.has_oid():
            oid = os.path.join(self.store_path, self.oid)
            size = os.stat(oid).st_size

        return size
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,310 +1,310 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21
21
22 from vcsserver.lib.ext_json import json
22 from vcsserver.lib.ext_json import json
23 from vcsserver.lib.str_utils import safe_bytes
23 from vcsserver.lib.str_utils import safe_bytes
24 from vcsserver.git_lfs.app import create_app
24 from vcsserver.git_lfs.app import create_app
25 from vcsserver.git_lfs.lib import LFSOidStore
25 from vcsserver.git_lfs.lib import LFSOidStore
26
26
27
27
@pytest.fixture(scope='function')
def git_lfs_app(tmpdir):
    """WSGI test app with LFS enabled over http, storing under ``tmpdir``."""
    custom_app = WebObTestApp(create_app(
        git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
        git_lfs_http_scheme='http'))
    # expose the store path so tests can seed/inspect stored objects
    custom_app._store = str(tmpdir)
    return custom_app
35
35
36
36
@pytest.fixture(scope='function')
def git_lfs_https_app(tmpdir):
    """Same as ``git_lfs_app`` but configured with the https scheme."""
    custom_app = WebObTestApp(create_app(
        git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
        git_lfs_http_scheme='https'))
    # expose the store path so tests can seed/inspect stored objects
    custom_app._store = str(tmpdir)
    return custom_app
44
44
45
45
@pytest.fixture()
def http_auth():
    """Extra WSGI environ carrying the basic-auth header the app expects."""
    return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
49
49
50
50
51 class TestLFSApplication:
51 class TestLFSApplication:
52
52
53 def test_app_wrong_path(self, git_lfs_app):
53 def test_app_wrong_path(self, git_lfs_app):
54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
55
55
56 def test_app_deprecated_endpoint(self, git_lfs_app):
56 def test_app_deprecated_endpoint(self, git_lfs_app):
57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
58 assert response.status_code == 501
58 assert response.status_code == 501
59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
60
60
61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
63 assert response.status_code == 501
63 assert response.status_code == 501
64 assert json.loads(response.text) == {
64 assert json.loads(response.text) == {
65 'message': 'GIT LFS locking api not supported'}
65 'message': 'GIT LFS locking api not supported'}
66
66
67 def test_app_lock_api_not_available(self, git_lfs_app):
67 def test_app_lock_api_not_available(self, git_lfs_app):
68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
69 assert response.status_code == 501
69 assert response.status_code == 501
70 assert json.loads(response.text) == {
70 assert json.loads(response.text) == {
71 'message': 'GIT LFS locking api not supported'}
71 'message': 'GIT LFS locking api not supported'}
72
72
73 def test_app_batch_api_missing_auth(self, git_lfs_app):
73 def test_app_batch_api_missing_auth(self, git_lfs_app):
74 git_lfs_app.post_json(
74 git_lfs_app.post_json(
75 '/repo/info/lfs/objects/batch', params={}, status=403)
75 '/repo/info/lfs/objects/batch', params={}, status=403)
76
76
77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
78 response = git_lfs_app.post_json(
78 response = git_lfs_app.post_json(
79 '/repo/info/lfs/objects/batch', params={}, status=400,
79 '/repo/info/lfs/objects/batch', params={}, status=400,
80 extra_environ=http_auth)
80 extra_environ=http_auth)
81 assert json.loads(response.text) == {
81 assert json.loads(response.text) == {
82 'message': 'unsupported operation mode: `None`'}
82 'message': 'unsupported operation mode: `None`'}
83
83
84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
85 response = git_lfs_app.post_json(
85 response = git_lfs_app.post_json(
86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
87 status=400, extra_environ=http_auth)
87 status=400, extra_environ=http_auth)
88 assert json.loads(response.text) == {
88 assert json.loads(response.text) == {
89 'message': 'missing objects data'}
89 'message': 'missing objects data'}
90
90
91 def test_app_batch_api_unsupported_data_in_objects(
91 def test_app_batch_api_unsupported_data_in_objects(
92 self, git_lfs_app, http_auth):
92 self, git_lfs_app, http_auth):
93 params = {'operation': 'download',
93 params = {'operation': 'download',
94 'objects': [{}]}
94 'objects': [{}]}
95 response = git_lfs_app.post_json(
95 response = git_lfs_app.post_json(
96 '/repo/info/lfs/objects/batch', params=params, status=400,
96 '/repo/info/lfs/objects/batch', params=params, status=400,
97 extra_environ=http_auth)
97 extra_environ=http_auth)
98 assert json.loads(response.text) == {
98 assert json.loads(response.text) == {
99 'message': 'unsupported data in objects'}
99 'message': 'unsupported data in objects'}
100
100
101 def test_app_batch_api_download_missing_object(
101 def test_app_batch_api_download_missing_object(
102 self, git_lfs_app, http_auth):
102 self, git_lfs_app, http_auth):
103 params = {
103 params = {
104 'operation': 'download',
104 'operation': 'download',
105 'objects': [{'oid': '123', 'size': '1024'}]
105 'objects': [{'oid': '123', 'size': '1024'}]
106 }
106 }
107 response = git_lfs_app.post_json(
107 response = git_lfs_app.post_json(
108 '/repo/info/lfs/objects/batch', params=params,
108 '/repo/info/lfs/objects/batch', params=params,
109 extra_environ=http_auth)
109 extra_environ=http_auth)
110
110
111 expected_objects = [
111 expected_objects = [
112 {
112 {
113 'oid': '123',
113 'oid': '123',
114 'size': '1024',
114 'size': '1024',
115 'authenticated': True,
115 'authenticated': True,
116 'errors': {'error': {'code': 404, 'message': 'object: 123 does not exist in store'}},
116 'errors': {'error': {'code': 404, 'message': 'object: 123 does not exist in store'}},
117 }
117 }
118 ]
118 ]
119
119
120 assert json.loads(response.text) == {
120 assert json.loads(response.text) == {
121 'objects': expected_objects,
121 'objects': expected_objects,
122 'transfer': 'basic'
122 'transfer': 'basic'
123 }
123 }
124
124
125 def test_app_batch_api_download(self, git_lfs_app, http_auth):
125 def test_app_batch_api_download(self, git_lfs_app, http_auth):
126 oid = '456'
126 oid = '456'
127 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
127 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
128 if not os.path.isdir(os.path.dirname(oid_path)):
128 if not os.path.isdir(os.path.dirname(oid_path)):
129 os.makedirs(os.path.dirname(oid_path))
129 os.makedirs(os.path.dirname(oid_path))
130 with open(oid_path, 'wb') as f:
130 with open(oid_path, 'wb') as f:
131 f.write(safe_bytes('OID_CONTENT'))
131 f.write(safe_bytes('OID_CONTENT'))
132
132
133 params = {'operation': 'download',
133 params = {'operation': 'download',
134 'objects': [{'oid': oid, 'size': '1024'}]}
134 'objects': [{'oid': oid, 'size': '1024'}]}
135 response = git_lfs_app.post_json(
135 response = git_lfs_app.post_json(
136 '/repo/info/lfs/objects/batch', params=params,
136 '/repo/info/lfs/objects/batch', params=params,
137 extra_environ=http_auth)
137 extra_environ=http_auth)
138
138
139 expected_objects = [
139 expected_objects = [
140 {'authenticated': True,
140 {'authenticated': True,
141 'actions': {
141 'actions': {
142 'download': {
142 'download': {
143 'header': {'Authorization': 'Basic XXXXX'},
143 'header': {'Authorization': 'Basic XXXXX'},
144 'href': 'http://localhost/repo/info/lfs/objects/456'},
144 'href': 'http://localhost/repo/info/lfs/objects/456'},
145 },
145 },
146 'oid': '456',
146 'oid': '456',
147 'size': '1024'}
147 'size': '1024'}
148 ]
148 ]
149 assert json.loads(response.text) == {
149 assert json.loads(response.text) == {
150 'objects': expected_objects,
150 'objects': expected_objects,
151 'transfer': 'basic'
151 'transfer': 'basic'
152 }
152 }
153
153
154 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
154 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
155 params = {'operation': 'upload',
155 params = {'operation': 'upload',
156 'objects': [{'oid': '123', 'size': '1024'}]}
156 'objects': [{'oid': '123', 'size': '1024'}]}
157 response = git_lfs_app.post_json(
157 response = git_lfs_app.post_json(
158 '/repo/info/lfs/objects/batch', params=params,
158 '/repo/info/lfs/objects/batch', params=params,
159 extra_environ=http_auth)
159 extra_environ=http_auth)
160 expected_objects = [
160 expected_objects = [
161 {
161 {
162 'authenticated': True,
162 'authenticated': True,
163 'actions': {
163 'actions': {
164 'upload': {
164 'upload': {
165 'header': {
165 'header': {
166 'Authorization': 'Basic XXXXX',
166 'Authorization': 'Basic XXXXX',
167 'Transfer-Encoding': 'chunked'
167 'Transfer-Encoding': 'chunked'
168 },
168 },
169 'href': 'http://localhost/repo/info/lfs/objects/123'
169 'href': 'http://localhost/repo/info/lfs/objects/123'
170 },
170 },
171 'verify': {
171 'verify': {
172 'header': {
172 'header': {
173 'Authorization': 'Basic XXXXX'
173 'Authorization': 'Basic XXXXX'
174 },
174 },
175 'href': 'http://localhost/repo/info/lfs/verify'
175 'href': 'http://localhost/repo/info/lfs/verify'
176 }
176 }
177 },
177 },
178 'oid': '123',
178 'oid': '123',
179 'size': '1024'
179 'size': '1024'
180 }
180 }
181 ]
181 ]
182 assert json.loads(response.text) == {
182 assert json.loads(response.text) == {
183 'objects': expected_objects,
183 'objects': expected_objects,
184 'transfer': 'basic'
184 'transfer': 'basic'
185 }
185 }
186
186
187 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
187 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
188 params = {'operation': 'upload',
188 params = {'operation': 'upload',
189 'objects': [{'oid': '123', 'size': '1024'}]}
189 'objects': [{'oid': '123', 'size': '1024'}]}
190 response = git_lfs_https_app.post_json(
190 response = git_lfs_https_app.post_json(
191 '/repo/info/lfs/objects/batch', params=params,
191 '/repo/info/lfs/objects/batch', params=params,
192 extra_environ=http_auth)
192 extra_environ=http_auth)
193 expected_objects = [
193 expected_objects = [
194 {'authenticated': True,
194 {'authenticated': True,
195 'actions': {
195 'actions': {
196 'upload': {
196 'upload': {
197 'header': {'Authorization': 'Basic XXXXX',
197 'header': {'Authorization': 'Basic XXXXX',
198 'Transfer-Encoding': 'chunked'},
198 'Transfer-Encoding': 'chunked'},
199 'href': 'https://localhost/repo/info/lfs/objects/123'},
199 'href': 'https://localhost/repo/info/lfs/objects/123'},
200 'verify': {
200 'verify': {
201 'header': {'Authorization': 'Basic XXXXX'},
201 'header': {'Authorization': 'Basic XXXXX'},
202 'href': 'https://localhost/repo/info/lfs/verify'}
202 'href': 'https://localhost/repo/info/lfs/verify'}
203 },
203 },
204 'oid': '123',
204 'oid': '123',
205 'size': '1024'}
205 'size': '1024'}
206 ]
206 ]
207 assert json.loads(response.text) == {
207 assert json.loads(response.text) == {
208 'objects': expected_objects, 'transfer': 'basic'}
208 'objects': expected_objects, 'transfer': 'basic'}
209
209
210 def test_app_verify_api_missing_data(self, git_lfs_app):
210 def test_app_verify_api_missing_data(self, git_lfs_app):
211 params = {'oid': 'missing'}
211 params = {'oid': 'missing'}
212 response = git_lfs_app.post_json(
212 response = git_lfs_app.post_json(
213 '/repo/info/lfs/verify', params=params,
213 '/repo/info/lfs/verify', params=params,
214 status=400)
214 status=400)
215
215
216 assert json.loads(response.text) == {
216 assert json.loads(response.text) == {
217 'message': 'missing oid and size in request data'}
217 'message': 'missing oid and size in request data'}
218
218
219 def test_app_verify_api_missing_obj(self, git_lfs_app):
219 def test_app_verify_api_missing_obj(self, git_lfs_app):
220 params = {'oid': 'missing', 'size': '1024'}
220 params = {'oid': 'missing', 'size': '1024'}
221 response = git_lfs_app.post_json(
221 response = git_lfs_app.post_json(
222 '/repo/info/lfs/verify', params=params,
222 '/repo/info/lfs/verify', params=params,
223 status=404)
223 status=404)
224
224
225 assert json.loads(response.text) == {
225 assert json.loads(response.text) == {
226 'message': 'oid `missing` does not exists in store'
226 'message': 'oid `missing` does not exists in store'
227 }
227 }
228
228
229 def test_app_verify_api_size_mismatch(self, git_lfs_app):
229 def test_app_verify_api_size_mismatch(self, git_lfs_app):
230 oid = 'existing'
230 oid = 'existing'
231 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
231 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
232 if not os.path.isdir(os.path.dirname(oid_path)):
232 if not os.path.isdir(os.path.dirname(oid_path)):
233 os.makedirs(os.path.dirname(oid_path))
233 os.makedirs(os.path.dirname(oid_path))
234 with open(oid_path, 'wb') as f:
234 with open(oid_path, 'wb') as f:
235 f.write(safe_bytes('OID_CONTENT'))
235 f.write(safe_bytes('OID_CONTENT'))
236
236
237 params = {'oid': oid, 'size': '1024'}
237 params = {'oid': oid, 'size': '1024'}
238 response = git_lfs_app.post_json(
238 response = git_lfs_app.post_json(
239 '/repo/info/lfs/verify', params=params, status=422)
239 '/repo/info/lfs/verify', params=params, status=422)
240
240
241 assert json.loads(response.text) == {
241 assert json.loads(response.text) == {
242 'message': 'requested file size mismatch store size:11 requested:1024'
242 'message': 'requested file size mismatch store size:11 requested:1024'
243 }
243 }
244
244
245 def test_app_verify_api(self, git_lfs_app):
245 def test_app_verify_api(self, git_lfs_app):
246 oid = 'existing'
246 oid = 'existing'
247 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
247 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
248 if not os.path.isdir(os.path.dirname(oid_path)):
248 if not os.path.isdir(os.path.dirname(oid_path)):
249 os.makedirs(os.path.dirname(oid_path))
249 os.makedirs(os.path.dirname(oid_path))
250 with open(oid_path, 'wb') as f:
250 with open(oid_path, 'wb') as f:
251 f.write(safe_bytes('OID_CONTENT'))
251 f.write(safe_bytes('OID_CONTENT'))
252
252
253 params = {'oid': oid, 'size': 11}
253 params = {'oid': oid, 'size': 11}
254 response = git_lfs_app.post_json(
254 response = git_lfs_app.post_json(
255 '/repo/info/lfs/verify', params=params)
255 '/repo/info/lfs/verify', params=params)
256
256
257 assert json.loads(response.text) == {
257 assert json.loads(response.text) == {
258 'message': {'size': 11, 'oid': oid}
258 'message': {'size': 11, 'oid': oid}
259 }
259 }
260
260
261 def test_app_download_api_oid_not_existing(self, git_lfs_app):
261 def test_app_download_api_oid_not_existing(self, git_lfs_app):
262 oid = 'missing'
262 oid = 'missing'
263
263
264 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
264 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
265
265
266 assert json.loads(response.text) == {
266 assert json.loads(response.text) == {
267 'message': 'requested file with oid `missing` not found in store'}
267 'message': 'requested file with oid `missing` not found in store'}
268
268
269 def test_app_download_api(self, git_lfs_app):
269 def test_app_download_api(self, git_lfs_app):
270 oid = 'existing'
270 oid = 'existing'
271 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
271 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
272 if not os.path.isdir(os.path.dirname(oid_path)):
272 if not os.path.isdir(os.path.dirname(oid_path)):
273 os.makedirs(os.path.dirname(oid_path))
273 os.makedirs(os.path.dirname(oid_path))
274 with open(oid_path, 'wb') as f:
274 with open(oid_path, 'wb') as f:
275 f.write(safe_bytes('OID_CONTENT'))
275 f.write(safe_bytes('OID_CONTENT'))
276
276
277 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}')
277 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}')
278 assert response
278 assert response
279
279
280 def test_app_upload(self, git_lfs_app):
280 def test_app_upload(self, git_lfs_app):
281 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
281 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
282
282
283 response = git_lfs_app.put(
283 response = git_lfs_app.put(
284 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
284 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
285
285
286 assert json.loads(response.text) == {'upload': 'ok', 'state': 'written'}
286 assert json.loads(response.text) == {'upload': 'ok', 'state': 'written'}
287
287
288 # verify that we actually wrote that OID
288 # verify that we actually wrote that OID
289 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
289 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
290 assert os.path.isfile(oid_path)
290 assert os.path.isfile(oid_path)
291 assert 'CONTENT' == open(oid_path).read()
291 assert 'CONTENT' == open(oid_path).read()
292
292
293 response = git_lfs_app.put(
293 response = git_lfs_app.put(
294 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
294 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
295
295
296 assert json.loads(response.text) == {'upload': 'ok', 'state': 'in-store'}
296 assert json.loads(response.text) == {'upload': 'ok', 'state': 'in-store'}
297
297
298
298
299 def test_app_upload_wrong_sha(self, git_lfs_app):
299 def test_app_upload_wrong_sha(self, git_lfs_app):
300 oid = 'i-am-a-wrong-sha'
300 oid = 'i-am-a-wrong-sha'
301
301
302 response = git_lfs_app.put(f'/repo/info/lfs/objects/{oid}', params='CONTENT', status=400)
302 response = git_lfs_app.put(f'/repo/info/lfs/objects/{oid}', params='CONTENT', status=400)
303
303
304 assert json.loads(response.text) == {
304 assert json.loads(response.text) == {
305 'message': 'oid i-am-a-wrong-sha does not match expected sha '
305 'message': 'oid i-am-a-wrong-sha does not match expected sha '
306 '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'}
306 '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'}
307
307
308 # check this OID wasn't written to store
308 # check this OID wasn't written to store
309 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
309 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
310 assert json.loads(response.text) == {'message': 'requested file with oid `i-am-a-wrong-sha` not found in store'}
310 assert json.loads(response.text) == {'message': 'requested file with oid `i-am-a-wrong-sha` not found in store'}
@@ -1,143 +1,143 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from vcsserver.lib.str_utils import safe_bytes
20 from vcsserver.lib.str_utils import safe_bytes
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
22
22
23
23
24 @pytest.fixture()
24 @pytest.fixture()
25 def lfs_store(tmpdir):
25 def lfs_store(tmpdir):
26 repo = 'test'
26 repo = 'test'
27 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
27 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
28 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
29 return store
29 return store
30
30
31
31
32 @pytest.fixture()
32 @pytest.fixture()
33 def oid_handler(lfs_store):
33 def oid_handler(lfs_store):
34 store = lfs_store
34 store = lfs_store
35 repo = store.repo
35 repo = store.repo
36 oid = store.oid
36 oid = store.oid
37
37
38 oid_handler = OidHandler(
38 oid_handler = OidHandler(
39 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 store=store, repo_name=repo, auth=('basic', 'xxxx'),
40 oid=oid,
40 oid=oid,
41 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
42 obj_verify_href='http://localhost/verify')
42 obj_verify_href='http://localhost/verify')
43 return oid_handler
43 return oid_handler
44
44
45
45
46 class TestOidHandler:
46 class TestOidHandler:
47
47
48 @pytest.mark.parametrize('exec_action', [
48 @pytest.mark.parametrize('exec_action', [
49 'download',
49 'download',
50 'upload',
50 'upload',
51 ])
51 ])
52 def test_exec_action(self, exec_action, oid_handler):
52 def test_exec_action(self, exec_action, oid_handler):
53 handler = oid_handler.exec_operation(exec_action)
53 handler = oid_handler.exec_operation(exec_action)
54 assert handler
54 assert handler
55
55
56 def test_exec_action_undefined(self, oid_handler):
56 def test_exec_action_undefined(self, oid_handler):
57 with pytest.raises(AttributeError):
57 with pytest.raises(AttributeError):
58 oid_handler.exec_operation('wrong')
58 oid_handler.exec_operation('wrong')
59
59
60 def test_download_oid_not_existing(self, oid_handler):
60 def test_download_oid_not_existing(self, oid_handler):
61 response, has_errors = oid_handler.exec_operation('download')
61 response, has_errors = oid_handler.exec_operation('download')
62
62
63 assert response is None
63 assert response is None
64 assert has_errors['error'] == {
64 assert has_errors['error'] == {
65 'code': 404,
65 'code': 404,
66 'message': 'object: 65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12 does not exist in store'
66 'message': 'object: 65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12 does not exist in store'
67 }
67 }
68
68
69 def test_download_oid(self, oid_handler):
69 def test_download_oid(self, oid_handler):
70 store = oid_handler.get_store()
70 store = oid_handler.get_store()
71 if not os.path.isdir(os.path.dirname(store.oid_path)):
71 if not os.path.isdir(os.path.dirname(store.oid_path)):
72 os.makedirs(os.path.dirname(store.oid_path))
72 os.makedirs(os.path.dirname(store.oid_path))
73
73
74 with open(store.oid_path, 'wb') as f:
74 with open(store.oid_path, 'wb') as f:
75 f.write(safe_bytes('CONTENT'))
75 f.write(safe_bytes('CONTENT'))
76
76
77 response, has_errors = oid_handler.exec_operation('download')
77 response, has_errors = oid_handler.exec_operation('download')
78
78
79 assert has_errors is None
79 assert has_errors is None
80 assert response['download'] == {
80 assert response['download'] == {
81 'header': {'Authorization': 'basic xxxx'},
81 'header': {'Authorization': 'basic xxxx'},
82 'href': 'http://localhost/handle_oid'
82 'href': 'http://localhost/handle_oid'
83 }
83 }
84
84
85 def test_upload_oid_that_exists(self, oid_handler):
85 def test_upload_oid_that_exists(self, oid_handler):
86 store = oid_handler.get_store()
86 store = oid_handler.get_store()
87 if not os.path.isdir(os.path.dirname(store.oid_path)):
87 if not os.path.isdir(os.path.dirname(store.oid_path)):
88 os.makedirs(os.path.dirname(store.oid_path))
88 os.makedirs(os.path.dirname(store.oid_path))
89
89
90 with open(store.oid_path, 'wb') as f:
90 with open(store.oid_path, 'wb') as f:
91 f.write(safe_bytes('CONTENT'))
91 f.write(safe_bytes('CONTENT'))
92 oid_handler.obj_size = 7
92 oid_handler.obj_size = 7
93 response, has_errors = oid_handler.exec_operation('upload')
93 response, has_errors = oid_handler.exec_operation('upload')
94 assert has_errors is None
94 assert has_errors is None
95 assert response is None
95 assert response is None
96
96
97 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
97 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
98 store = oid_handler.get_store()
98 store = oid_handler.get_store()
99 if not os.path.isdir(os.path.dirname(store.oid_path)):
99 if not os.path.isdir(os.path.dirname(store.oid_path)):
100 os.makedirs(os.path.dirname(store.oid_path))
100 os.makedirs(os.path.dirname(store.oid_path))
101
101
102 with open(store.oid_path, 'wb') as f:
102 with open(store.oid_path, 'wb') as f:
103 f.write(safe_bytes('CONTENT'))
103 f.write(safe_bytes('CONTENT'))
104
104
105 oid_handler.obj_size = 10240
105 oid_handler.obj_size = 10240
106 response, has_errors = oid_handler.exec_operation('upload')
106 response, has_errors = oid_handler.exec_operation('upload')
107 assert has_errors is None
107 assert has_errors is None
108 assert response['upload'] == {
108 assert response['upload'] == {
109 'header': {'Authorization': 'basic xxxx',
109 'header': {'Authorization': 'basic xxxx',
110 'Transfer-Encoding': 'chunked'},
110 'Transfer-Encoding': 'chunked'},
111 'href': 'http://localhost/handle_oid',
111 'href': 'http://localhost/handle_oid',
112 }
112 }
113
113
114 def test_upload_oid(self, oid_handler):
114 def test_upload_oid(self, oid_handler):
115 response, has_errors = oid_handler.exec_operation('upload')
115 response, has_errors = oid_handler.exec_operation('upload')
116 assert has_errors is None
116 assert has_errors is None
117 assert response['upload'] == {
117 assert response['upload'] == {
118 'header': {'Authorization': 'basic xxxx',
118 'header': {'Authorization': 'basic xxxx',
119 'Transfer-Encoding': 'chunked'},
119 'Transfer-Encoding': 'chunked'},
120 'href': 'http://localhost/handle_oid'
120 'href': 'http://localhost/handle_oid'
121 }
121 }
122
122
123
123
124 class TestLFSStore:
124 class TestLFSStore:
125 def test_write_oid(self, lfs_store):
125 def test_write_oid(self, lfs_store):
126 oid_location = lfs_store.oid_path
126 oid_location = lfs_store.oid_path
127
127
128 assert not os.path.isfile(oid_location)
128 assert not os.path.isfile(oid_location)
129
129
130 engine = lfs_store.get_engine(mode='wb')
130 engine = lfs_store.get_engine(mode='wb')
131 with engine as f:
131 with engine as f:
132 f.write(safe_bytes('CONTENT'))
132 f.write(safe_bytes('CONTENT'))
133
133
134 assert os.path.isfile(oid_location)
134 assert os.path.isfile(oid_location)
135
135
136 def test_detect_has_oid(self, lfs_store):
136 def test_detect_has_oid(self, lfs_store):
137
137
138 assert lfs_store.has_oid() is False
138 assert lfs_store.has_oid() is False
139 engine = lfs_store.get_engine(mode='wb')
139 engine = lfs_store.get_engine(mode='wb')
140 with engine as f:
140 with engine as f:
141 f.write(safe_bytes('CONTENT'))
141 f.write(safe_bytes('CONTENT'))
142
142
143 assert lfs_store.has_oid() is True
143 assert lfs_store.has_oid() is True
@@ -1,50 +1,50 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import copy
17 import copy
18 from functools import wraps
18 from functools import wraps
19
19
20
20
21 def get_cython_compat_decorator(wrapper, func):
21 def get_cython_compat_decorator(wrapper, func):
22 """
22 """
23 Creates a cython compatible decorator. The previously used
23 Creates a cython compatible decorator. The previously used
24 decorator.decorator() function seems to be incompatible with cython.
24 decorator.decorator() function seems to be incompatible with cython.
25
25
26 :param wrapper: __wrapper method of the decorator class
26 :param wrapper: __wrapper method of the decorator class
27 :param func: decorated function
27 :param func: decorated function
28 """
28 """
29 @wraps(func)
29 @wraps(func)
30 def local_wrapper(*args, **kwds):
30 def local_wrapper(*args, **kwds):
31 return wrapper(func, *args, **kwds)
31 return wrapper(func, *args, **kwds)
32 local_wrapper.__wrapped__ = func
32 local_wrapper.__wrapped__ = func
33 return local_wrapper
33 return local_wrapper
34
34
35
35
36 def safe_result(result):
36 def safe_result(result):
37 """clean result for better representation in logs"""
37 """clean result for better representation in logs"""
38 clean_copy = copy.deepcopy(result)
38 clean_copy = copy.deepcopy(result)
39
39
40 try:
40 try:
41 if 'objects' in clean_copy:
41 if 'objects' in clean_copy:
42 for oid_data in clean_copy['objects']:
42 for oid_data in clean_copy['objects']:
43 if 'actions' in oid_data:
43 if 'actions' in oid_data:
44 for action_name, data in oid_data['actions'].items():
44 for action_name, data in oid_data['actions'].items():
45 if 'header' in data:
45 if 'header' in data:
46 data['header'] = {'Authorization': '*****'}
46 data['header'] = {'Authorization': '*****'}
47 except Exception:
47 except Exception:
48 return result
48 return result
49
49
50 return clean_copy
50 return clean_copy
@@ -1,92 +1,92 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24
24
25 # patch demandimport, due to bug in mercurial when it always triggers
25 # patch demandimport, due to bug in mercurial when it always triggers
26 # demandimport.enable()
26 # demandimport.enable()
27 from vcsserver.lib.str_utils import safe_bytes
27 from vcsserver.lib.str_utils import safe_bytes
28
28
29 demandimport.enable = lambda *args, **kwargs: 1
29 demandimport.enable = lambda *args, **kwargs: 1
30
30
31 from mercurial import ui
31 from mercurial import ui
32 from mercurial import patch
32 from mercurial import patch
33 from mercurial import config
33 from mercurial import config
34 from mercurial import extensions
34 from mercurial import extensions
35 from mercurial import scmutil
35 from mercurial import scmutil
36 from mercurial import archival
36 from mercurial import archival
37 from mercurial import discovery
37 from mercurial import discovery
38 from mercurial import unionrepo
38 from mercurial import unionrepo
39 from mercurial import localrepo
39 from mercurial import localrepo
40 from mercurial import merge as hg_merge
40 from mercurial import merge as hg_merge
41 from mercurial import subrepo
41 from mercurial import subrepo
42 from mercurial import subrepoutil
42 from mercurial import subrepoutil
43 from mercurial import tags as hg_tag
43 from mercurial import tags as hg_tag
44 from mercurial import util as hgutil
44 from mercurial import util as hgutil
45 from mercurial.commands import clone, pull
45 from mercurial.commands import clone, pull
46 from mercurial.node import nullid
46 from mercurial.node import nullid
47 from mercurial.context import memctx, memfilectx
47 from mercurial.context import memctx, memfilectx
48 from mercurial.error import (
48 from mercurial.error import (
49 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
49 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
50 RequirementError, ProgrammingError)
50 RequirementError, ProgrammingError)
51 from mercurial.hgweb import hgweb_mod
51 from mercurial.hgweb import hgweb_mod
52 from mercurial.localrepo import instance
52 from mercurial.localrepo import instance
53 from mercurial.match import match, alwaysmatcher, patternmatcher
53 from mercurial.match import match, alwaysmatcher, patternmatcher
54 from mercurial.mdiff import diffopts
54 from mercurial.mdiff import diffopts
55 from mercurial.node import bin, hex
55 from mercurial.node import bin, hex
56 from mercurial.encoding import tolocal
56 from mercurial.encoding import tolocal
57 from mercurial.discovery import findcommonoutgoing
57 from mercurial.discovery import findcommonoutgoing
58 from mercurial.hg import peer
58 from mercurial.hg import peer
59 from mercurial.httppeer import make_peer
59 from mercurial.httppeer import make_peer
60 from mercurial.utils.urlutil import url as hg_url
60 from mercurial.utils.urlutil import url as hg_url
61 from mercurial.scmutil import revrange, revsymbol
61 from mercurial.scmutil import revrange, revsymbol
62 from mercurial.node import nullrev
62 from mercurial.node import nullrev
63 from mercurial import exchange
63 from mercurial import exchange
64 from hgext import largefiles
64 from hgext import largefiles
65
65
66 # those authnadlers are patched for python 2.6.5 bug an
66 # those authnadlers are patched for python 2.6.5 bug an
67 # infinit looping when given invalid resources
67 # infinit looping when given invalid resources
68 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
68 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
69
69
70 # hg strip is in core now
70 # hg strip is in core now
71 from mercurial import strip as hgext_strip
71 from mercurial import strip as hgext_strip
72
72
73
73
74 def get_ctx(repo, ref):
74 def get_ctx(repo, ref):
75 if not isinstance(ref, int):
75 if not isinstance(ref, int):
76 ref = safe_bytes(ref)
76 ref = safe_bytes(ref)
77
77
78 try:
78 try:
79 ctx = repo[ref]
79 ctx = repo[ref]
80 return ctx
80 return ctx
81 except (ProgrammingError, TypeError):
81 except (ProgrammingError, TypeError):
82 # we're unable to find the rev using a regular lookup, we fallback
82 # we're unable to find the rev using a regular lookup, we fallback
83 # to slower, but backward compat revsymbol usage
83 # to slower, but backward compat revsymbol usage
84 pass
84 pass
85 except (LookupError, RepoLookupError):
85 except (LookupError, RepoLookupError):
86 # Similar case as above but only for refs that are not numeric
86 # Similar case as above but only for refs that are not numeric
87 if isinstance(ref, int):
87 if isinstance(ref, int):
88 raise
88 raise
89
89
90 ctx = revsymbol(repo, ref)
90 ctx = revsymbol(repo, ref)
91
91
92 return ctx
92 return ctx
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
def patch_largefiles_capabilities():
    """
    Patches the capabilities function in the largefiles extension.
    """
    from vcsserver import hgcompat

    largefiles_proto = hgcompat.largefiles.proto
    dynamic_capabilities = _dynamic_capabilities_wrapper(
        largefiles_proto, hgcompat.extensions.extensions)
    largefiles_proto._capabilities = dynamic_capabilities
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto._capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(orig, repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 return calc_capabilities(orig, repo, proto)
56 else:
56 else:
57 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
58 return orig(repo, proto)
58 return orig(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
def patch_subrepo_type_mapping():
    """Replace Mercurial's subrepo classes with no-op stand-ins.

    The VCSServer never materializes subrepositories on disk; mapping all
    subrepo types to ``NoOpSubrepo`` makes operations on repos that declare
    subrepos succeed without touching them, while merges involving subrepos
    are rejected explicitly.
    """
    from collections import defaultdict
    from .hgcompat import subrepo, subrepoutil
    from vcsserver.exceptions import SubrepoMergeException

    class NoOpSubrepo(subrepo.abstractsubrepo):

        def __init__(self, ctx, path, *args, **kwargs):
            """Initialize abstractsubrepo part

            ``ctx`` is the context referring this subrepository in the
            parent repository.

            ``path`` is the path to this subrepository as seen from
            innermost repository.
            """
            self.ui = ctx.repo().ui
            self._ctx = ctx
            self._path = path

        def storeclean(self, path):
            """
            returns true if the repository has not changed since it was last
            cloned from or pushed to a given repository.
            """
            # always "clean": we never modify subrepo state
            return True

        def dirty(self, ignoreupdate=False, missing=False):
            """returns true if the dirstate of the subrepo is dirty or does not
            match current stored state. If ignoreupdate is true, only check
            whether the subrepo has uncommitted changes in its dirstate.
            """
            return False

        def basestate(self):
            """current working directory base state, disregarding .hgsubstate
            state and working directory modifications"""
            # read the recorded revision for this subrepo path from .hgsubstate
            substate = subrepoutil.state(self._ctx, self.ui)
            file_system_path, rev, repotype = substate.get(self._path)
            return rev

        def remove(self):
            """remove the subrepo

            (should verify the dirstate is not dirty first)
            """
            pass

        def get(self, state, overwrite=False):
            """run whatever commands are needed to put the subrepo into
            this state
            """
            pass

        def merge(self, state):
            """merge currently-saved state with the new state."""
            # NOTE(review): double call looks intentional — SubrepoMergeException
            # appears to be an exception *factory* from vcsserver.exceptions;
            # the first call builds the exception type, the second instantiates
            # it. Confirm against vcsserver.exceptions before "fixing".
            raise SubrepoMergeException()()

        def push(self, opts):
            """perform whatever action is analogous to 'hg push'

            This may be a no-op on some systems.
            """
            pass

    # Patch subrepo type mapping to always return our NoOpSubrepo class
    # whenever a subrepo class is looked up.
    subrepo.types = {
        'hg': NoOpSubrepo,
        'git': NoOpSubrepo,
        'svn': NoOpSubrepo
    }
@@ -1,238 +1,238 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import re
19 import re
20 import os
20 import os
21 import sys
21 import sys
22 import datetime
22 import datetime
23 import logging
23 import logging
24 import pkg_resources
24 import pkg_resources
25
25
26 import vcsserver
26 import vcsserver
27 import vcsserver.settings
27 import vcsserver.settings
28 from vcsserver.lib.str_utils import safe_bytes
28 from vcsserver.lib.str_utils import safe_bytes
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32 HOOKS_DIR_MODE = 0o755
32 HOOKS_DIR_MODE = 0o755
33 HOOKS_FILE_MODE = 0o755
33 HOOKS_FILE_MODE = 0o755
34
34
35
35
def set_permissions_if_needed(path_to_check, perms: int):
    """Raise the permission bits of *path_to_check* to at least *perms*.

    The current mode is compared numerically with the requested one, and
    ``chmod`` is only issued when the current bits are lower — an
    already-wider mode (e.g. 0o777 vs a required 0o755) is left untouched.

    :param path_to_check: file or directory whose mode should be checked
    :param perms: required permission bits, e.g. ``0o755``
        (the previous ``oct`` annotation referred to the builtin function,
        not a type — the value has always been a plain int)
    """
    # Extract only the permission bits from the full st_mode
    current_permissions = os.stat(path_to_check).st_mode & 0o777

    # Check if current permissions are lower than required
    if current_permissions < perms:
        # Change the permissions if they are lower than required
        os.chmod(path_to_check, perms)
44
44
45
45
def get_git_hooks_path(repo_path, bare):
    """Return the hooks directory of a git repository.

    Bare repositories keep hooks directly under ``<repo>/hooks``; working
    copies keep them under ``<repo>/.git/hooks``.
    """
    if bare:
        return os.path.join(repo_path, 'hooks')
    return os.path.join(repo_path, '.git', 'hooks')
52
52
53
53
def install_git_hooks(repo_path, bare, executable=None, force_create=False):
    """
    Creates a RhodeCode hook inside a git repository

    Writes ``pre-receive`` and ``post-receive`` scripts rendered from the
    packaged templates; existing hooks are only overwritten when they were
    written by RhodeCode (or ``force_create`` is set).

    :param repo_path: path to repository
    :param bare: defines if repository is considered a bare git repo
    :param executable: binary executable to put in the hooks
    :param force_create: Creates even if the same name hook exists
    :return: always ``True`` (write errors are logged, not raised)
    """
    executable = executable or sys.executable
    hooks_path = get_git_hooks_path(repo_path, bare)

    # we always call it to ensure dir exists and it has a proper mode
    if not os.path.exists(hooks_path):
        # If it doesn't exist, create a new directory with the specified mode
        os.makedirs(hooks_path, mode=HOOKS_DIR_MODE, exist_ok=True)
    # If it exists, change the directory's mode to the specified mode
    set_permissions_if_needed(hooks_path, perms=HOOKS_DIR_MODE)

    # hook script templates shipped inside the vcsserver package;
    # NOTE(review): pkg_resources is deprecated in modern setuptools —
    # importlib.resources would be the replacement, confirm before migrating
    tmpl_post = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))

    path = ''  # not used for now
    timestamp = datetime.datetime.utcnow().isoformat()

    for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
        log.debug('Installing git hook in repo %s', repo_path)
        _hook_file = os.path.join(hooks_path, f'{h_type}-receive')
        # only overwrite hooks we wrote ourselves (detected via RC_HOOK_VER)
        _rhodecode_hook = check_rhodecode_hook(_hook_file)

        if _rhodecode_hook or force_create:
            log.debug('writing git %s hook file at %s !', h_type, _hook_file)
            # environment values the generated hook script will embed
            env_expand = str([
                ('RC_INI_FILE', vcsserver.CONFIG['__file__']),
                ('RC_CORE_BINARY_DIR', vcsserver.settings.BINARY_DIR),
                ('RC_GIT_EXECUTABLE', vcsserver.settings.GIT_EXECUTABLE()),
                ('RC_SVN_EXECUTABLE', vcsserver.settings.SVN_EXECUTABLE()),
                ('RC_SVNLOOK_EXECUTABLE', vcsserver.settings.SVNLOOK_EXECUTABLE()),
            ])
            try:
                with open(_hook_file, 'wb') as f:
                    # substitute the template placeholders before writing
                    template = template.replace(b'_OS_EXPAND_', safe_bytes(env_expand))
                    template = template.replace(b'_TMPL_', safe_bytes(vcsserver.get_version()))
                    template = template.replace(b'_DATE_', safe_bytes(timestamp))
                    template = template.replace(b'_ENV_', safe_bytes(executable))
                    template = template.replace(b'_PATH_', safe_bytes(path))
                    f.write(template)
                # hooks must be executable for git to invoke them
                set_permissions_if_needed(_hook_file, perms=HOOKS_FILE_MODE)
            except OSError:
                # best effort: log and continue with the remaining hooks
                log.exception('error writing hook file %s', _hook_file)
        else:
            log.debug('skipping writing hook file')

    return True
112
112
113
113
def get_svn_hooks_path(repo_path):
    """Return the hooks directory of a svn repository."""
    return os.path.join(repo_path, 'hooks')
118
118
119
119
def install_svn_hooks(repo_path, executable=None, force_create=False):
    """
    Creates RhodeCode hooks inside a svn repository

    Writes ``pre-commit`` and ``post-commit`` scripts rendered from the
    packaged templates; existing hooks are only overwritten when they were
    written by RhodeCode (or ``force_create`` is set).

    :param repo_path: path to repository
    :param executable: binary executable to put in the hooks
    :param force_create: Create even if same name hook exists
    :return: always ``True`` (write errors are logged, not raised)
    """
    executable = executable or sys.executable
    hooks_path = get_svn_hooks_path(repo_path)
    if not os.path.isdir(hooks_path):
        # NOTE(review): mode 0o777 here differs from the git variant, which
        # uses HOOKS_DIR_MODE (0o755) — confirm whether that is intentional
        os.makedirs(hooks_path, mode=0o777, exist_ok=True)

    # hook script templates shipped inside the vcsserver package
    tmpl_post = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))

    path = ''  # not used for now
    timestamp = datetime.datetime.utcnow().isoformat()

    for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
        log.debug('Installing svn hook in repo %s', repo_path)
        _hook_file = os.path.join(hooks_path, f'{h_type}-commit')
        # only overwrite hooks we wrote ourselves (detected via RC_HOOK_VER)
        _rhodecode_hook = check_rhodecode_hook(_hook_file)

        if _rhodecode_hook or force_create:
            log.debug('writing svn %s hook file at %s !', h_type, _hook_file)

            # environment values the generated hook script will embed
            env_expand = str([
                ('RC_INI_FILE', vcsserver.CONFIG['__file__']),
                ('RC_CORE_BINARY_DIR', vcsserver.settings.BINARY_DIR),
                ('RC_GIT_EXECUTABLE', vcsserver.settings.GIT_EXECUTABLE()),
                ('RC_SVN_EXECUTABLE', vcsserver.settings.SVN_EXECUTABLE()),
                ('RC_SVNLOOK_EXECUTABLE', vcsserver.settings.SVNLOOK_EXECUTABLE()),
            ])
            try:
                with open(_hook_file, 'wb') as f:
                    # substitute the template placeholders before writing
                    template = template.replace(b'_OS_EXPAND_', safe_bytes(env_expand))
                    template = template.replace(b'_TMPL_', safe_bytes(vcsserver.get_version()))
                    template = template.replace(b'_DATE_', safe_bytes(timestamp))
                    template = template.replace(b'_ENV_', safe_bytes(executable))
                    template = template.replace(b'_PATH_', safe_bytes(path))

                    f.write(template)
                # hooks must be executable for svn to invoke them
                os.chmod(_hook_file, 0o755)
            except OSError:
                # best effort: log and continue with the remaining hooks
                log.exception('error writing hook file %s', _hook_file)
        else:
            log.debug('skipping writing hook file')

    return True
174
174
175
175
def get_version_from_hook(hook_path):
    """Extract the ``RC_HOOK_VER`` value embedded in a hook file.

    :param hook_path: path of the hook script to inspect
    :return: the version as bytes with single quotes stripped, or ``b''``
        when no marker is present
    """
    found = b''
    content = read_hook_content(hook_path)
    match = re.search(rb'RC_HOOK_VER\s*=\s*(.*)', content)
    if match is not None:
        try:
            found = match.group(1)
            log.debug('got version %s from hooks.', found)
        except Exception:
            log.exception("Exception while reading the hook version.")
    return found.replace(b"'", b"")
187
187
188
188
def check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode
    """
    if not os.path.exists(hook_path):
        # nothing to clash with — safe to (re)create
        return True

    log.debug('hook exists, checking if it is from RhodeCode')

    # a RhodeCode-written hook carries an RC_HOOK_VER marker
    return bool(get_version_from_hook(hook_path))
203
203
204
204
def read_hook_content(hook_path) -> bytes:
    """Return the raw bytes of *hook_path*, or ``b''`` if it is not a file."""
    if not os.path.isfile(hook_path):
        return b''
    with open(hook_path, 'rb') as hook_file:
        return hook_file.read()
211
211
212
212
def get_git_pre_hook_version(repo_path, bare):
    """Return the RhodeCode version recorded in the git pre-receive hook."""
    hook_file = os.path.join(get_git_hooks_path(repo_path, bare), 'pre-receive')
    return get_version_from_hook(hook_file)
218
218
219
219
def get_git_post_hook_version(repo_path, bare):
    """Return the RhodeCode version recorded in the git post-receive hook."""
    hook_file = os.path.join(get_git_hooks_path(repo_path, bare), 'post-receive')
    return get_version_from_hook(hook_file)
225
225
226
226
def get_svn_pre_hook_version(repo_path):
    """Return the RhodeCode version recorded in the svn pre-commit hook."""
    hook_file = os.path.join(get_svn_hooks_path(repo_path), 'pre-commit')
    return get_version_from_hook(hook_file)
232
232
233
233
def get_svn_post_hook_version(repo_path):
    """Return the RhodeCode version recorded in the svn post-commit hook."""
    hook_file = os.path.join(get_svn_hooks_path(repo_path), 'post-commit')
    return get_version_from_hook(hook_file)
@@ -1,765 +1,765 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import platform
20 import platform
21 import sys
21 import sys
22 import locale
22 import locale
23 import logging
23 import logging
24 import uuid
24 import uuid
25 import time
25 import time
26 import wsgiref.util
26 import wsgiref.util
27 import tempfile
27 import tempfile
28 import psutil
28 import psutil
29
29
30 from itertools import chain
30 from itertools import chain
31
31
32 import msgpack
32 import msgpack
33 import configparser
33 import configparser
34
34
35 from pyramid.config import Configurator
35 from pyramid.config import Configurator
36 from pyramid.wsgi import wsgiapp
36 from pyramid.wsgi import wsgiapp
37 from pyramid.response import Response
37 from pyramid.response import Response
38
38
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
40
40
41 from vcsserver.config.settings_maker import SettingsMaker
41 from vcsserver.config.settings_maker import SettingsMaker
42
42
43 from vcsserver.tweens.request_wrapper import get_headers_call_context
43 from vcsserver.tweens.request_wrapper import get_headers_call_context
44
44
45 from vcsserver import remote_wsgi, scm_app, hgpatches
45 from vcsserver import remote_wsgi, scm_app, hgpatches
46 from vcsserver.server import VcsServer
46 from vcsserver.server import VcsServer
47 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
47 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
48 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
48 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
49 from vcsserver.echo_stub.echo_app import EchoApp
49 from vcsserver.echo_stub.echo_app import EchoApp
50 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected, HTTPClientNotSupported
50 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected, HTTPClientNotSupported
51 from vcsserver.lib.exc_tracking import store_exception, format_exc
51 from vcsserver.lib.exc_tracking import store_exception, format_exc
52 from vcsserver.lib.str_utils import safe_int
52 from vcsserver.lib.str_utils import safe_int
53 from vcsserver.lib.statsd_client import StatsdClient
53 from vcsserver.lib.statsd_client import StatsdClient
54 from vcsserver.lib.ext_json import json
54 from vcsserver.lib.ext_json import json
55 from vcsserver.lib.config_utils import configure_and_store_settings
55 from vcsserver.lib.config_utils import configure_and_store_settings
56
56
57
57
# Backend imports are guarded so that each failure is recorded; with
# strict_vcs enabled the ImportError is re-raised and startup fails fast
# instead of silently running without a backend.
strict_vcs = True

git_import_err = None
try:
    from vcsserver.remote.git_remote import GitFactory, GitRemote
except ImportError as e:
    GitFactory = None
    GitRemote = None
    git_import_err = e
    if strict_vcs:
        raise


hg_import_err = None
try:
    from vcsserver.remote.hg_remote import MercurialFactory, HgRemote
except ImportError as e:
    MercurialFactory = None
    HgRemote = None
    hg_import_err = e
    if strict_vcs:
        raise


svn_import_err = None
try:
    from vcsserver.remote.svn_remote import SubversionFactory, SvnRemote
except ImportError as e:
    SubversionFactory = None
    SvnRemote = None
    svn_import_err = e
    if strict_vcs:
        raise

log = logging.getLogger(__name__)

# due to Mercurial/glibc2.27 problems we need to detect if locale settings are
# causing problems and "fix" it in case they do and fallback to LC_ALL = C

try:
    locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
    log.error('LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
    os.environ['LC_ALL'] = 'C'
102
102
103
103
104 def _is_request_chunked(environ):
104 def _is_request_chunked(environ):
105 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
105 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
106 return stream
106 return stream
107
107
108
108
def log_max_fd():
    """Log the hard limit on open file descriptors (best-effort, never raises)."""
    try:
        _soft, hard_limit = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)
        log.info('Max file descriptors value: %s', hard_limit)
    except Exception:
        # purely informational; must never break process startup
        pass
115
115
116
116
class VCS:
    """Wires up the per-backend remote objects (git/hg/svn) plus the
    generic VcsServer remote, logging an error for any backend whose
    client libraries failed to import."""

    def __init__(self, locale_conf=None, cache_config=None):
        self.locale = locale_conf
        self.cache_config = cache_config
        self._configure_locale()

        log_max_fd()

        if GitFactory and GitRemote:
            self._git_remote = GitRemote(GitFactory())
        else:
            log.error("Git client import failed: %s", git_import_err)

        if MercurialFactory and HgRemote:
            self._hg_remote = HgRemote(MercurialFactory())
        else:
            log.error("Mercurial client import failed: %s", hg_import_err)

        if SubversionFactory and SvnRemote:
            # hg factory is used for svn url validation
            self._svn_remote = SvnRemote(
                SubversionFactory(), hg_factory=MercurialFactory())
        else:
            log.error("Subversion client import failed: %s", svn_import_err)

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply the configured locale (or the environment's, when empty)."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info('Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, the locale module falls
            # back to the environment variables — see the `locale` docs.
            locale.setlocale(locale.LC_ALL, self.locale)

            lang_code, charset = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                lang_code, charset)
        except locale.Error:
            log.exception('Cannot set locale, not configuring the locale system')
165
165
166
166
class WsgiProxy:
    """Adapter exposing a remote-wsgi handler over a msgpack wire format:
    the request body is one msgpack map, the response is a stream of
    msgpack frames (error, status, headers, then the body chunks)."""

    def __init__(self, wsgi):
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        payload = msgpack.unpackb(environ['wsgi.input'].read())

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                payload['environment'], payload['input_data'],
                *payload['args'], **payload['kwargs'])
        except Exception as e:
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        # NOTE(review): a WSGI-compliant start_response takes a status
        # string and a header list; this custom channel passes (200, {}) —
        # kept as-is to preserve the existing wire behaviour. Confirm the
        # consuming server tolerates it.
        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        # first three frames carry call metadata, the rest stream the body
        for frame in chain([error, status, headers], data):
            yield msgpack.packb(frame)
199
199
200
200
def not_found(request):
    """Pyramid not-found view; rendered as JSON by the notfound config."""
    return {'status': '404 NOT FOUND'}
203
203
204
204
class VCSViewPredicate:
    """Custom Pyramid view predicate: matches the `backend` URL segment
    against the configured mapping of remotes."""

    def __init__(self, val, config):
        # `config` is required by the predicate protocol but unused here
        self.remotes = val

    def text(self):
        return f'vcs view method = {list(self.remotes.keys())}'

    # Pyramid uses phash for predicate uniqueness; reuse the text form
    phash = text

    def __call__(self, context, request):
        """Return True when the requested backend is a configured remote."""
        return request.matchdict.get('backend') in self.remotes
221
221
222
222
class HTTPApplication:
    """Pyramid WSGI application exposing the VCS remotes over HTTP.

    Routes fall into three groups: msgpack RPC (``/{backend}``), streaming
    RPC (``/{backend}/stream``) and raw clone/push streams
    (``/stream/{git,hg}/*repo_name``), plus status/service endpoints and
    hg/git proxy apps.
    """

    # exception class names reported back verbatim in the error payload's
    # `type` field; every other exception type is reported as None
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    # real remote-wsgi module by default; swapped for the echo stub in dev mode
    remote_wsgi = remote_wsgi
    _use_echo_app = False

    def __init__(self, settings=None, global_config=None):
        """Build the Configurator, the VCS remotes and register all routes.

        :param settings: the `app:main` settings dict from the .ini file
        :param global_config: paste global config (provides `__file__`)
        """
        self.config = Configurator(settings=settings)
        # Init our statsd at very start
        self.config.registry.statsd = StatsdClient.statsd
        self.config.registry.vcs_call_context = {}

        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')
        self.config.include('vcsserver.lib.archive_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        # NOTE: a backend whose import failed leaves its VCS attribute
        # unset, so this lookup would raise AttributeError in that case
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        configure_and_store_settings(global_config, settings)

        self._configure()

    def _configure(self):
        """Register renderers, routes, views, predicates and tweens."""
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # catch-all: turns uncaught exceptions into handle_vcs_exception
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

    def wsgi_app(self):
        """Return the finished Pyramid WSGI application."""
        return self.config.make_wsgi_app()

    def _vcs_view_params(self, request):
        """Decode a msgpack RPC request into (payload, remote, method, args, kwargs).

        Also records the call context on the registry, resolves the wire
        `context` into a UUID, and emits debug/statsd telemetry.
        """
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)

        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        request.registry.vcs_call_context = {
            'method': method,
            'repo_name': payload.get('_repo_name'),
        }

        if wire:
            try:
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                pass
            # the wire dict always rides as the first positional argument
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            # also we SKIP printing out any of those methods args since they maybe excessive
            just_args_methods = {
                'commitctx': ('content', 'removed', 'updated'),
                'commit': ('content', 'removed', 'updated')
            }
            if method in just_args_methods:
                skip_args = just_args_methods[method]
                call_args = ''
                call_kwargs = {}
                for k in kwargs:
                    if k in skip_args:
                        # replace our skip key with dummy
                        call_kwargs[k] = f'RemovedParam({k})'
                    else:
                        call_kwargs[k] = kwargs[k]
            else:
                call_args = args[1:]
                call_kwargs = kwargs

            log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, call_kwargs, context_uid, repo_state_uid)

        statsd = request.registry.statsd
        if statsd:
            statsd.incr(
                'vcsserver_method_total', tags=[
                    f"method:{method}",
                ])
        return payload, remote, method, args, kwargs

    def vcs_view(self, request):
        """Dispatch one msgpack RPC call to the selected backend remote.

        On success returns ``{'id': ..., 'result': ...}``; on failure
        returns ``{'id': ..., 'error': {...}}`` and stores the traceback
        server-side, except for a couple of expected lookup errors.
        """
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    # NOTE: reads the closed-over org_exc/org_exc_name, so
                    # the `_exc_obj` parameter is effectively unused
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # expected "not found" style errors are not worth persisting
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = format_exc(exc_info)

            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': str(e),
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }

            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }
        log.debug('Serving data for method %s', method)
        return resp

    def vcs_stream_view(self, request):
        """Like :meth:`vcs_view` but streams the raw bytes result in chunks."""
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        # this method has a stream: marker we remove it here
        method = method.split('stream:')[-1]
        chunk_size = safe_int(payload.get('chunk_size')) or 4096

        resp = getattr(remote, method)(*args, **kwargs)

        def get_chunked_data(method_resp):
            # re-chunk the full response body at the requested size
            stream = io.BytesIO(method_resp)
            while 1:
                chunk = stream.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        response = Response(app_iter=get_chunked_data(resp))
        response.content_type = 'application/octet-stream'

        return response

    def status_view(self, request):
        """Health endpoint: version, hostname and pid, rendered as JSON."""
        import vcsserver
        _platform_id = platform.uname()[1] or 'instance'

        return {
            "status": "OK",
            "vcsserver_version": vcsserver.get_version(),
            "platform": _platform_id,
            "pid": os.getpid(),
        }

    def service_view(self, request):
        """Diagnostics endpoint: echoes version, parsed .ini sections and
        the process environment back to the caller (msgpack-rendered)."""
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            # best-effort display; a bad/missing .ini only loses detail
            log.exception('Failed to read .ini file for display')

        environ = list(os.environ.items())

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.get_version(),
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp

    def _msgpack_renderer_factory(self, info):
        """Pyramid renderer factory packing view results with msgpack.

        Results wrapped in BytesEnvelope/BinaryEnvelope switch the packer
        to binary mode (`use_bin_type`) and a dedicated content type.
        """

        def _render(value, system):
            bin_type = False
            res = value.get('result')
            if isinstance(res, BytesEnvelope):
                log.debug('Result is wrapped in BytesEnvelope type')
                bin_type = True
            elif isinstance(res, BinaryEnvelope):
                log.debug('Result is wrapped in BinaryEnvelope type')
                value['result'] = res.val
                bin_type = True

            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # only override when the view did not set its own type
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
                    if bin_type:
                        response.content_type = 'application/x-msgpack-bin'

            return msgpack.packb(value, use_bin_type=bin_type)
        return _render

    def set_env_from_config(self, environ, config):
        """Copy RhodeCode auth/context data from the repo config into the
        WSGI environ (REMOTE_USER/HGUSER/REMOTE_HOST) and set the webob
        chunked-input compatibility flag."""
        dict_conf = {}
        try:
            # config rows are (section, key, value); the 'rhodecode' row
            # carries a JSON blob with the call context
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True

    def hg_proxy(self):
        """Return a wsgi app proxying hg requests over the msgpack channel."""
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy

    def git_proxy(self):
        """Return a wsgi app proxying git requests over the msgpack channel."""
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy

    def hg_stream(self):
        """Return the wsgi app serving hg clone/push; EchoApp in dev mode."""
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                call_context = get_headers_call_context(environ)

                repo_path = call_context['repo_path']
                repo_name = call_context['repo_name']
                config = call_context['repo_config']

                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = call_context['path_info']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                return app(environ, ResponseFilter(start_response))
            return _hg_stream

    def git_stream(self):
        """Return the wsgi app serving git clone/push (plain or LFS);
        EchoApp in dev mode."""
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')

                call_context = get_headers_call_context(environ)

                repo_path = call_context['repo_path']
                repo_name = call_context['repo_name']
                config = call_context['repo_config']

                environ['PATH_INFO'] = call_context['path_info']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                # LFS requests are detected by content type first …
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                    log.debug(
                        'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                        path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

        return _git_stream

    def handle_vcs_exception(self, exception, request):
        """Top-level exception view: translate known `_vcs_kind` errors into
        dedicated HTTP responses, otherwise store/log and re-raise."""
        match _vcs_kind := getattr(exception, '_vcs_kind', ''):
            case 'repo_locked':
                # the desired status code travels in the call-context headers
                headers_call_context = get_headers_call_context(request.environ)
                status_code = safe_int(headers_call_context['locked_status_code'])

                return HTTPRepoLocked(
                    title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
            case 'repo_branch_protected':
                # Get custom repo-branch-protected status code if present.
                return HTTPRepoBranchProtected(
                    title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
            case 'client_not_supported':
                return HTTPClientNotSupported(
                    title=str(exception), headers=[('X-Rc-Client-Not-Supported', '1')])

        exc_info = request.exc_info
        store_exception(id(exc_info), exc_info)

        traceback_info = 'unavailable'
        if request.exc_info:
            traceback_info = format_exc(request.exc_info)

        log.error(
            'error occurred handling this request for path: %s, \n%s',
            request.path, traceback_info)

        statsd = request.registry.statsd
        if statsd:
            exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
            statsd.incr('vcsserver_exception_total',
                        tags=[f"type:{exc_type}"])
        # re-raise so Pyramid's default error machinery still fires
        raise exception
671
671
672
672
673 class ResponseFilter:
673 class ResponseFilter:
674
674
675 def __init__(self, start_response):
675 def __init__(self, start_response):
676 self._start_response = start_response
676 self._start_response = start_response
677
677
678 def __call__(self, status, response_headers, exc_info=None):
678 def __call__(self, status, response_headers, exc_info=None):
679 headers = tuple(
679 headers = tuple(
680 (h, v) for h, v in response_headers
680 (h, v) for h, v in response_headers
681 if not wsgiref.util.is_hop_by_hop(h))
681 if not wsgiref.util.is_hop_by_hop(h))
682 return self._start_response(status, headers, exc_info)
682 return self._start_response(status, headers, exc_info)
683
683
684
684
685 def sanitize_settings_and_apply_defaults(global_config, settings):
685 def sanitize_settings_and_apply_defaults(global_config, settings):
686 _global_settings_maker = SettingsMaker(global_config)
686 _global_settings_maker = SettingsMaker(global_config)
687 settings_maker = SettingsMaker(settings)
687 settings_maker = SettingsMaker(settings)
688
688
689 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
689 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
690
690
691 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
691 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
692 settings_maker.enable_logging(logging_conf)
692 settings_maker.enable_logging(logging_conf)
693
693
694 # Default includes, possible to change as a user
694 # Default includes, possible to change as a user
695 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
695 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
696 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
696 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
697
697
698 settings_maker.make_setting('__file__', global_config.get('__file__'))
698 settings_maker.make_setting('__file__', global_config.get('__file__'))
699
699
700 settings_maker.make_setting('pyramid.default_locale_name', 'en')
700 settings_maker.make_setting('pyramid.default_locale_name', 'en')
701 settings_maker.make_setting('locale', 'en_US.UTF-8')
701 settings_maker.make_setting('locale', 'en_US.UTF-8')
702
702
703 settings_maker.make_setting(
703 settings_maker.make_setting(
704 'core.binary_dir', '/usr/local/bin/rhodecode_bin/vcs_bin',
704 'core.binary_dir', '/usr/local/bin/rhodecode_bin/vcs_bin',
705 default_when_empty=True, parser='string:noquote')
705 default_when_empty=True, parser='string:noquote')
706
706
707 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
707 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
708
708
709 temp_store = tempfile.gettempdir()
709 temp_store = tempfile.gettempdir()
710 default_cache_dir = os.path.join(temp_store, 'rc_cache')
710 default_cache_dir = os.path.join(temp_store, 'rc_cache')
711 # save default, cache dir, and use it for all backends later.
711 # save default, cache dir, and use it for all backends later.
712 default_cache_dir = settings_maker.make_setting(
712 default_cache_dir = settings_maker.make_setting(
713 'cache_dir',
713 'cache_dir',
714 default=default_cache_dir, default_when_empty=True,
714 default=default_cache_dir, default_when_empty=True,
715 parser='dir:ensured')
715 parser='dir:ensured')
716
716
717 # exception store cache
717 # exception store cache
718 settings_maker.make_setting(
718 settings_maker.make_setting(
719 'exception_tracker.store_path',
719 'exception_tracker.store_path',
720 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
720 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
721 parser='dir:ensured'
721 parser='dir:ensured'
722 )
722 )
723
723
724 # repo_object cache defaults
724 # repo_object cache defaults
725 settings_maker.make_setting(
725 settings_maker.make_setting(
726 'rc_cache.repo_object.backend',
726 'rc_cache.repo_object.backend',
727 default='dogpile.cache.rc.file_namespace',
727 default='dogpile.cache.rc.file_namespace',
728 parser='string')
728 parser='string')
729 settings_maker.make_setting(
729 settings_maker.make_setting(
730 'rc_cache.repo_object.expiration_time',
730 'rc_cache.repo_object.expiration_time',
731 default=30 * 24 * 60 * 60, # 30days
731 default=30 * 24 * 60 * 60, # 30days
732 parser='int')
732 parser='int')
733 settings_maker.make_setting(
733 settings_maker.make_setting(
734 'rc_cache.repo_object.arguments.filename',
734 'rc_cache.repo_object.arguments.filename',
735 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
735 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
736 parser='string')
736 parser='string')
737
737
738 # statsd
738 # statsd
739 settings_maker.make_setting('statsd.enabled', False, parser='bool')
739 settings_maker.make_setting('statsd.enabled', False, parser='bool')
740 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
740 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
741 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
741 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
742 settings_maker.make_setting('statsd.statsd_prefix', '')
742 settings_maker.make_setting('statsd.statsd_prefix', '')
743 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
743 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
744
744
745 settings_maker.env_expand()
745 settings_maker.env_expand()
746
746
747
747
748 def main(global_config, **settings):
748 def main(global_config, **settings):
749 start_time = time.time()
749 start_time = time.time()
750 log.info('Pyramid app config starting')
750 log.info('Pyramid app config starting')
751
751
752 if MercurialFactory:
752 if MercurialFactory:
753 hgpatches.patch_largefiles_capabilities()
753 hgpatches.patch_largefiles_capabilities()
754 hgpatches.patch_subrepo_type_mapping()
754 hgpatches.patch_subrepo_type_mapping()
755
755
756 # Fill in and sanitize the defaults & do ENV expansion
756 # Fill in and sanitize the defaults & do ENV expansion
757 sanitize_settings_and_apply_defaults(global_config, settings)
757 sanitize_settings_and_apply_defaults(global_config, settings)
758
758
759 # init and bootstrap StatsdClient
759 # init and bootstrap StatsdClient
760 StatsdClient.setup(settings)
760 StatsdClient.setup(settings)
761
761
762 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
762 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
763 total_time = time.time() - start_time
763 total_time = time.time() - start_time
764 log.info('Pyramid app created and configured in %.2fs', total_time)
764 log.info('Pyramid app created and configured in %.2fs', total_time)
765 return pyramid_app
765 return pyramid_app
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,26 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 # This package contains non rhodecode licensed packages that are
18 # This package contains non rhodecode licensed packages that are
19 # vendored for various reasons
19 # vendored for various reasons
20
20
21 import os
21 import os
22 import sys
22 import sys
23
23
24 vendor_dir = os.path.abspath(os.path.dirname(__file__))
24 vendor_dir = os.path.abspath(os.path.dirname(__file__))
25
25
26 sys.path.append(vendor_dir)
26 sys.path.append(vendor_dir)
@@ -1,58 +1,58 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 import os
18 import os
19 import vcsserver
19 import vcsserver
20 import vcsserver.settings
20 import vcsserver.settings
21
21
22
22
23 def get_config(ini_path, **kwargs):
23 def get_config(ini_path, **kwargs):
24 import configparser
24 import configparser
25 parser = configparser.ConfigParser(**kwargs)
25 parser = configparser.ConfigParser(**kwargs)
26 parser.read(ini_path)
26 parser.read(ini_path)
27 return parser
27 return parser
28
28
29
29
30 def get_app_config_lightweight(ini_path):
30 def get_app_config_lightweight(ini_path):
31 parser = get_config(ini_path)
31 parser = get_config(ini_path)
32 parser.set('app:main', 'here', os.getcwd())
32 parser.set('app:main', 'here', os.getcwd())
33 parser.set('app:main', '__file__', ini_path)
33 parser.set('app:main', '__file__', ini_path)
34 return dict(parser.items('app:main'))
34 return dict(parser.items('app:main'))
35
35
36
36
37 def get_app_config(ini_path):
37 def get_app_config(ini_path):
38 """
38 """
39 This loads the app context and provides a heavy type iniliaziation of config
39 This loads the app context and provides a heavy type iniliaziation of config
40 """
40 """
41 from paste.deploy.loadwsgi import appconfig
41 from paste.deploy.loadwsgi import appconfig
42 return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
42 return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
43
43
44
44
45 def configure_and_store_settings(global_config, app_settings):
45 def configure_and_store_settings(global_config, app_settings):
46 """
46 """
47 Configure the settings module.
47 Configure the settings module.
48 """
48 """
49 settings_merged = global_config.copy()
49 settings_merged = global_config.copy()
50 settings_merged.update(app_settings)
50 settings_merged.update(app_settings)
51
51
52 binary_dir = app_settings['core.binary_dir']
52 binary_dir = app_settings['core.binary_dir']
53
53
54 vcsserver.settings.BINARY_DIR = binary_dir
54 vcsserver.settings.BINARY_DIR = binary_dir
55
55
56 # Store the settings to make them available to other modules.
56 # Store the settings to make them available to other modules.
57 vcsserver.PYRAMID_SETTINGS = settings_merged
57 vcsserver.PYRAMID_SETTINGS = settings_merged
58 vcsserver.CONFIG = settings_merged
58 vcsserver.CONFIG = settings_merged
@@ -1,273 +1,273 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import time
20 import time
21 import sys
21 import sys
22 import datetime
22 import datetime
23 import msgpack
23 import msgpack
24 import logging
24 import logging
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 import glob
27 import glob
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
31 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
32 global_prefix = 'vcsserver'
32 global_prefix = 'vcsserver'
33 exc_store_dir_name = 'rc_exception_store_v1'
33 exc_store_dir_name = 'rc_exception_store_v1'
34
34
35
35
36 def exc_serialize(exc_id, tb, exc_type, extra_data=None):
36 def exc_serialize(exc_id, tb, exc_type, extra_data=None):
37 data = {
37 data = {
38 "version": "v1",
38 "version": "v1",
39 "exc_id": exc_id,
39 "exc_id": exc_id,
40 "exc_utc_date": datetime.datetime.utcnow().isoformat(),
40 "exc_utc_date": datetime.datetime.utcnow().isoformat(),
41 "exc_timestamp": repr(time.time()),
41 "exc_timestamp": repr(time.time()),
42 "exc_message": tb,
42 "exc_message": tb,
43 "exc_type": exc_type,
43 "exc_type": exc_type,
44 }
44 }
45 if extra_data:
45 if extra_data:
46 data.update(extra_data)
46 data.update(extra_data)
47 return msgpack.packb(data), data
47 return msgpack.packb(data), data
48
48
49
49
50 def exc_unserialize(tb):
50 def exc_unserialize(tb):
51 return msgpack.unpackb(tb)
51 return msgpack.unpackb(tb)
52
52
53
53
54 _exc_store = None
54 _exc_store = None
55
55
56
56
57 def get_exc_store():
57 def get_exc_store():
58 """
58 """
59 Get and create exception store if it's not existing
59 Get and create exception store if it's not existing
60 """
60 """
61 global _exc_store
61 global _exc_store
62
62
63 if _exc_store is not None:
63 if _exc_store is not None:
64 # quick global cache
64 # quick global cache
65 return _exc_store
65 return _exc_store
66
66
67 import vcsserver as app
67 import vcsserver as app
68
68
69 exc_store_dir = (
69 exc_store_dir = (
70 app.CONFIG.get("exception_tracker.store_path", "") or tempfile.gettempdir()
70 app.CONFIG.get("exception_tracker.store_path", "") or tempfile.gettempdir()
71 )
71 )
72 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
72 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
73
73
74 _exc_store_path = os.path.abspath(_exc_store_path)
74 _exc_store_path = os.path.abspath(_exc_store_path)
75 if not os.path.isdir(_exc_store_path):
75 if not os.path.isdir(_exc_store_path):
76 os.makedirs(_exc_store_path)
76 os.makedirs(_exc_store_path)
77 log.debug("Initializing exceptions store at %s", _exc_store_path)
77 log.debug("Initializing exceptions store at %s", _exc_store_path)
78 _exc_store = _exc_store_path
78 _exc_store = _exc_store_path
79
79
80 return _exc_store_path
80 return _exc_store_path
81
81
82
82
83 def get_detailed_tb(exc_info):
83 def get_detailed_tb(exc_info):
84 try:
84 try:
85 from pip._vendor.rich import (
85 from pip._vendor.rich import (
86 traceback as rich_tb,
86 traceback as rich_tb,
87 scope as rich_scope,
87 scope as rich_scope,
88 console as rich_console,
88 console as rich_console,
89 )
89 )
90 except ImportError:
90 except ImportError:
91 try:
91 try:
92 from rich import (
92 from rich import (
93 traceback as rich_tb,
93 traceback as rich_tb,
94 scope as rich_scope,
94 scope as rich_scope,
95 console as rich_console,
95 console as rich_console,
96 )
96 )
97 except ImportError:
97 except ImportError:
98 return None
98 return None
99
99
100 console = rich_console.Console(width=160, file=io.StringIO())
100 console = rich_console.Console(width=160, file=io.StringIO())
101
101
102 exc = rich_tb.Traceback.extract(*exc_info, show_locals=True)
102 exc = rich_tb.Traceback.extract(*exc_info, show_locals=True)
103
103
104 tb_rich = rich_tb.Traceback(
104 tb_rich = rich_tb.Traceback(
105 trace=exc,
105 trace=exc,
106 width=160,
106 width=160,
107 extra_lines=3,
107 extra_lines=3,
108 theme=None,
108 theme=None,
109 word_wrap=False,
109 word_wrap=False,
110 show_locals=False,
110 show_locals=False,
111 max_frames=100,
111 max_frames=100,
112 )
112 )
113
113
114 # last_stack = exc.stacks[-1]
114 # last_stack = exc.stacks[-1]
115 # last_frame = last_stack.frames[-1]
115 # last_frame = last_stack.frames[-1]
116 # if last_frame and last_frame.locals:
116 # if last_frame and last_frame.locals:
117 # console.print(
117 # console.print(
118 # rich_scope.render_scope(
118 # rich_scope.render_scope(
119 # last_frame.locals,
119 # last_frame.locals,
120 # title=f'{last_frame.filename}:{last_frame.lineno}'))
120 # title=f'{last_frame.filename}:{last_frame.lineno}'))
121
121
122 console.print(tb_rich)
122 console.print(tb_rich)
123 formatted_locals = console.file.getvalue()
123 formatted_locals = console.file.getvalue()
124
124
125 return formatted_locals
125 return formatted_locals
126
126
127
127
128 def get_request_metadata(request=None) -> dict:
128 def get_request_metadata(request=None) -> dict:
129 request_metadata = {}
129 request_metadata = {}
130 if not request:
130 if not request:
131 from pyramid.threadlocal import get_current_request
131 from pyramid.threadlocal import get_current_request
132
132
133 request = get_current_request()
133 request = get_current_request()
134
134
135 # NOTE(marcink): store request information into exc_data
135 # NOTE(marcink): store request information into exc_data
136 if request:
136 if request:
137 request_metadata["client_address"] = getattr(request, "client_addr", "")
137 request_metadata["client_address"] = getattr(request, "client_addr", "")
138 request_metadata["user_agent"] = getattr(request, "user_agent", "")
138 request_metadata["user_agent"] = getattr(request, "user_agent", "")
139 request_metadata["method"] = getattr(request, "method", "")
139 request_metadata["method"] = getattr(request, "method", "")
140 request_metadata["url"] = getattr(request, "url", "")
140 request_metadata["url"] = getattr(request, "url", "")
141 return request_metadata
141 return request_metadata
142
142
143
143
144 def format_exc(exc_info, use_detailed_tb=True):
144 def format_exc(exc_info, use_detailed_tb=True):
145 exc_type, exc_value, exc_traceback = exc_info
145 exc_type, exc_value, exc_traceback = exc_info
146 tb = "++ TRACEBACK ++\n\n"
146 tb = "++ TRACEBACK ++\n\n"
147 tb += "".join(traceback.format_exception(exc_type, exc_value, exc_traceback, None))
147 tb += "".join(traceback.format_exception(exc_type, exc_value, exc_traceback, None))
148
148
149 detailed_tb = getattr(exc_value, "_org_exc_tb", None)
149 detailed_tb = getattr(exc_value, "_org_exc_tb", None)
150
150
151 if detailed_tb:
151 if detailed_tb:
152 remote_tb = detailed_tb
152 remote_tb = detailed_tb
153 if isinstance(detailed_tb, str):
153 if isinstance(detailed_tb, str):
154 remote_tb = [detailed_tb]
154 remote_tb = [detailed_tb]
155
155
156 tb += (
156 tb += (
157 "\n+++ BEG SOURCE EXCEPTION +++\n\n"
157 "\n+++ BEG SOURCE EXCEPTION +++\n\n"
158 "{}\n"
158 "{}\n"
159 "+++ END SOURCE EXCEPTION +++\n"
159 "+++ END SOURCE EXCEPTION +++\n"
160 "".format("\n".join(remote_tb))
160 "".format("\n".join(remote_tb))
161 )
161 )
162
162
163 # Avoid that remote_tb also appears in the frame
163 # Avoid that remote_tb also appears in the frame
164 del remote_tb
164 del remote_tb
165
165
166 if use_detailed_tb:
166 if use_detailed_tb:
167 locals_tb = get_detailed_tb(exc_info)
167 locals_tb = get_detailed_tb(exc_info)
168 if locals_tb:
168 if locals_tb:
169 tb += f"\n+++ DETAILS +++\n\n{locals_tb}\n" ""
169 tb += f"\n+++ DETAILS +++\n\n{locals_tb}\n" ""
170 return tb
170 return tb
171
171
172
172
173 def _store_exception(exc_id, exc_info, prefix, request_path=''):
173 def _store_exception(exc_id, exc_info, prefix, request_path=''):
174 """
174 """
175 Low level function to store exception in the exception tracker
175 Low level function to store exception in the exception tracker
176 """
176 """
177
177
178 extra_data = {}
178 extra_data = {}
179 extra_data.update(get_request_metadata())
179 extra_data.update(get_request_metadata())
180
180
181 exc_type, exc_value, exc_traceback = exc_info
181 exc_type, exc_value, exc_traceback = exc_info
182 tb = format_exc(exc_info)
182 tb = format_exc(exc_info)
183
183
184 exc_type_name = exc_type.__name__
184 exc_type_name = exc_type.__name__
185 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name, extra_data=extra_data)
185 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name, extra_data=extra_data)
186
186
187 exc_pref_id = f"{exc_id}_{prefix}_{org_data['exc_timestamp']}"
187 exc_pref_id = f"{exc_id}_{prefix}_{org_data['exc_timestamp']}"
188 exc_store_path = get_exc_store()
188 exc_store_path = get_exc_store()
189 if not os.path.isdir(exc_store_path):
189 if not os.path.isdir(exc_store_path):
190 os.makedirs(exc_store_path)
190 os.makedirs(exc_store_path)
191 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
191 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
192 with open(stored_exc_path, "wb") as f:
192 with open(stored_exc_path, "wb") as f:
193 f.write(exc_data)
193 f.write(exc_data)
194 log.debug("Stored generated exception %s as: %s", exc_id, stored_exc_path)
194 log.debug("Stored generated exception %s as: %s", exc_id, stored_exc_path)
195
195
196 if request_path:
196 if request_path:
197 log.error(
197 log.error(
198 'error occurred handling this request.\n'
198 'error occurred handling this request.\n'
199 'Path: `%s`, %s',
199 'Path: `%s`, %s',
200 request_path, tb)
200 request_path, tb)
201
201
202
202
203 def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
203 def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
204 """
204 """
205 Example usage::
205 Example usage::
206
206
207 exc_info = sys.exc_info()
207 exc_info = sys.exc_info()
208 store_exception(id(exc_info), exc_info)
208 store_exception(id(exc_info), exc_info)
209 """
209 """
210
210
211 try:
211 try:
212 exc_type = exc_info[0]
212 exc_type = exc_info[0]
213 exc_type_name = exc_type.__name__
213 exc_type_name = exc_type.__name__
214
214
215 _store_exception(
215 _store_exception(
216 exc_id=exc_id, exc_info=exc_info, prefix=prefix, request_path=request_path,
216 exc_id=exc_id, exc_info=exc_info, prefix=prefix, request_path=request_path,
217 )
217 )
218 return exc_id, exc_type_name
218 return exc_id, exc_type_name
219 except Exception:
219 except Exception:
220 log.exception("Failed to store exception `%s` information", exc_id)
220 log.exception("Failed to store exception `%s` information", exc_id)
221 # there's no way this can fail, it will crash server badly if it does.
221 # there's no way this can fail, it will crash server badly if it does.
222 pass
222 pass
223
223
224
224
225 def _find_exc_file(exc_id, prefix=global_prefix):
225 def _find_exc_file(exc_id, prefix=global_prefix):
226 exc_store_path = get_exc_store()
226 exc_store_path = get_exc_store()
227 if prefix:
227 if prefix:
228 exc_id = f"{exc_id}_{prefix}"
228 exc_id = f"{exc_id}_{prefix}"
229 else:
229 else:
230 # search without a prefix
230 # search without a prefix
231 exc_id = f"{exc_id}"
231 exc_id = f"{exc_id}"
232
232
233 found_exc_id = None
233 found_exc_id = None
234 matches = glob.glob(os.path.join(exc_store_path, exc_id) + "*")
234 matches = glob.glob(os.path.join(exc_store_path, exc_id) + "*")
235 if matches:
235 if matches:
236 found_exc_id = matches[0]
236 found_exc_id = matches[0]
237
237
238 return found_exc_id
238 return found_exc_id
239
239
240
240
241 def _read_exception(exc_id, prefix):
241 def _read_exception(exc_id, prefix):
242 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
242 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
243 if exc_id_file_path:
243 if exc_id_file_path:
244 with open(exc_id_file_path, "rb") as f:
244 with open(exc_id_file_path, "rb") as f:
245 return exc_unserialize(f.read())
245 return exc_unserialize(f.read())
246 else:
246 else:
247 log.debug("Exception File `%s` not found", exc_id_file_path)
247 log.debug("Exception File `%s` not found", exc_id_file_path)
248 return None
248 return None
249
249
250
250
251 def read_exception(exc_id, prefix=global_prefix):
251 def read_exception(exc_id, prefix=global_prefix):
252 try:
252 try:
253 return _read_exception(exc_id=exc_id, prefix=prefix)
253 return _read_exception(exc_id=exc_id, prefix=prefix)
254 except Exception:
254 except Exception:
255 log.exception("Failed to read exception `%s` information", exc_id)
255 log.exception("Failed to read exception `%s` information", exc_id)
256 # there's no way this can fail, it will crash server badly if it does.
256 # there's no way this can fail, it will crash server badly if it does.
257 return None
257 return None
258
258
259
259
260 def delete_exception(exc_id, prefix=global_prefix):
260 def delete_exception(exc_id, prefix=global_prefix):
261 try:
261 try:
262 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
262 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
263 if exc_id_file_path:
263 if exc_id_file_path:
264 os.remove(exc_id_file_path)
264 os.remove(exc_id_file_path)
265
265
266 except Exception:
266 except Exception:
267 log.exception("Failed to remove exception `%s` information", exc_id)
267 log.exception("Failed to remove exception `%s` information", exc_id)
268 # there's no way this can fail, it will crash server badly if it does.
268 # there's no way this can fail, it will crash server badly if it does.
269 pass
269 pass
270
270
271
271
272 def generate_id():
272 def generate_id():
273 return id(object())
273 return id(object())
@@ -1,53 +1,53 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import hashlib
18 import hashlib
19 from vcsserver.lib.str_utils import safe_bytes, safe_str
19 from vcsserver.lib.str_utils import safe_bytes, safe_str
20
20
21
21
22 def md5(s):
22 def md5(s):
23 return hashlib.md5(s).hexdigest()
23 return hashlib.md5(s).hexdigest()
24
24
25
25
26 def md5_safe(s, return_type=''):
26 def md5_safe(s, return_type=''):
27
27
28 val = md5(safe_bytes(s))
28 val = md5(safe_bytes(s))
29 if return_type == 'str':
29 if return_type == 'str':
30 val = safe_str(val)
30 val = safe_str(val)
31 return val
31 return val
32
32
33
33
34 def sha1(s):
34 def sha1(s):
35 return hashlib.sha1(s).hexdigest()
35 return hashlib.sha1(s).hexdigest()
36
36
37
37
38 def sha1_safe(s, return_type=''):
38 def sha1_safe(s, return_type=''):
39 val = sha1(safe_bytes(s))
39 val = sha1(safe_bytes(s))
40 if return_type == 'str':
40 if return_type == 'str':
41 val = safe_str(val)
41 val = safe_str(val)
42 return val
42 return val
43
43
44
44
45 def sha256(s):
45 def sha256(s):
46 return hashlib.sha256(s).hexdigest()
46 return hashlib.sha256(s).hexdigest()
47
47
48
48
49 def sha256_safe(s, return_type=''):
49 def sha256_safe(s, return_type=''):
50 val = sha256(safe_bytes(s))
50 val = sha256(safe_bytes(s))
51 if return_type == 'str':
51 if return_type == 'str':
52 val = safe_str(val)
52 val = safe_str(val)
53 return val
53 return val
@@ -1,63 +1,63 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import sys
19 import sys
20 import logging
20 import logging
21
21
22
22
23 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(30, 38))
23 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(30, 38))
24
24
25 # Sequences
25 # Sequences
26 RESET_SEQ = "\033[0m"
26 RESET_SEQ = "\033[0m"
27 COLOR_SEQ = "\033[0;%dm"
27 COLOR_SEQ = "\033[0;%dm"
28 BOLD_SEQ = "\033[1m"
28 BOLD_SEQ = "\033[1m"
29
29
30 COLORS = {
30 COLORS = {
31 'CRITICAL': MAGENTA,
31 'CRITICAL': MAGENTA,
32 'ERROR': RED,
32 'ERROR': RED,
33 'WARNING': CYAN,
33 'WARNING': CYAN,
34 'INFO': GREEN,
34 'INFO': GREEN,
35 'DEBUG': BLUE,
35 'DEBUG': BLUE,
36 'SQL': YELLOW
36 'SQL': YELLOW
37 }
37 }
38
38
39
39
40 def _inject_req_id(record, *args, **kwargs):
40 def _inject_req_id(record, *args, **kwargs):
41 return record
41 return record
42
42
43
43
44 class ExceptionAwareFormatter(logging.Formatter):
44 class ExceptionAwareFormatter(logging.Formatter):
45 pass
45 pass
46
46
47
47
48 class ColorFormatter(logging.Formatter):
48 class ColorFormatter(logging.Formatter):
49
49
50 def format(self, record):
50 def format(self, record):
51 """
51 """
52 Changes record's levelname to use with COLORS enum
52 Changes record's levelname to use with COLORS enum
53 """
53 """
54 def_record = super().format(record)
54 def_record = super().format(record)
55
55
56 levelname = record.levelname
56 levelname = record.levelname
57 start = COLOR_SEQ % (COLORS[levelname])
57 start = COLOR_SEQ % (COLORS[levelname])
58 end = RESET_SEQ
58 end = RESET_SEQ
59
59
60 colored_record = ''.join([start, def_record, end])
60 colored_record = ''.join([start, def_record, end])
61 return colored_record
61 return colored_record
62
62
63
63
@@ -1,63 +1,63 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import logging
19 import logging
20
20
21 from repoze.lru import LRUCache
21 from repoze.lru import LRUCache
22
22
23 from vcsserver.lib.str_utils import safe_str
23 from vcsserver.lib.str_utils import safe_str
24
24
25 log = logging.getLogger(__name__)
25 log = logging.getLogger(__name__)
26
26
27
27
28 class LRUDict(LRUCache):
28 class LRUDict(LRUCache):
29 """
29 """
30 Wrapper to provide partial dict access
30 Wrapper to provide partial dict access
31 """
31 """
32
32
33 def __setitem__(self, key, value):
33 def __setitem__(self, key, value):
34 return self.put(key, value)
34 return self.put(key, value)
35
35
36 def __getitem__(self, key):
36 def __getitem__(self, key):
37 return self.get(key)
37 return self.get(key)
38
38
39 def __contains__(self, key):
39 def __contains__(self, key):
40 return bool(self.get(key))
40 return bool(self.get(key))
41
41
42 def __delitem__(self, key):
42 def __delitem__(self, key):
43 del self.data[key]
43 del self.data[key]
44
44
45 def keys(self):
45 def keys(self):
46 return list(self.data.keys())
46 return list(self.data.keys())
47
47
48
48
49 class LRUDictDebug(LRUDict):
49 class LRUDictDebug(LRUDict):
50 """
50 """
51 Wrapper to provide some debug options
51 Wrapper to provide some debug options
52 """
52 """
53 def _report_keys(self):
53 def _report_keys(self):
54 elems_cnt = f'{len(list(self.keys()))}/{self.size}'
54 elems_cnt = f'{len(list(self.keys()))}/{self.size}'
55 # trick for pformat print it more nicely
55 # trick for pformat print it more nicely
56 fmt = '\n'
56 fmt = '\n'
57 for cnt, elem in enumerate(self.keys()):
57 for cnt, elem in enumerate(self.keys()):
58 fmt += f'{cnt+1} - {safe_str(elem)}\n'
58 fmt += f'{cnt+1} - {safe_str(elem)}\n'
59 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
59 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
60
60
61 def __getitem__(self, key):
61 def __getitem__(self, key):
62 self._report_keys()
62 self._report_keys()
63 return self.get(key)
63 return self.get(key)
@@ -1,114 +1,114 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import threading
19 import threading
20
20
21 from dogpile.cache import register_backend
21 from dogpile.cache import register_backend
22
22
23 from . import region_meta
23 from . import region_meta
24 from .utils import (
24 from .utils import (
25 backend_key_generator,
25 backend_key_generator,
26 clear_cache_namespace,
26 clear_cache_namespace,
27 get_default_cache_settings,
27 get_default_cache_settings,
28 get_or_create_region,
28 get_or_create_region,
29 make_region,
29 make_region,
30 str2bool,
30 str2bool,
31 )
31 )
32
32
33 module_name = 'vcsserver'
33 module_name = 'vcsserver'
34
34
35 register_backend(
35 register_backend(
36 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
36 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
37 "LRUMemoryBackend")
37 "LRUMemoryBackend")
38
38
39 register_backend(
39 register_backend(
40 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
40 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
41 "FileNamespaceBackend")
41 "FileNamespaceBackend")
42
42
43 register_backend(
43 register_backend(
44 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
44 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
45 "RedisPickleBackend")
45 "RedisPickleBackend")
46
46
47 register_backend(
47 register_backend(
48 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
48 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
49 "RedisMsgPackBackend")
49 "RedisMsgPackBackend")
50
50
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 CACHE_OBJ_CACHE_VER = 'v2'
55 CACHE_OBJ_CACHE_VER = 'v2'
56
56
57 CLEAR_DELETE = 'delete'
57 CLEAR_DELETE = 'delete'
58 CLEAR_INVALIDATE = 'invalidate'
58 CLEAR_INVALIDATE = 'invalidate'
59
59
60
60
61 def async_creation_runner(cache, cache_key, creator, mutex):
61 def async_creation_runner(cache, cache_key, creator, mutex):
62
62
63 def runner():
63 def runner():
64 try:
64 try:
65 value = creator()
65 value = creator()
66 cache.set(cache_key, value)
66 cache.set(cache_key, value)
67 finally:
67 finally:
68 mutex.release()
68 mutex.release()
69
69
70 thread = threading.Thread(target=runner)
70 thread = threading.Thread(target=runner)
71 thread.start()
71 thread.start()
72
72
73
73
74 def configure_dogpile_cache(settings):
74 def configure_dogpile_cache(settings):
75 cache_dir = settings.get('cache_dir')
75 cache_dir = settings.get('cache_dir')
76 if cache_dir:
76 if cache_dir:
77 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
77 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
78
78
79 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
79 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
80
80
81 # inspect available namespaces
81 # inspect available namespaces
82 avail_regions = set()
82 avail_regions = set()
83 for key in rc_cache_data.keys():
83 for key in rc_cache_data.keys():
84 namespace_name = key.split('.', 1)[0]
84 namespace_name = key.split('.', 1)[0]
85 if namespace_name in avail_regions:
85 if namespace_name in avail_regions:
86 continue
86 continue
87
87
88 avail_regions.add(namespace_name)
88 avail_regions.add(namespace_name)
89 log.debug('dogpile: found following cache regions: %s', namespace_name)
89 log.debug('dogpile: found following cache regions: %s', namespace_name)
90
90
91 new_region = make_region(
91 new_region = make_region(
92 name=namespace_name,
92 name=namespace_name,
93 function_key_generator=None,
93 function_key_generator=None,
94 async_creation_runner=None
94 async_creation_runner=None
95 )
95 )
96
96
97 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
97 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
98 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
98 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
99
99
100 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
100 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
101 if async_creator:
101 if async_creator:
102 log.debug('configuring region %s with async creator', new_region)
102 log.debug('configuring region %s with async creator', new_region)
103 new_region.async_creation_runner = async_creation_runner
103 new_region.async_creation_runner = async_creation_runner
104
104
105 if log.isEnabledFor(logging.DEBUG):
105 if log.isEnabledFor(logging.DEBUG):
106 region_args = dict(backend=new_region.actual_backend,
106 region_args = dict(backend=new_region.actual_backend,
107 region_invalidator=new_region.region_invalidator.__class__)
107 region_invalidator=new_region.region_invalidator.__class__)
108 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
108 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
109
109
110 region_meta.dogpile_cache_regions[namespace_name] = new_region
110 region_meta.dogpile_cache_regions[namespace_name] = new_region
111
111
112
112
113 def includeme(config):
113 def includeme(config):
114 configure_dogpile_cache(config.registry.settings)
114 configure_dogpile_cache(config.registry.settings)
@@ -1,313 +1,313 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 #import errno
18 #import errno
19 import fcntl
19 import fcntl
20 import functools
20 import functools
21 import logging
21 import logging
22 import os
22 import os
23 import pickle
23 import pickle
24 #import time
24 #import time
25
25
26 #import gevent
26 #import gevent
27 import msgpack
27 import msgpack
28 import redis
28 import redis
29
29
30 flock_org = fcntl.flock
30 flock_org = fcntl.flock
31 from typing import Union
31 from typing import Union
32
32
33 from dogpile.cache.api import Deserializer, Serializer
33 from dogpile.cache.api import Deserializer, Serializer
34 from dogpile.cache.backends import file as file_backend
34 from dogpile.cache.backends import file as file_backend
35 from dogpile.cache.backends import memory as memory_backend
35 from dogpile.cache.backends import memory as memory_backend
36 from dogpile.cache.backends import redis as redis_backend
36 from dogpile.cache.backends import redis as redis_backend
37 from dogpile.cache.backends.file import FileLock
37 from dogpile.cache.backends.file import FileLock
38 from dogpile.cache.util import memoized_property
38 from dogpile.cache.util import memoized_property
39
39
40 from ...lib.memory_lru_dict import LRUDict, LRUDictDebug
40 from ...lib.memory_lru_dict import LRUDict, LRUDictDebug
41 from ...lib.str_utils import safe_bytes, safe_str
41 from ...lib.str_utils import safe_bytes, safe_str
42 from ...lib.type_utils import str2bool
42 from ...lib.type_utils import str2bool
43
43
44 _default_max_size = 1024
44 _default_max_size = 1024
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 class LRUMemoryBackend(memory_backend.MemoryBackend):
49 class LRUMemoryBackend(memory_backend.MemoryBackend):
50 key_prefix = 'lru_mem_backend'
50 key_prefix = 'lru_mem_backend'
51 pickle_values = False
51 pickle_values = False
52
52
53 def __init__(self, arguments):
53 def __init__(self, arguments):
54 self.max_size = arguments.pop('max_size', _default_max_size)
54 self.max_size = arguments.pop('max_size', _default_max_size)
55
55
56 LRUDictClass = LRUDict
56 LRUDictClass = LRUDict
57 if arguments.pop('log_key_count', None):
57 if arguments.pop('log_key_count', None):
58 LRUDictClass = LRUDictDebug
58 LRUDictClass = LRUDictDebug
59
59
60 arguments['cache_dict'] = LRUDictClass(self.max_size)
60 arguments['cache_dict'] = LRUDictClass(self.max_size)
61 super().__init__(arguments)
61 super().__init__(arguments)
62
62
63 def __repr__(self):
63 def __repr__(self):
64 return f'{self.__class__}(maxsize=`{self.max_size}`)'
64 return f'{self.__class__}(maxsize=`{self.max_size}`)'
65
65
66 def __str__(self):
66 def __str__(self):
67 return self.__repr__()
67 return self.__repr__()
68
68
69 def delete(self, key):
69 def delete(self, key):
70 try:
70 try:
71 del self._cache[key]
71 del self._cache[key]
72 except KeyError:
72 except KeyError:
73 # we don't care if key isn't there at deletion
73 # we don't care if key isn't there at deletion
74 pass
74 pass
75
75
76 def list_keys(self, prefix):
76 def list_keys(self, prefix):
77 return list(self._cache.keys())
77 return list(self._cache.keys())
78
78
79 def delete_multi(self, keys):
79 def delete_multi(self, keys):
80 for key in keys:
80 for key in keys:
81 self.delete(key)
81 self.delete(key)
82
82
83 def delete_multi_by_prefix(self, prefix):
83 def delete_multi_by_prefix(self, prefix):
84 cache_keys = self.list_keys(prefix=prefix)
84 cache_keys = self.list_keys(prefix=prefix)
85 num_affected_keys = len(cache_keys)
85 num_affected_keys = len(cache_keys)
86 if num_affected_keys:
86 if num_affected_keys:
87 self.delete_multi(cache_keys)
87 self.delete_multi(cache_keys)
88 return num_affected_keys
88 return num_affected_keys
89
89
90
90
91 class PickleSerializer:
91 class PickleSerializer:
92 serializer: None | Serializer = staticmethod( # type: ignore
92 serializer: None | Serializer = staticmethod( # type: ignore
93 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
93 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
94 )
94 )
95 deserializer: None | Deserializer = staticmethod( # type: ignore
95 deserializer: None | Deserializer = staticmethod( # type: ignore
96 functools.partial(pickle.loads)
96 functools.partial(pickle.loads)
97 )
97 )
98
98
99
99
100 class MsgPackSerializer:
100 class MsgPackSerializer:
101 serializer: None | Serializer = staticmethod( # type: ignore
101 serializer: None | Serializer = staticmethod( # type: ignore
102 msgpack.packb
102 msgpack.packb
103 )
103 )
104 deserializer: None | Deserializer = staticmethod( # type: ignore
104 deserializer: None | Deserializer = staticmethod( # type: ignore
105 functools.partial(msgpack.unpackb, use_list=False)
105 functools.partial(msgpack.unpackb, use_list=False)
106 )
106 )
107
107
108
108
109 class CustomLockFactory(FileLock):
109 class CustomLockFactory(FileLock):
110
110
111 pass
111 pass
112
112
113
113
114 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
114 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
115 key_prefix = 'file_backend'
115 key_prefix = 'file_backend'
116
116
117 def __init__(self, arguments):
117 def __init__(self, arguments):
118 arguments['lock_factory'] = CustomLockFactory
118 arguments['lock_factory'] = CustomLockFactory
119 db_file = arguments.get('filename')
119 db_file = arguments.get('filename')
120
120
121 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
121 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
122 db_file_dir = os.path.dirname(db_file)
122 db_file_dir = os.path.dirname(db_file)
123 if not os.path.isdir(db_file_dir):
123 if not os.path.isdir(db_file_dir):
124 os.makedirs(db_file_dir)
124 os.makedirs(db_file_dir)
125
125
126 try:
126 try:
127 super().__init__(arguments)
127 super().__init__(arguments)
128 except Exception:
128 except Exception:
129 log.exception('Failed to initialize db at: %s', db_file)
129 log.exception('Failed to initialize db at: %s', db_file)
130 raise
130 raise
131
131
132 def __repr__(self):
132 def __repr__(self):
133 return f'{self.__class__}(file=`{self.filename}`)'
133 return f'{self.__class__}(file=`{self.filename}`)'
134
134
135 def __str__(self):
135 def __str__(self):
136 return self.__repr__()
136 return self.__repr__()
137
137
138 def _get_keys_pattern(self, prefix: bytes = b''):
138 def _get_keys_pattern(self, prefix: bytes = b''):
139 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
139 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
140
140
141 def list_keys(self, prefix: bytes = b''):
141 def list_keys(self, prefix: bytes = b''):
142 prefix = self._get_keys_pattern(prefix)
142 prefix = self._get_keys_pattern(prefix)
143
143
144 def cond(dbm_key: bytes):
144 def cond(dbm_key: bytes):
145 if not prefix:
145 if not prefix:
146 return True
146 return True
147
147
148 if dbm_key.startswith(prefix):
148 if dbm_key.startswith(prefix):
149 return True
149 return True
150 return False
150 return False
151
151
152 with self._dbm_file(True) as dbm:
152 with self._dbm_file(True) as dbm:
153 try:
153 try:
154 return list(filter(cond, dbm.keys()))
154 return list(filter(cond, dbm.keys()))
155 except Exception:
155 except Exception:
156 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
156 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
157 raise
157 raise
158
158
159 def delete_multi_by_prefix(self, prefix):
159 def delete_multi_by_prefix(self, prefix):
160 cache_keys = self.list_keys(prefix=prefix)
160 cache_keys = self.list_keys(prefix=prefix)
161 num_affected_keys = len(cache_keys)
161 num_affected_keys = len(cache_keys)
162 if num_affected_keys:
162 if num_affected_keys:
163 self.delete_multi(cache_keys)
163 self.delete_multi(cache_keys)
164 return num_affected_keys
164 return num_affected_keys
165
165
166 def get_store(self):
166 def get_store(self):
167 return self.filename
167 return self.filename
168
168
169 def cleanup_store(self):
169 def cleanup_store(self):
170 for ext in ("db", "dat", "pag", "dir"):
170 for ext in ("db", "dat", "pag", "dir"):
171 final_filename = self.filename + os.extsep + ext
171 final_filename = self.filename + os.extsep + ext
172 if os.path.exists(final_filename):
172 if os.path.exists(final_filename):
173 os.remove(final_filename)
173 os.remove(final_filename)
174 log.warning('Removed dbm file %s', final_filename)
174 log.warning('Removed dbm file %s', final_filename)
175
175
176
176
177 class BaseRedisBackend(redis_backend.RedisBackend):
177 class BaseRedisBackend(redis_backend.RedisBackend):
178 key_prefix = ''
178 key_prefix = ''
179
179
180 def __init__(self, arguments):
180 def __init__(self, arguments):
181 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
181 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
182 super().__init__(arguments)
182 super().__init__(arguments)
183
183
184 self._lock_timeout = self.lock_timeout
184 self._lock_timeout = self.lock_timeout
185 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
185 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
186
186
187 self._store_key_prefix = arguments.pop('key_prefix', '')
187 self._store_key_prefix = arguments.pop('key_prefix', '')
188 self.key_prefix = f'{self._store_key_prefix}{self.key_prefix}'
188 self.key_prefix = f'{self._store_key_prefix}{self.key_prefix}'
189
189
190 if self._lock_auto_renewal and not self._lock_timeout:
190 if self._lock_auto_renewal and not self._lock_timeout:
191 # set default timeout for auto_renewal
191 # set default timeout for auto_renewal
192 self._lock_timeout = 30
192 self._lock_timeout = 30
193
193
194 def __repr__(self):
194 def __repr__(self):
195 return f'{self.__class__}(conn=`{self.db_conn}`)'
195 return f'{self.__class__}(conn=`{self.db_conn}`)'
196
196
197 def __str__(self):
197 def __str__(self):
198 return self.__repr__()
198 return self.__repr__()
199
199
200 def _create_client(self):
200 def _create_client(self):
201 args = {}
201 args = {}
202
202
203 if self.url is not None:
203 if self.url is not None:
204 args.update(url=self.url)
204 args.update(url=self.url)
205
205
206 else:
206 else:
207 args.update(
207 args.update(
208 host=self.host, password=self.password,
208 host=self.host, password=self.password,
209 port=self.port, db=self.db
209 port=self.port, db=self.db
210 )
210 )
211
211
212 connection_pool = redis.ConnectionPool(**args)
212 connection_pool = redis.ConnectionPool(**args)
213 self.writer_client = redis.StrictRedis(
213 self.writer_client = redis.StrictRedis(
214 connection_pool=connection_pool
214 connection_pool=connection_pool
215 )
215 )
216 self.reader_client = self.writer_client
216 self.reader_client = self.writer_client
217
217
218 def _get_keys_pattern(self, prefix: bytes = b''):
218 def _get_keys_pattern(self, prefix: bytes = b''):
219 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
219 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
220
220
221 def list_keys(self, prefix: bytes = b''):
221 def list_keys(self, prefix: bytes = b''):
222 prefix = self._get_keys_pattern(prefix)
222 prefix = self._get_keys_pattern(prefix)
223 return self.reader_client.keys(prefix)
223 return self.reader_client.keys(prefix)
224
224
225 def delete_multi_by_prefix(self, prefix, use_lua=False):
225 def delete_multi_by_prefix(self, prefix, use_lua=False):
226 if use_lua:
226 if use_lua:
227 # high efficient LUA script to delete ALL keys by prefix...
227 # high efficient LUA script to delete ALL keys by prefix...
228 lua = """local keys = redis.call('keys', ARGV[1])
228 lua = """local keys = redis.call('keys', ARGV[1])
229 for i=1,#keys,5000 do
229 for i=1,#keys,5000 do
230 redis.call('del', unpack(keys, i, math.min(i+(5000-1), #keys)))
230 redis.call('del', unpack(keys, i, math.min(i+(5000-1), #keys)))
231 end
231 end
232 return #keys"""
232 return #keys"""
233 num_affected_keys = self.writer_client.eval(
233 num_affected_keys = self.writer_client.eval(
234 lua,
234 lua,
235 0,
235 0,
236 f"{prefix}*")
236 f"{prefix}*")
237 else:
237 else:
238 cache_keys = self.list_keys(prefix=prefix)
238 cache_keys = self.list_keys(prefix=prefix)
239 num_affected_keys = len(cache_keys)
239 num_affected_keys = len(cache_keys)
240 if num_affected_keys:
240 if num_affected_keys:
241 self.delete_multi(cache_keys)
241 self.delete_multi(cache_keys)
242 return num_affected_keys
242 return num_affected_keys
243
243
244 def get_store(self):
244 def get_store(self):
245 return self.reader_client.connection_pool
245 return self.reader_client.connection_pool
246
246
247 def get_mutex(self, key):
247 def get_mutex(self, key):
248 if self.distributed_lock:
248 if self.distributed_lock:
249 lock_key = f'{self._store_key_prefix}_lock_{safe_str(key)}'
249 lock_key = f'{self._store_key_prefix}_lock_{safe_str(key)}'
250 return get_mutex_lock(
250 return get_mutex_lock(
251 self.writer_client, lock_key,
251 self.writer_client, lock_key,
252 self._lock_timeout,
252 self._lock_timeout,
253 auto_renewal=self._lock_auto_renewal
253 auto_renewal=self._lock_auto_renewal
254 )
254 )
255 else:
255 else:
256 return None
256 return None
257
257
258
258
259 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
259 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
260 key_prefix = 'redis_pickle_backend'
260 key_prefix = 'redis_pickle_backend'
261 pass
261 pass
262
262
263
263
264 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
264 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
265 key_prefix = 'redis_msgpack_backend'
265 key_prefix = 'redis_msgpack_backend'
266 pass
266 pass
267
267
268
268
269 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
269 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
270 from ...lib._vendor import redis_lock
270 from ...lib._vendor import redis_lock
271
271
272 class _RedisLockWrapper:
272 class _RedisLockWrapper:
273 """LockWrapper for redis_lock"""
273 """LockWrapper for redis_lock"""
274
274
275 @classmethod
275 @classmethod
276 def get_lock(cls):
276 def get_lock(cls):
277 return redis_lock.Lock(
277 return redis_lock.Lock(
278 redis_client=client,
278 redis_client=client,
279 name=lock_key,
279 name=lock_key,
280 expire=lock_timeout,
280 expire=lock_timeout,
281 auto_renewal=auto_renewal,
281 auto_renewal=auto_renewal,
282 strict=True,
282 strict=True,
283 )
283 )
284
284
285 def __repr__(self):
285 def __repr__(self):
286 return f"{self.__class__.__name__}:{lock_key}"
286 return f"{self.__class__.__name__}:{lock_key}"
287
287
288 def __str__(self):
288 def __str__(self):
289 return f"{self.__class__.__name__}:{lock_key}"
289 return f"{self.__class__.__name__}:{lock_key}"
290
290
291 def __init__(self):
291 def __init__(self):
292 self.lock = self.get_lock()
292 self.lock = self.get_lock()
293 self.lock_key = lock_key
293 self.lock_key = lock_key
294
294
295 def acquire(self, wait=True):
295 def acquire(self, wait=True):
296 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
296 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
297 try:
297 try:
298 acquired = self.lock.acquire(wait)
298 acquired = self.lock.acquire(wait)
299 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
299 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
300 return acquired
300 return acquired
301 except redis_lock.AlreadyAcquired:
301 except redis_lock.AlreadyAcquired:
302 return False
302 return False
303 except redis_lock.AlreadyStarted:
303 except redis_lock.AlreadyStarted:
304 # refresh thread exists, but it also means we acquired the lock
304 # refresh thread exists, but it also means we acquired the lock
305 return True
305 return True
306
306
307 def release(self):
307 def release(self):
308 try:
308 try:
309 self.lock.release()
309 self.lock.release()
310 except redis_lock.NotAcquired:
310 except redis_lock.NotAcquired:
311 pass
311 pass
312
312
313 return _RedisLockWrapper()
313 return _RedisLockWrapper()
@@ -1,26 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import tempfile
19 import tempfile
20
20
# Defaults applied to every dogpile cache region unless overridden in config.
dogpile_config_defaults = dict(
    cache_dir=os.path.join(tempfile.gettempdir(), 'rc_cache'),
)

# GLOBAL TO STORE ALL REGISTERED REGIONS
dogpile_cache_regions = {}
@@ -1,243 +1,243 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import functools
18 import functools
19 import logging
19 import logging
20 import os
20 import os
21 import threading
21 import threading
22 import time
22 import time
23
23
24 import decorator
24 import decorator
25 from dogpile.cache import CacheRegion
25 from dogpile.cache import CacheRegion
26
26
27
27
28 from ...lib.hash_utils import sha1
28 from ...lib.hash_utils import sha1
29 from ...lib.str_utils import safe_bytes
29 from ...lib.str_utils import safe_bytes
30 from ...lib.type_utils import str2bool # noqa :required by imports from .utils
30 from ...lib.type_utils import str2bool # noqa :required by imports from .utils
31
31
32 from . import region_meta
32 from . import region_meta
33
33
34 log = logging.getLogger(__name__)
34 log = logging.getLogger(__name__)
35
35
36
36
class RhodeCodeCacheRegion(CacheRegion):
    """CacheRegion with a condition-aware variant of ``cache_on_arguments``."""

    def __repr__(self):
        return f'`{self.__class__.__name__}(name={self.name}, backend={self.backend.__class__})`'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different from should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        timeout_is_callable = callable(expiration_time)
        # fall back to the namespace memoized on the region by get_or_create_region
        namespace = namespace or getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
            if not condition:
                # bypass dogpile entirely and call the wrapped function directly
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)
            timeout = expiration_time() if timeout_is_callable else expiration_time
            log.debug('Calling cached (timeout=%s) method:`%s`', timeout, user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """Like invalidate, but regenerates the value instead."""
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                self.delete(key_generator(*arg, **kw))

            def set_(value, *arg, **kw):
                self.set(key_generator(*arg, **kw), value)

            def get(*arg, **kw):
                return self.get(key_generator(*arg, **kw))

            # attach cache-management helpers to the decorated function
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
117
117
118
118
def make_region(*arg, **kw):
    """Factory returning a :class:`RhodeCodeCacheRegion` with the given config."""
    return RhodeCodeCacheRegion(*arg, **kw)
121
121
122
122
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related options from *settings*.

    Keys starting with any of *prefixes* are returned with that prefix
    stripped; string values are whitespace-trimmed.

    :param settings: mapping of configuration keys to values
    :param prefixes: list of key prefixes to select and strip (default: none)
    :return: dict of stripped-key -> value
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # BUG FIX: the previous `key.split(prefix)[1]` truncated keys
                # that contained the prefix substring more than once; only the
                # leading prefix should be removed.
                name = key.removeprefix(prefix).strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
135
135
136
136
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
142
142
143
143
def custom_key_generator(backend, namespace, fn):
    """Return a key generator producing `<backend>:<namespace>:<func>_<args-hash>` keys."""
    func_name = fn.__name__

    def generate_key(*args):
        # fall back to placeholder segments when backend/namespace give none
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        return f"{prefix}:{ns}:{func_name}_{arg_key}"

    return generate_key
156
156
157
157
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    def wrapper(namespace, fn):
        # forward the captured backend along with dogpile's (namespace, fn) pair
        return custom_key_generator(backend, namespace, fn)

    return wrapper
165
165
166
166
def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False, force=False):
    """
    Return the dogpile cache region registered under *region_name*.

    For ``FileNamespaceBackend`` regions a dedicated one-file-per-namespace
    region is lazily created and memoized under *region_namespace*, so each
    namespace ends up in its own cache db file.

    :param region_name: name of a region configured in ``region_meta``
    :param region_namespace: required for file-based regions; selects the db file
    :param use_async_runner: attach the async creation runner to the region
    :param force: rebuild the per-namespace region even if one is memoized
    :raises OSError: when *region_name* is not configured
    :raises ValueError: when a file backend is used without *region_namespace*
    """
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist and not force:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however, if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        # BUG FIX: the previous isdir-check + makedirs pair raced when several
        # workers created the cache dir concurrently; exist_ok makes it atomic.
        os.makedirs(namespace_cache_dir, exist_ok=True)

        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj
223
223
224
224
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
    """Clear *cache_namespace_uid* inside *cache_region*; return the number of affected keys."""
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    if not isinstance(cache_region, RhodeCodeCacheRegion):
        # a region name was passed in; resolve it to a region object
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s [prefix:%s] with method=%s',
              cache_region, cache_namespace_uid, method)

    affected = 0

    if method == CLEAR_INVALIDATE:
        # NOTE: The CacheRegion.invalidate() method’s default mode of
        # operation is to set a timestamp local to this CacheRegion in this Python process only.
        # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
        cache_region.invalidate(hard=True)

    if method == CLEAR_DELETE:
        affected = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)

    return affected
@@ -1,25 +1,25 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
# Process-wide request counter (module global, incremented per request).
counter = 0


def get_request_counter(request):
    """Increment and return the global request counter.

    ``request`` is accepted to match the request-method signature but unused.
    """
    global counter
    counter += 1
    return counter
@@ -1,70 +1,70 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib._vendor.statsd import client_from_config
18 from vcsserver.lib._vendor.statsd import client_from_config
19
19
20
20
class StatsdClientNotInitialised(Exception):
    """Raised when statsd attributes are accessed before ``setup()`` in strict mode."""
23
23
24
24
25 class _Singleton(type):
25 class _Singleton(type):
26 """A metaclass that creates a Singleton base class when called."""
26 """A metaclass that creates a Singleton base class when called."""
27
27
28 _instances = {}
28 _instances = {}
29
29
30 def __call__(cls, *args, **kwargs):
30 def __call__(cls, *args, **kwargs):
31 if cls not in cls._instances:
31 if cls not in cls._instances:
32 cls._instances[cls] = super().__call__(*args, **kwargs)
32 cls._instances[cls] = super().__call__(*args, **kwargs)
33 return cls._instances[cls]
33 return cls._instances[cls]
34
34
35
35
class Singleton(_Singleton("SingletonMeta", (object,), {})):
    """Base class whose subclasses are singletons (one shared instance each)."""
38
38
39
39
class StatsdClientClass(Singleton):
    """Lazily-initialised singleton wrapper around the statsd client.

    Any ``statsd*`` attribute read before :meth:`setup` ran returns ``None``,
    or raises :class:`StatsdClientNotInitialised` when strict mode is enabled.
    """
    setup_run = False          # True once setup() completed successfully
    statsd_client = None       # alias kept for backwards compatibility
    statsd = None              # the configured statsd client
    strict_mode_init = False   # raise instead of returning None before setup

    def __getattribute__(self, name):
        # guard all statsd* attributes until setup() has been executed
        if name.startswith("statsd"):
            if self.setup_run:
                return super().__getattribute__(name)
            else:
                if self.strict_mode_init:
                    raise StatsdClientNotInitialised(f"requested key was {name}")
                return None

        return super().__getattribute__(name)

    def setup(self, settings):
        """
        Initialize the client
        """
        strict_init_mode = settings.pop('statsd_strict_init', False)
        # BUG FIX: the popped setting was previously never assigned, so strict
        # mode could not actually be enabled via configuration.
        self.strict_mode_init = strict_init_mode

        statsd = client_from_config(settings)
        self.statsd = statsd
        self.statsd_client = statsd
        self.setup_run = True


StatsdClient = StatsdClientClass()
@@ -1,158 +1,158 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import typing
18 import typing
19 import base64
19 import base64
20 import logging
20 import logging
21
21
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 def safe_int(val, default=None) -> int:
26 def safe_int(val, default=None) -> int:
27 """
27 """
28 Returns int() of val if val is not convertable to int use default
28 Returns int() of val if val is not convertable to int use default
29 instead
29 instead
30
30
31 :param val:
31 :param val:
32 :param default:
32 :param default:
33 """
33 """
34
34
35 try:
35 try:
36 val = int(val)
36 val = int(val)
37 except (ValueError, TypeError):
37 except (ValueError, TypeError):
38 val = default
38 val = default
39
39
40 return val
40 return val
41
41
42
42
def base64_to_str(text) -> str:
    """Base64-encode *text* and return the result as a whitespace-stripped str."""
    encoded = base64.encodebytes(safe_bytes(text))
    return safe_str(encoded).strip()
45
45
46
46
def get_default_encodings() -> list[str]:
    """Return the ordered list of encodings tried by safe_str/safe_bytes."""
    return ['utf8']
49
49
50
50
def safe_str(str_, to_encoding=None) -> str:
    """
    safe str function. Does few trick to turn unicode_ into string

    :param str_: str to encode
    :param to_encoding: encode to this type UTF8 default
    """
    if isinstance(str_, str):
        return str_

    # not text at all -> plain str() conversion
    if not isinstance(str_, bytes):
        return str(str_)

    encodings = to_encoding or get_default_encodings()
    if not isinstance(encodings, (list, tuple)):
        encodings = [encodings]

    for enc in encodings:
        try:
            return str(str_, enc)
        except UnicodeDecodeError:
            pass

    # last resort: decode with the first encoding, replacing invalid bytes
    return str(str_, encodings[0], 'replace')
76
76
77
77
def safe_bytes(str_, from_encoding=None) -> bytes:
    """
    safe bytes function. Does few trick to turn str_ into bytes string:

    :param str_: string to decode
    :param from_encoding: encode from this type UTF8 default
    :raises ValueError: when *str_* is neither str nor bytes
    """
    if isinstance(str_, bytes):
        return str_

    if not isinstance(str_, str):
        raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')

    from_encoding = from_encoding or get_default_encodings()
    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    for enc in from_encoding:
        try:
            return str_.encode(enc)
        # BUG FIX: str.encode raises UnicodeEncodeError, not UnicodeDecodeError;
        # the previous except clause could never fire, so fallback encodings and
        # the final 'replace' pass below were unreachable.
        except UnicodeEncodeError:
            pass

    return str_.encode(from_encoding[0], 'replace')
102
102
103
103
def ascii_bytes(str_, allow_bytes=False) -> bytes:
    """
    Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
    Fails with UnicodeError on invalid input.
    This should be used where encoding and "safe" ambiguity should be avoided.
    Where strings already have been encoded in other ways but still are unicode
    string - for example to hex, base64, json, urlencoding, or are known to be
    identifiers.
    """
    if allow_bytes and isinstance(str_, bytes):
        return str_
    if not isinstance(str_, str):
        raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
    # strict ascii: any non-ASCII character raises UnicodeEncodeError
    return str_.encode('ascii')
119
119
120
120
def ascii_str(str_) -> str:
    """
    Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
    Fails with UnicodeError on invalid input.
    This should be used where encoding and "safe" ambiguity should be avoided.
    Where strings are encoded but also in other ways are known to be ASCII, and
    where a unicode string is wanted without caring about encoding. For example
    to hex, base64, urlencoding, or are known to be identifiers.
    """
    if not isinstance(str_, bytes):
        raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
    # strict ascii: any non-ASCII byte raises UnicodeDecodeError
    return str_.decode('ascii')
134
134
135
135
def convert_to_str(data):
    """Recursively convert bytes to str inside (nested) lists and tuples; other values pass through."""
    if isinstance(data, bytes):
        return safe_str(data)
    if isinstance(data, tuple):
        return tuple(map(convert_to_str, data))
    if isinstance(data, list):
        return [convert_to_str(item) for item in data]
    return data
145
145
146
146
def splitnewlines(text: bytes):
    """
    like splitlines, but only split on newlines.
    """
    # split() always yields one segment more than there are separators;
    # re-append the newline and then undo the extra trailing one.
    parts = [segment + b'\n' for segment in text.split(b'\n')]
    if parts:
        if parts[-1] == b'\n':
            parts.pop()
        else:
            parts[-1] = parts[-1][:-1]
    return parts
@@ -1,111 +1,111 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import redis
19 import redis
20
20
21 from ..lib import rc_cache
21 from ..lib import rc_cache
22 from ..lib.ext_json import json
22 from ..lib.ext_json import json
23
23
24
24
log = logging.getLogger(__name__)

# process-wide RedisTxnClient singleton, lazily created by get_redis_client()
redis_client = None
28
28
29
29
class RedisTxnClient:
    """Minimal get/set/delete wrapper around a redis connection used for
    storing SVN transaction metadata."""

    def __init__(self, url):
        self.url = url
        self._create_client(url)

    def _create_client(self, url):
        # one pooled connection; reads and writes share the same client object
        pool = redis.ConnectionPool.from_url(url)
        self.writer_client = redis.StrictRedis(connection_pool=pool)
        self.reader_client = self.writer_client

    def set(self, key, value):
        self.writer_client.set(key, value)

    def get(self, key):
        return self.reader_client.get(key)

    def delete(self, key):
        self.writer_client.delete(key)
51
51
52
52
def get_redis_client(url=''):
    """Return the process-wide RedisTxnClient, creating it on first use.

    When *url* is empty the connection string is read from the application
    CONFIG key ``vcs.svn.redis_conn``.
    """
    global redis_client

    if redis_client is None:
        if not url:
            from vcsserver import CONFIG
            url = CONFIG['vcs.svn.redis_conn']
        redis_client = RedisTxnClient(url)
    return redis_client
63
63
64
64
def get_txn_id_data_key(repo_path, svn_txn_id):
    """Return the redis key under which metadata for *svn_txn_id* is stored."""
    log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id)
    # derive a stable, repo-specific prefix from the repository path
    repo_key = rc_cache.utils.compute_key_from_params(repo_path)
    final_key = f'{repo_key}.{svn_txn_id}.svn_txn_id'
    log.debug('computed final key: %s', final_key)

    return final_key
72
72
73
73
def store_txn_id_data(repo_path, svn_txn_id, data_dict):
    """Persist *data_dict* as JSON in redis under the txn-id key.

    A falsy *svn_txn_id* is logged and silently ignored.
    """
    log.debug('svn-txn-id: %s, storing data', svn_txn_id)

    if not svn_txn_id:
        log.warning('Cannot store txn_id because it is empty')
        return

    redis_conn = get_redis_client()
    key = get_txn_id_data_key(repo_path, svn_txn_id)
    redis_conn.set(key, json.dumps(data_dict))
86
86
87
87
def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False):
    """
    Reads txn_id from store and if present returns the data for callback manager.

    :param repo_path: repository path used to compute the store key
    :param svn_txn_id: SVN transaction id whose metadata is looked up
    :param rm_on_read: when True, delete the key after reading it
    :returns: decoded metadata dict, or ``{}`` when missing/undecodable
    """
    log.debug('svn-txn-id: %s, retrieving data', svn_txn_id)
    redis_conn = get_redis_client()

    store_key = get_txn_id_data_key(repo_path, svn_txn_id)
    data = {}
    raw_data = 'not-set'
    try:
        # NOTE: previously the value was fetched twice (a discarded get()
        # call preceded this one) — a single round-trip is sufficient.
        raw_data = redis_conn.get(store_key)
        if not raw_data:
            raise ValueError(f'Failed to get txn_id metadata, from store: {store_key}')
        data = json.loads(raw_data)
    except Exception:
        # best-effort read: log and fall through with an empty dict
        log.exception('Failed to get txn_id metadata: %s', raw_data)

    if rm_on_read:
        log.debug('Cleaning up txn_id at %s', store_key)
        redis_conn.delete(store_key)

    return data
@@ -1,160 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import tempfile
19 import tempfile
20
20
21 from svn import client
21 from svn import client
22 from svn import core
22 from svn import core
23 from svn import ra
23 from svn import ra
24
24
25 from mercurial import error
25 from mercurial import error
26
26
27 from vcsserver.lib.str_utils import safe_bytes
27 from vcsserver.lib.str_utils import safe_bytes
28
28
# Ensure the default SVN configuration area exists, then load it once at
# import time; the resulting config is shared by all RA sessions below.
core.svn_config_ensure(None)
svn_config = core.svn_config_get_config(None)
31
31
32
32
class RaCallbacks(ra.Callbacks):
    # Callback hooks handed to the SVN RA (repository-access) layer.

    @staticmethod
    def open_tmp_file(pool):  # pragma: no cover
        """Return the path of a fresh temporary file for the RA layer to use."""
        (fd, fn) = tempfile.mkstemp()
        # close our descriptor; the RA layer reopens the file by path
        os.close(fd)
        return fn

    @staticmethod
    def get_client_string(pool):
        """Client identification string reported to the SVN server."""
        return b'RhodeCode-subversion-url-checker'
43
43
44
44
class SubversionException(Exception):
    """Base class for Subversion-related errors raised by this module."""
    pass
47
47
48
48
class SubversionConnectionException(SubversionException):
    """Exception raised when a generic error occurs when connecting to a repository."""
51
51
52
52
def normalize_url(url):
    """Drop the ``svn+`` wrapper from svn+http(s) URLs and trailing slashes.

    Falsy inputs (None, b'') are returned unchanged.
    """
    if not url:
        return url
    if url.startswith((b'svn+http://', b'svn+https://')):
        # strip the 4-byte b'svn+' prefix, keeping the plain http(s) URL
        url = url[4:]
    return url.rstrip(b'/')
60
60
61
61
def _create_auth_baton(pool):
    """Create a Subversion authentication baton. """
    # Give the client context baton a suite of authentication
    # providers.h
    # Platform-specific keyring/credential providers; availability depends on
    # how the SVN bindings were built, so each is probed before use.
    platform_specific = [
        'svn_auth_get_gnome_keyring_simple_provider',
        'svn_auth_get_gnome_keyring_ssl_client_cert_pw_provider',
        'svn_auth_get_keychain_simple_provider',
        'svn_auth_get_keychain_ssl_client_cert_pw_provider',
        'svn_auth_get_kwallet_simple_provider',
        'svn_auth_get_kwallet_ssl_client_cert_pw_provider',
        'svn_auth_get_ssl_client_cert_file_provider',
        'svn_auth_get_windows_simple_provider',
        'svn_auth_get_windows_ssl_server_trust_provider',
    ]

    providers = []

    for p in platform_specific:
        if getattr(core, p, None) is not None:
            try:
                providers.append(getattr(core, p)())
            except RuntimeError:
                # provider exists but cannot initialize on this host; skip it
                pass

    # generic file-based providers are always available and appended last
    providers += [
        client.get_simple_provider(),
        client.get_username_provider(),
        client.get_ssl_client_cert_file_provider(),
        client.get_ssl_client_cert_pw_file_provider(),
        client.get_ssl_server_trust_file_provider(),
    ]

    return core.svn_auth_open(providers, pool)
96
96
97
97
class SubversionRepo:
    """Wrapper for a Subversion repository.

    It uses the SWIG Python bindings, see above for requirements.
    """
    def __init__(self, svn_url: bytes = b'', username: bytes = b'', password: bytes = b''):

        self.username = username
        self.password = password
        # canonical form of the URL as expected by the SVN C APIs
        self.svn_url = core.svn_path_canonicalize(svn_url)

        self.auth_baton_pool = core.Pool()
        self.auth_baton = _create_auth_baton(self.auth_baton_pool)
        # self.init_ra_and_client() assumes that a pool already exists
        self.pool = core.Pool()

        self.ra = self.init_ra_and_client()
        self.uuid = ra.get_uuid(self.ra, self.pool)

    def init_ra_and_client(self):
        """Initializes the RA and client layers, because sometimes getting
        unified diffs runs the remote server out of open files.
        """

        # seed default credentials on the auth baton when provided
        if self.username:
            core.svn_auth_set_parameter(self.auth_baton,
                                        core.SVN_AUTH_PARAM_DEFAULT_USERNAME,
                                        self.username)
        if self.password:
            core.svn_auth_set_parameter(self.auth_baton,
                                        core.SVN_AUTH_PARAM_DEFAULT_PASSWORD,
                                        self.password)

        callbacks = RaCallbacks()
        callbacks.auth_baton = self.auth_baton

        try:
            return ra.open2(self.svn_url, callbacks, svn_config, self.pool)
        except SubversionException as e:
            # e.child contains a detailed error messages
            # walk the chained child errors and join them into one message
            msglist = []
            svn_exc = e
            while svn_exc:
                if svn_exc.args[0]:
                    msglist.append(svn_exc.args[0])
                svn_exc = svn_exc.child
            msg = '\n'.join(msglist)
            raise SubversionConnectionException(msg)
146
146
147
147
class svnremoterepo:
    """ the dumb wrapper for actual Subversion repositories """

    def __init__(self, username: bytes = b'', password: bytes = b'', svn_url: bytes = b''):
        # normalize falsy credentials to empty bytes
        self.username = username if username else b''
        self.password = password if password else b''
        self.path = normalize_url(svn_url)

    def svn(self):
        """Open and return a SubversionRepo; connection errors abort."""
        try:
            return SubversionRepo(self.path, self.username, self.password)
        except SubversionConnectionException as e:
            raise error.Abort(safe_bytes(e))
@@ -1,67 +1,67 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import logging
19 import logging
20
20
21 log = logging.getLogger(__name__)
21 log = logging.getLogger(__name__)
22
22
23
23
def str2bool(str_):
    """
    returns True/False value from given string, it tries to translate the
    string into boolean

    :param str_: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if str_ is None:
        return False
    # bools (and ints equal to them, e.g. 0/1) are returned unchanged
    if str_ in (True, False):
        return str_
    normalized = str(str_).strip().lower()
    return normalized in ('t', 'true', 'y', 'yes', 'on', '1')
39
39
40
40
def aslist(obj, sep=None, strip=True) -> list:
    """
    Returns given string separated by sep as list.

    Strings are split on *sep* (whitespace when ``sep`` is None) and each
    element is stripped when *strip* is true. Lists and tuples are returned
    unchanged (NOTE: a tuple is passed through as-is despite the ``-> list``
    annotation — kept for backward compatibility). ``None`` yields an empty
    list; any other value is wrapped in a single-element list.

    :param obj: value to coerce into a list
    :param sep: separator used when *obj* is a string
    :param strip: strip whitespace from each split element
    """
    if isinstance(obj, str):
        # `not obj` replaces the redundant `obj in ['', ""]` duplicate-literal check
        if not obj:
            return []

        lst = obj.split(sep)
        if strip:
            lst = [v.strip() for v in lst]
        return lst
    elif isinstance(obj, (list, tuple)):
        return obj
    elif obj is None:
        return []
    else:
        return [obj]
63
63
64
64
def assert_bytes(input_type, expected_types=(bytes,)):
    """Raise ValueError unless *input_type* is an instance of *expected_types*."""
    if isinstance(input_type, expected_types):
        return
    raise ValueError(f'input_types should be one of {expected_types} got {type(input_type)} instead')
@@ -1,417 +1,417 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import dulwich.protocol
24 import dulwich.protocol
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver.lib.ext_json import json
28 from vcsserver.lib.ext_json import json
29 from vcsserver import hooks, subprocessio
29 from vcsserver import hooks, subprocessio
30 from vcsserver.lib.str_utils import ascii_bytes
30 from vcsserver.lib.str_utils import ascii_bytes
31
31
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
class FileWrapper:
    """File wrapper that ensures how much data is read from it."""

    def __init__(self, fd, content_length):
        self.fd = fd
        self.content_length = content_length
        # bytes still allowed to be read from fd
        self.remain = content_length

    def read(self, size):
        """Read up to *size* bytes, never exceeding the declared content length.

        Returns None once the budget is exhausted.
        """
        if size <= self.remain:
            try:
                chunk = self.fd.read(size)
            except socket.error:
                raise IOError(self)
            self.remain -= size
            return chunk
        if self.remain:
            # request larger than what is left: hand out the remainder
            chunk = self.fd.read(self.remain)
            self.remain = 0
            return chunk
        return None

    def __repr__(self):
        return '<FileWrapper {} len: {}, read: {}>'.format(
            self.fd, self.content_length, self.content_length - self.remain
        )
62
62
63
63
64 class GitRepository:
64 class GitRepository:
65 """WSGI app for handling Git smart protocol endpoints."""
65 """WSGI app for handling Git smart protocol endpoints."""
66
66
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
69 valid_accepts = frozenset(f'application/x-{c}-result' for c in commands)
69 valid_accepts = frozenset(f'application/x-{c}-result' for c in commands)
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 FLUSH_PACKET = b"0000"
76 FLUSH_PACKET = b"0000"
77
77
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
79
79
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
81 files = frozenset(f.lower() for f in os.listdir(content_path))
81 files = frozenset(f.lower() for f in os.listdir(content_path))
82 valid_dir_signature = self.git_folder_signature.issubset(files)
82 valid_dir_signature = self.git_folder_signature.issubset(files)
83
83
84 if not valid_dir_signature:
84 if not valid_dir_signature:
85 raise OSError(f'{content_path} missing git signature')
85 raise OSError(f'{content_path} missing git signature')
86
86
87 self.content_path = content_path
87 self.content_path = content_path
88 self.repo_name = repo_name
88 self.repo_name = repo_name
89 self.extras = extras
89 self.extras = extras
90 self.git_path = git_path
90 self.git_path = git_path
91 self.update_server_info = update_server_info
91 self.update_server_info = update_server_info
92
92
93 def _get_fixedpath(self, path):
93 def _get_fixedpath(self, path):
94 """
94 """
95 Small fix for repo_path
95 Small fix for repo_path
96
96
97 :param path:
97 :param path:
98 """
98 """
99 path = path.split(self.repo_name, 1)[-1]
99 path = path.split(self.repo_name, 1)[-1]
100 if path.startswith('.git'):
100 if path.startswith('.git'):
101 # for bare repos we still get the .git prefix inside, we skip it
101 # for bare repos we still get the .git prefix inside, we skip it
102 # here, and remove from the service command
102 # here, and remove from the service command
103 path = path[4:]
103 path = path[4:]
104
104
105 return path.strip('/')
105 return path.strip('/')
106
106
    def inforefs(self, request, unused_environ):
        """
        WSGI Response producer for HTTP GET Git Smart
        HTTP /info/refs request.

        :param request: webob Request with a ``service`` GET parameter
        :param unused_environ: WSGI environ (unused)
        :returns: webob Response streaming the ref advertisement, or
                  HTTPForbidden for unknown services
        """

        git_command = request.GET.get('service')
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        # please, resist the urge to add '\n' to git capture and increment
        # line count by 1.
        # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
        # a part of protocol.
        # The code in Git client not only does NOT need '\n', but actually
        # blows up if you sprinkle "flush" (0000) as "0001\n".
        # It reads binary, per number of bytes specified.
        # if you do add '\n' as part of data, count it.
        server_advert = f'# service={git_command}\n'
        # pkt-line length prefix: payload length + 4 bytes for the prefix itself
        packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
        try:
            gitenv = dict(os.environ)
            # forget all configs
            gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
            # e.g. 'git-upload-pack' -> git subcommand 'upload-pack'
            command = [self.git_path, git_command[4:], '--stateless-rpc',
                       '--advertise-refs', self.content_path]
            out = subprocessio.SubprocessIOChunker(
                command,
                env=gitenv,
                starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
                shell=False
            )
        except OSError:
            log.exception('Error processing command')
            raise exc.HTTPExpectationFailed()

        resp = Response()
        resp.content_type = f'application/x-{git_command}-advertisement'
        # charset must stay unset: the body is binary pkt-line data
        resp.charset = None
        resp.app_iter = out

        return resp
150
150
151 def _get_want_capabilities(self, request):
151 def _get_want_capabilities(self, request):
152 """Read the capabilities found in the first want line of the request."""
152 """Read the capabilities found in the first want line of the request."""
153 pos = request.body_file_seekable.tell()
153 pos = request.body_file_seekable.tell()
154 first_line = request.body_file_seekable.readline()
154 first_line = request.body_file_seekable.readline()
155 request.body_file_seekable.seek(pos)
155 request.body_file_seekable.seek(pos)
156
156
157 return frozenset(
157 return frozenset(
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
159
159
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
161 """
161 """
162 Construct a response with an empty PACK file.
162 Construct a response with an empty PACK file.
163
163
164 We use an empty PACK file, as that would trigger the failure of the pull
164 We use an empty PACK file, as that would trigger the failure of the pull
165 or clone command.
165 or clone command.
166
166
167 We also print in the error output a message explaining why the command
167 We also print in the error output a message explaining why the command
168 was aborted.
168 was aborted.
169
169
170 If additionally, the user is accepting messages we send them the output
170 If additionally, the user is accepting messages we send them the output
171 of the pre-pull hook.
171 of the pre-pull hook.
172
172
173 Note that for clients not supporting side-band we just send them the
173 Note that for clients not supporting side-band we just send them the
174 emtpy PACK file.
174 emtpy PACK file.
175 """
175 """
176
176
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
178 response = []
178 response = []
179 proto = dulwich.protocol.Protocol(None, response.append)
179 proto = dulwich.protocol.Protocol(None, response.append)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
181
181
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
184 # produces a fatal error in the client:
184 # produces a fatal error in the client:
185 # fatal: error in sideband demultiplexer
185 # fatal: error in sideband demultiplexer
186 proto.write_sideband(
186 proto.write_sideband(
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
189 proto.write_sideband(
189 proto.write_sideband(
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
192
192
193 # writes b"0000" as default
193 # writes b"0000" as default
194 proto.write_pkt_line(None)
194 proto.write_pkt_line(None)
195
195
196 return response
196 return response
197 else:
197 else:
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
199
199
    def _build_post_pull_response(self, response, capabilities, start_message, end_message):
        """
        Given a list response we inject the post-pull messages.

        We only inject the messages if the client supports sideband, and the
        response has the format:
            0008NAK\n...0000

        Note that we do not check the no-progress capability as by default, git
        sends it, which effectively would block all messages.

        :param response: iterator (NOT a list/tuple) of response byte chunks
        :param capabilities: capabilities negotiated by the client
        :param start_message: message injected right after the NAK header
        :param end_message: message injected just before the final flush packet
        :return: the original iterator, or a wrapping generator with messages injected
        """

        # without sideband support there is no channel to carry messages
        if not self.SIDE_BAND_CAPS.intersection(capabilities):
            return response

        # nothing to inject — pass the stream through untouched
        if not start_message and not end_message:
            return response

        try:
            iter(response)
            # iterator probably will work, we continue
        except TypeError:
            raise TypeError(f'response must be an iterator: got {type(response)}')
        if isinstance(response, (list, tuple)):
            # lists/tuples are iterable but would break the streaming contract below
            raise TypeError(f'response must be an iterator: got {type(response)}')

        def injected_response():
            # Streams the wrapped iterator with a one-item lookahead so the
            # last chunk can be rewritten before it is yielded.

            do_loop = 1
            header_injected = 0
            next_item = None
            has_item = False
            item = b''

            while do_loop:

                try:
                    next_item = next(response)
                except StopIteration:
                    # no more chunks: the held `item` is the final one
                    do_loop = 0

                if has_item:
                    # last item ! alter it now
                    if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
                        # splice end_message in before the trailing flush packet
                        new_response = [item[:-4]]
                        new_response.extend(self._get_messages(end_message, capabilities))
                        new_response.append(self.FLUSH_PACKET)
                        item = b''.join(new_response)

                    yield item

                has_item = True
                item = next_item

                # alter item if it's the initial chunk
                if not header_injected and item.startswith(b'0008NAK\n'):
                    # splice start_message in right after the NAK header
                    new_response = [b'0008NAK\n']
                    new_response.extend(self._get_messages(start_message, capabilities))
                    new_response.append(item[8:])
                    item = b''.join(new_response)
                    header_injected = 1

        return injected_response()
263
263
264 def _write_sideband_to_proto(self, proto, data, capabilities):
264 def _write_sideband_to_proto(self, proto, data, capabilities):
265 """
265 """
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
267
267
268 We do not use dulwich's write_sideband directly as it only supports
268 We do not use dulwich's write_sideband directly as it only supports
269 side-band-64k.
269 side-band-64k.
270 """
270 """
271 if not data:
271 if not data:
272 return
272 return
273
273
274 # N.B.(skreft): The values below are explained in the pack protocol
274 # N.B.(skreft): The values below are explained in the pack protocol
275 # documentation, section Packfile Data.
275 # documentation, section Packfile Data.
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
278 chunk_size = 65515
278 chunk_size = 65515
279 elif CAPABILITY_SIDE_BAND in capabilities:
279 elif CAPABILITY_SIDE_BAND in capabilities:
280 chunk_size = 995
280 chunk_size = 995
281 else:
281 else:
282 return
282 return
283
283
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
285
285
286 for chunk in chunker:
286 for chunk in chunker:
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
288
288
289 def _get_messages(self, data, capabilities):
289 def _get_messages(self, data, capabilities):
290 """Return a list with packets for sending data in sideband number 2."""
290 """Return a list with packets for sending data in sideband number 2."""
291 response = []
291 response = []
292 proto = dulwich.protocol.Protocol(None, response.append)
292 proto = dulwich.protocol.Protocol(None, response.append)
293
293
294 self._write_sideband_to_proto(proto, data, capabilities)
294 self._write_sideband_to_proto(proto, data, capabilities)
295
295
296 return response
296 return response
297
297
    def backend(self, request, environ):
        """
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
        Reads commands and data from HTTP POST's body.
        returns an iterator obj with contents of git command's
        response to stdout
        """
        # TODO(skreft): think how we could detect an HTTPLockedException, as
        # we probably want to have the same mechanism used by mercurial and
        # simplevcs.
        # For that we would need to parse the output of the command looking for
        # some signs of the HTTPLockedError, parse the data and reraise it in
        # pygrack. However, that would interfere with the streaming.
        #
        # Now the output of a blocked push is:
        # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
        # POST git-receive-pack (1047 bytes)
        # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
        # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
        # ! [remote rejected] master -> master (pre-receive hook declined)
        # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'

        # e.g. 'git-upload-pack' or 'git-receive-pack', taken from the URL path
        git_command = self._get_fixedpath(request.path_info)
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        capabilities = None
        if git_command == 'git-upload-pack':
            capabilities = self._get_want_capabilities(request)

        if 'CONTENT_LENGTH' in environ:
            # bound the stream to the declared length
            inputstream = FileWrapper(request.body_file_seekable,
                                      request.content_length)
        else:
            inputstream = request.body_file_seekable

        resp = Response()
        resp.content_type = f'application/x-{git_command}-result'
        resp.charset = None

        pre_pull_messages = ''
        # Upload-pack == clone
        if git_command == 'git-upload-pack':
            hook_response = hooks.git_pre_pull(self.extras)
            if hook_response.status != 0:
                # non-zero hook status aborts the pull with an empty PACK
                pre_pull_messages = hook_response.output
                resp.app_iter = self._build_failed_pre_pull_response(
                    capabilities, pre_pull_messages)
                return resp

        gitenv = dict(os.environ)
        # forget all configs
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
        cmd = [self.git_path, git_command[4:], '--stateless-rpc',
               self.content_path]
        log.debug('handling cmd %s', cmd)

        # stream the git subprocess output; errors/return codes are tolerated
        # so the raw protocol bytes still reach the client
        out = subprocessio.SubprocessIOChunker(
            cmd,
            input_stream=inputstream,
            env=gitenv,
            cwd=self.content_path,
            shell=False,
            fail_on_stderr=False,
            fail_on_return_code=False
        )

        if self.update_server_info and git_command == 'git-receive-pack':
            # We need to fully consume the iterator here, as the
            # update-server-info command needs to be run after the push.
            out = list(out)

            # Updating refs manually after each push.
            # This is required as some clients are exposing Git repos internally
            # with the dumb protocol.
            cmd = [self.git_path, 'update-server-info']
            log.debug('handling cmd %s', cmd)
            output = subprocessio.SubprocessIOChunker(
                cmd,
                input_stream=inputstream,
                env=gitenv,
                cwd=self.content_path,
                shell=False,
                fail_on_stderr=False,
                fail_on_return_code=False
            )
            # Consume all the output so the subprocess finishes
            for _ in output:
                pass

        # Upload-pack == clone
        if git_command == 'git-upload-pack':
            hook_response = hooks.git_post_pull(self.extras)
            post_pull_messages = hook_response.output
            resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
        else:
            resp.app_iter = out

        return resp
399
399
400 def __call__(self, environ, start_response):
400 def __call__(self, environ, start_response):
401 request = Request(environ)
401 request = Request(environ)
402 _path = self._get_fixedpath(request.path_info)
402 _path = self._get_fixedpath(request.path_info)
403 if _path.startswith('info/refs'):
403 if _path.startswith('info/refs'):
404 app = self.inforefs
404 app = self.inforefs
405 else:
405 else:
406 app = self.backend
406 app = self.backend
407
407
408 try:
408 try:
409 resp = app(request, environ)
409 resp = app(request, environ)
410 except exc.HTTPException as error:
410 except exc.HTTPException as error:
411 log.exception('HTTP Error')
411 log.exception('HTTP Error')
412 resp = error
412 resp = error
413 except Exception:
413 except Exception:
414 log.exception('Unknown error')
414 log.exception('Unknown error')
415 resp = exc.HTTPInternalServerError()
415 resp = exc.HTTPInternalServerError()
416
416
417 return resp(environ, start_response)
417 return resp(environ, start_response)
@@ -1,17 +1,17 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
@@ -1,1526 +1,1526 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib.request
24 import urllib.request
25 import urllib.parse
25 import urllib.parse
26 import urllib.error
26 import urllib.error
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40
40
41 import vcsserver
41 import vcsserver
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.lib.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes, convert_to_str, splitnewlines
43 from vcsserver.lib.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes, convert_to_str, splitnewlines
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
DIR_STAT = stat.S_IFDIR          # mode bits marking a directory entry
FILE_MODE = stat.S_IFMT          # stat helper extracting file-type bits from a mode
GIT_LINK = objects.S_IFGITLINK   # dulwich mode value for submodule (gitlink) entries
PEELED_REF_MARKER = b'^{}'       # suffix marking peeled tag refs in ref listings
HEAD_MARKER = b'HEAD'            # symbolic ref name for the current head

log = logging.getLogger(__name__)
57
57
58
58
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        lookup_errors = (
            ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing)
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise

    return wrapper
81
81
82
82
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """

    def __del__(self):
        # close only when init got far enough to create the object store
        if hasattr(self, 'object_store'):
            self.close()
94
94
95
95
class Repository(LibGit2Repo):
    """Context-manager flavour of pygit2's Repository: frees native state on exit."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
103
103
104
104
class GitFactory(RepoFactory):
    """Factory producing git repository objects, dulwich- or libgit2-backed."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        if use_libgit2:
            repo = Repository(safe_bytes(wire['path']))
        else:
            # dulwich mode: dulwich expects a str path in the wire encoding
            str_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
            repo = Repo(str_path)

        log.debug('repository created: got GIT object: %s', repo)
        return repo

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut returning a libgit2-backed repository."""
        return self.repo(wire, use_libgit2=True)
127
127
128
128
def create_signature_from_string(author_str, **kwargs):
    """
    Creates a pygit2.Signature object from a string of the format 'Name <email>'.

    :param author_str: String of the format 'Name <email>'
    :return: pygit2.Signature object
    :raises ValueError: if *author_str* does not match the expected format
    """
    match = re.match(r'^(.+) <(.+)>$', author_str)
    if not match:
        raise ValueError(f"Invalid format: {author_str}")

    name = match.group(1)
    email = match.group(2)
    return pygit2.Signature(name, email, **kwargs)
142
142
143
143
def get_obfuscated_url(url_obj):
    """Return *url_obj* rendered as a string with password and query masked."""
    if url_obj.passwd:
        url_obj.passwd = b'*****'
    url_obj.query = obfuscate_qs(url_obj.query)
    return str(url_obj)
149
149
150
150
151 class GitRemote(RemoteBase):
151 class GitRemote(RemoteBase):
152
152
    def __init__(self, factory):
        # factory producing repo objects (dulwich or libgit2 backed)
        self._factory = factory
        # commit attribute name -> accessor method, for bulk attribute lookups
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }
        # file/node attribute name -> accessor method, for bulk file lookups
        self._bulk_file_methods = {
            "size": self.get_node_size,
            "data": self.get_node_data,
            "flags": self.get_node_flags,
            "is_binary": self.get_node_is_binary,
            "md5": self.md5_hash
        }
170
170
171 def _wire_to_config(self, wire):
171 def _wire_to_config(self, wire):
172 if 'config' in wire:
172 if 'config' in wire:
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
174 return {}
174 return {}
175
175
176 def _remote_conf(self, config):
176 def _remote_conf(self, config):
177 params = [
177 params = [
178 '-c', 'core.askpass=""',
178 '-c', 'core.askpass=""',
179 ]
179 ]
180 config_attrs = {
180 config_attrs = {
181 'vcs_ssl_dir': 'http.sslCAinfo={}',
181 'vcs_ssl_dir': 'http.sslCAinfo={}',
182 'vcs_git_lfs_store_location': 'lfs.storage={}'
182 'vcs_git_lfs_store_location': 'lfs.storage={}'
183 }
183 }
184 for key, param in config_attrs.items():
184 for key, param in config_attrs.items():
185 if value := config.get(key):
185 if value := config.get(key):
186 params.extend(['-c', param.format(value)])
186 params.extend(['-c', param.format(value)])
187 return params
187 return params
188
188
189 @reraise_safe_exceptions
189 @reraise_safe_exceptions
190 def discover_git_version(self):
190 def discover_git_version(self):
191 stdout, _ = self.run_git_command(
191 stdout, _ = self.run_git_command(
192 {}, ['--version'], _bare=True, _safe=True)
192 {}, ['--version'], _bare=True, _safe=True)
193 prefix = b'git version'
193 prefix = b'git version'
194 if stdout.startswith(prefix):
194 if stdout.startswith(prefix):
195 stdout = stdout[len(prefix):]
195 stdout = stdout[len(prefix):]
196 return safe_str(stdout.strip())
196 return safe_str(stdout.strip())
197
197
198 @reraise_safe_exceptions
198 @reraise_safe_exceptions
199 def is_empty(self, wire):
199 def is_empty(self, wire):
200 repo_init = self._factory.repo_libgit2(wire)
200 repo_init = self._factory.repo_libgit2(wire)
201 with repo_init as repo:
201 with repo_init as repo:
202 try:
202 try:
203 has_head = repo.head.name
203 has_head = repo.head.name
204 if has_head:
204 if has_head:
205 return False
205 return False
206
206
207 # NOTE(marcink): check again using more expensive method
207 # NOTE(marcink): check again using more expensive method
208 return repo.is_empty
208 return repo.is_empty
209 except Exception:
209 except Exception:
210 pass
210 pass
211
211
212 return True
212 return True
213
213
    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        """Check (with region caching) that ``wire['path']`` points at a git repository."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id, fast_check):
            if fast_check:
                # cheap probe: just try to discover a repository at the path
                path = safe_str(wire['path'])
                if pygit2.discover_repository(path):
                    return True
                return False
            else:
                # expensive probe: actually open the repository via libgit2
                try:
                    repo_init = self._factory.repo_libgit2(wire)
                    with repo_init:
                        pass
                except pygit2.GitError:
                    path = wire.get('path')
                    tb = traceback.format_exc()
                    log.debug("Invalid Git path `%s`, tb: %s", path, tb)
                    return False
                return True

        # only the fast check is used here; the slow path is kept for callers
        # that flip the flag (cached under different arguments)
        return _assert_correct_path(context_uid, repo_id, True)
239
239
240 @reraise_safe_exceptions
240 @reraise_safe_exceptions
241 def bare(self, wire):
241 def bare(self, wire):
242 repo_init = self._factory.repo_libgit2(wire)
242 repo_init = self._factory.repo_libgit2(wire)
243 with repo_init as repo:
243 with repo_init as repo:
244 return repo.is_bare
244 return repo.is_bare
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def get_node_data(self, wire, commit_id, path):
247 def get_node_data(self, wire, commit_id, path):
248 repo_init = self._factory.repo_libgit2(wire)
248 repo_init = self._factory.repo_libgit2(wire)
249 with repo_init as repo:
249 with repo_init as repo:
250 commit = repo[commit_id]
250 commit = repo[commit_id]
251 blob_obj = commit.tree[path]
251 blob_obj = commit.tree[path]
252
252
253 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
253 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
254 raise exceptions.LookupException()(
254 raise exceptions.LookupException()(
255 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
255 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
256
256
257 return BytesEnvelope(blob_obj.data)
257 return BytesEnvelope(blob_obj.data)
258
258
259 @reraise_safe_exceptions
259 @reraise_safe_exceptions
260 def get_node_size(self, wire, commit_id, path):
260 def get_node_size(self, wire, commit_id, path):
261 repo_init = self._factory.repo_libgit2(wire)
261 repo_init = self._factory.repo_libgit2(wire)
262 with repo_init as repo:
262 with repo_init as repo:
263 commit = repo[commit_id]
263 commit = repo[commit_id]
264 blob_obj = commit.tree[path]
264 blob_obj = commit.tree[path]
265
265
266 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
266 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
267 raise exceptions.LookupException()(
267 raise exceptions.LookupException()(
268 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
268 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
269
269
270 return blob_obj.size
270 return blob_obj.size
271
271
272 @reraise_safe_exceptions
272 @reraise_safe_exceptions
273 def get_node_flags(self, wire, commit_id, path):
273 def get_node_flags(self, wire, commit_id, path):
274 repo_init = self._factory.repo_libgit2(wire)
274 repo_init = self._factory.repo_libgit2(wire)
275 with repo_init as repo:
275 with repo_init as repo:
276 commit = repo[commit_id]
276 commit = repo[commit_id]
277 blob_obj = commit.tree[path]
277 blob_obj = commit.tree[path]
278
278
279 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
279 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
280 raise exceptions.LookupException()(
280 raise exceptions.LookupException()(
281 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
281 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
282
282
283 return blob_obj.filemode
283 return blob_obj.filemode
284
284
285 @reraise_safe_exceptions
285 @reraise_safe_exceptions
286 def get_node_is_binary(self, wire, commit_id, path):
286 def get_node_is_binary(self, wire, commit_id, path):
287 repo_init = self._factory.repo_libgit2(wire)
287 repo_init = self._factory.repo_libgit2(wire)
288 with repo_init as repo:
288 with repo_init as repo:
289 commit = repo[commit_id]
289 commit = repo[commit_id]
290 blob_obj = commit.tree[path]
290 blob_obj = commit.tree[path]
291
291
292 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
292 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
293 raise exceptions.LookupException()(
293 raise exceptions.LookupException()(
294 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
294 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
295
295
296 return blob_obj.is_binary
296 return blob_obj.is_binary
297
297
298 @reraise_safe_exceptions
298 @reraise_safe_exceptions
299 def blob_as_pretty_string(self, wire, sha):
299 def blob_as_pretty_string(self, wire, sha):
300 repo_init = self._factory.repo_libgit2(wire)
300 repo_init = self._factory.repo_libgit2(wire)
301 with repo_init as repo:
301 with repo_init as repo:
302 blob_obj = repo[sha]
302 blob_obj = repo[sha]
303 return BytesEnvelope(blob_obj.data)
303 return BytesEnvelope(blob_obj.data)
304
304
    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        """
        Return the size in bytes of the blob identified by ``sha``.

        Cached per (repo_id, sha) when caching is enabled for this wire.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _blob_raw_length(_repo_id, _sha):
            # NOTE: closes over `wire`/`sha`; the underscored parameters exist
            # only to form the cache key.
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[sha]
                return blob.size

        return _blob_raw_length(repo_id, sha)
319
319
320 def _parse_lfs_pointer(self, raw_content):
320 def _parse_lfs_pointer(self, raw_content):
321 spec_string = b'version https://git-lfs.github.com/spec'
321 spec_string = b'version https://git-lfs.github.com/spec'
322 if raw_content and raw_content.startswith(spec_string):
322 if raw_content and raw_content.startswith(spec_string):
323
323
324 pattern = re.compile(rb"""
324 pattern = re.compile(rb"""
325 (?:\n)?
325 (?:\n)?
326 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
326 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
327 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
327 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
328 ^size[ ](?P<oid_size>[0-9]+)\n
328 ^size[ ](?P<oid_size>[0-9]+)\n
329 (?:\n)?
329 (?:\n)?
330 """, re.VERBOSE | re.MULTILINE)
330 """, re.VERBOSE | re.MULTILINE)
331 match = pattern.match(raw_content)
331 match = pattern.match(raw_content)
332 if match:
332 if match:
333 return match.groupdict()
333 return match.groupdict()
334
334
335 return {}
335 return {}
336
336
    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id):
        """
        Return parsed LFS pointer data for the object at ``commit_id``,
        or an empty dict when the object is binary / not an LFS pointer.

        NOTE(review): despite the name, ``commit_id`` is looked up directly
        as an object id and treated as a blob — presumably callers pass a
        blob sha here; confirm against callers.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[commit_id]
                # binary content cannot be a text LFS pointer
                if blob.is_binary:
                    return {}

                return self._parse_lfs_pointer(blob.data)

        return _is_large_file(repo_id, commit_id)
353
353
    @reraise_safe_exceptions
    def is_binary(self, wire, tree_id):
        """
        Return True when the object ``tree_id`` is a binary blob.

        Cached per (repo_id, tree_id) when caching is enabled for this wire.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _tree_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob_obj = repo[tree_id]
                return blob_obj.is_binary

        return _is_binary(repo_id, tree_id)
367
367
    @reraise_safe_exceptions
    def md5_hash(self, wire, commit_id, path):
        """
        Validate the node at ``path`` in ``commit_id`` and return its md5.

        NOTE(review): after validating the node is a blob this always returns
        an empty string — no md5 is ever computed here; confirm whether this
        placeholder behavior is intended.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[_commit_id]
                blob_obj = commit.tree[_path]

                # only blobs are valid here; anything else is a lookup error
                if blob_obj.type != pygit2.GIT_OBJ_BLOB:
                    raise exceptions.LookupException()(
                        f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')

                return ''

        return _md5_hash(repo_id, commit_id, path)
387
387
388 @reraise_safe_exceptions
388 @reraise_safe_exceptions
389 def in_largefiles_store(self, wire, oid):
389 def in_largefiles_store(self, wire, oid):
390 conf = self._wire_to_config(wire)
390 conf = self._wire_to_config(wire)
391 repo_init = self._factory.repo_libgit2(wire)
391 repo_init = self._factory.repo_libgit2(wire)
392 with repo_init as repo:
392 with repo_init as repo:
393 repo_name = repo.path
393 repo_name = repo.path
394
394
395 store_location = conf.get('vcs_git_lfs_store_location')
395 store_location = conf.get('vcs_git_lfs_store_location')
396 if store_location:
396 if store_location:
397
397
398 store = LFSOidStore(
398 store = LFSOidStore(
399 oid=oid, repo=repo_name, store_location=store_location)
399 oid=oid, repo=repo_name, store_location=store_location)
400 return store.has_oid()
400 return store.has_oid()
401
401
402 return False
402 return False
403
403
404 @reraise_safe_exceptions
404 @reraise_safe_exceptions
405 def store_path(self, wire, oid):
405 def store_path(self, wire, oid):
406 conf = self._wire_to_config(wire)
406 conf = self._wire_to_config(wire)
407 repo_init = self._factory.repo_libgit2(wire)
407 repo_init = self._factory.repo_libgit2(wire)
408 with repo_init as repo:
408 with repo_init as repo:
409 repo_name = repo.path
409 repo_name = repo.path
410
410
411 store_location = conf.get('vcs_git_lfs_store_location')
411 store_location = conf.get('vcs_git_lfs_store_location')
412 if store_location:
412 if store_location:
413 store = LFSOidStore(
413 store = LFSOidStore(
414 oid=oid, repo=repo_name, store_location=store_location)
414 oid=oid, repo=repo_name, store_location=store_location)
415 return store.oid_path
415 return store.oid_path
416 raise ValueError(f'Unable to fetch oid with path {oid}')
416 raise ValueError(f'Unable to fetch oid with path {oid}')
417
417
418 @reraise_safe_exceptions
418 @reraise_safe_exceptions
419 def bulk_request(self, wire, rev, pre_load):
419 def bulk_request(self, wire, rev, pre_load):
420 cache_on, context_uid, repo_id = self._cache_on(wire)
420 cache_on, context_uid, repo_id = self._cache_on(wire)
421 region = self._region(wire)
421 region = self._region(wire)
422
422
423 @region.conditional_cache_on_arguments(condition=cache_on)
423 @region.conditional_cache_on_arguments(condition=cache_on)
424 def _bulk_request(_repo_id, _rev, _pre_load):
424 def _bulk_request(_repo_id, _rev, _pre_load):
425 result = {}
425 result = {}
426 for attr in pre_load:
426 for attr in pre_load:
427 try:
427 try:
428 method = self._bulk_methods[attr]
428 method = self._bulk_methods[attr]
429 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
429 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
430 args = [wire, rev]
430 args = [wire, rev]
431 result[attr] = method(*args)
431 result[attr] = method(*args)
432 except KeyError as e:
432 except KeyError as e:
433 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
433 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
434 return result
434 return result
435
435
436 return _bulk_request(repo_id, rev, sorted(pre_load))
436 return _bulk_request(repo_id, rev, sorted(pre_load))
437
437
438 @reraise_safe_exceptions
438 @reraise_safe_exceptions
439 def bulk_file_request(self, wire, commit_id, path, pre_load):
439 def bulk_file_request(self, wire, commit_id, path, pre_load):
440 cache_on, context_uid, repo_id = self._cache_on(wire)
440 cache_on, context_uid, repo_id = self._cache_on(wire)
441 region = self._region(wire)
441 region = self._region(wire)
442
442
443 @region.conditional_cache_on_arguments(condition=cache_on)
443 @region.conditional_cache_on_arguments(condition=cache_on)
444 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
444 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
445 result = {}
445 result = {}
446 for attr in pre_load:
446 for attr in pre_load:
447 try:
447 try:
448 method = self._bulk_file_methods[attr]
448 method = self._bulk_file_methods[attr]
449 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
449 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
450 result[attr] = method(wire, _commit_id, _path)
450 result[attr] = method(wire, _commit_id, _path)
451 except KeyError as e:
451 except KeyError as e:
452 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
452 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
453 return result
453 return result
454
454
455 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
455 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
456
456
    def _build_opener(self, url: str):
        """
        Build a urllib opener for *url*, attaching HTTP basic and digest
        auth handlers when the url embeds credentials.
        """
        handlers = []
        url_obj = url_parser(safe_bytes(url))
        authinfo = url_obj.authinfo()[1]

        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*convert_to_str(authinfo))

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        return urllib.request.build_opener(*handlers)
471
471
472 @reraise_safe_exceptions
472 @reraise_safe_exceptions
473 def check_url(self, url, config):
473 def check_url(self, url, config):
474 url_obj = url_parser(safe_bytes(url))
474 url_obj = url_parser(safe_bytes(url))
475
475
476 test_uri = safe_str(url_obj.authinfo()[0])
476 test_uri = safe_str(url_obj.authinfo()[0])
477 obfuscated_uri = get_obfuscated_url(url_obj)
477 obfuscated_uri = get_obfuscated_url(url_obj)
478
478
479 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
479 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
480
480
481 if not test_uri.endswith('info/refs'):
481 if not test_uri.endswith('info/refs'):
482 test_uri = test_uri.rstrip('/') + '/info/refs'
482 test_uri = test_uri.rstrip('/') + '/info/refs'
483
483
484 o = self._build_opener(url=url)
484 o = self._build_opener(url=url)
485 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
485 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
486
486
487 q = {"service": 'git-upload-pack'}
487 q = {"service": 'git-upload-pack'}
488 qs = f'?{urllib.parse.urlencode(q)}'
488 qs = f'?{urllib.parse.urlencode(q)}'
489 cu = f"{test_uri}{qs}"
489 cu = f"{test_uri}{qs}"
490
490
491 try:
491 try:
492 req = urllib.request.Request(cu, None, {})
492 req = urllib.request.Request(cu, None, {})
493 log.debug("Trying to open URL %s", obfuscated_uri)
493 log.debug("Trying to open URL %s", obfuscated_uri)
494 resp = o.open(req)
494 resp = o.open(req)
495 if resp.code != 200:
495 if resp.code != 200:
496 raise exceptions.URLError()('Return Code is not 200')
496 raise exceptions.URLError()('Return Code is not 200')
497 except Exception as e:
497 except Exception as e:
498 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
498 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
499 # means it cannot be cloned
499 # means it cannot be cloned
500 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
500 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
501
501
502 # now detect if it's proper git repo
502 # now detect if it's proper git repo
503 gitdata: bytes = resp.read()
503 gitdata: bytes = resp.read()
504
504
505 if b'service=git-upload-pack' in gitdata:
505 if b'service=git-upload-pack' in gitdata:
506 pass
506 pass
507 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
507 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
508 # old style git can return some other format!
508 # old style git can return some other format!
509 pass
509 pass
510 else:
510 else:
511 e = None
511 e = None
512 raise exceptions.URLError(e)(
512 raise exceptions.URLError(e)(
513 f"url [{obfuscated_uri}] does not look like an hg repo org_exc: {e}")
513 f"url [{obfuscated_uri}] does not look like an hg repo org_exc: {e}")
514
514
515 return True
515 return True
516
516
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """
        Clone by pulling all refs from ``url`` and copying the matching ones
        into the local repository.

        :param deferred: ref-name suffix(es) to skip (so-called deferred tags)
        :param valid_refs: ref-name prefix(es) to keep (list is coerced to tuple)
        :param update_after_clone: when True, also check out HEAD afterwards
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
535
535
    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        """
        Return names of local branches whose head equals ``commit_id``.

        Cached per (context_uid, repo_id, commit_id) when caching is enabled.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                # keep (ref_name, sha) pairs under refs/heads pointing at the commit
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = list(filter(filter_with, list(self.get_refs(wire).items())))
            # strip the 'refs/heads/' prefix, keeping only the branch name
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)
552
552
553 @reraise_safe_exceptions
553 @reraise_safe_exceptions
554 def delete_branch(self, wire, branch_name):
554 def delete_branch(self, wire, branch_name):
555 repo_init = self._factory.repo_libgit2(wire)
555 repo_init = self._factory.repo_libgit2(wire)
556 with repo_init as repo:
556 with repo_init as repo:
557 if branch := repo.lookup_branch(branch_name):
557 if branch := repo.lookup_branch(branch_name):
558 branch.delete()
558 branch.delete()
559
559
560 @reraise_safe_exceptions
560 @reraise_safe_exceptions
561 def commit_branches(self, wire, commit_id):
561 def commit_branches(self, wire, commit_id):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 region = self._region(wire)
563 region = self._region(wire)
564
564
565 @region.conditional_cache_on_arguments(condition=cache_on)
565 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _commit_branches(_context_uid, _repo_id, _commit_id):
566 def _commit_branches(_context_uid, _repo_id, _commit_id):
567 repo_init = self._factory.repo_libgit2(wire)
567 repo_init = self._factory.repo_libgit2(wire)
568 with repo_init as repo:
568 with repo_init as repo:
569 branches = [x for x in repo.branches.with_commit(_commit_id)]
569 branches = [x for x in repo.branches.with_commit(_commit_id)]
570 return branches
570 return branches
571
571
572 return _commit_branches(context_uid, repo_id, commit_id)
572 return _commit_branches(context_uid, repo_id, commit_id)
573
573
574 @reraise_safe_exceptions
574 @reraise_safe_exceptions
575 def add_object(self, wire, content):
575 def add_object(self, wire, content):
576 repo_init = self._factory.repo_libgit2(wire)
576 repo_init = self._factory.repo_libgit2(wire)
577 with repo_init as repo:
577 with repo_init as repo:
578 blob = objects.Blob()
578 blob = objects.Blob()
579 blob.set_raw_string(content)
579 blob.set_raw_string(content)
580 repo.object_store.add_object(blob)
580 repo.object_store.add_object(blob)
581 return blob.id
581 return blob.id
582
582
    @reraise_safe_exceptions
    def create_commit(self, wire, author, committer, message, branch, new_tree_id,
                      date_args: list[int] | None = None,
                      parents: list | None = None):
        """
        Create a commit on ``branch`` pointing at ``new_tree_id``.

        :param author: author string; converted to a signature when
            ``date_args`` is given
        :param committer: committer string, handled like ``author``
        :param message: the commit message
        :param branch: branch name the new commit is created on
        :param new_tree_id: tree object (or its id as bytes/str) produced by
            the index
        :param date_args: optional ``[commit_time, timezone_offset]`` pair
        :param parents: optional list of parent commit ids
        :returns: the new commit id as str
        """
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            if date_args:
                current_time, offset = date_args

                kw = {
                    'time': current_time,
                    'offset': offset
                }
                author = create_signature_from_string(author, **kw)
                committer = create_signature_from_string(committer, **kw)

            tree = new_tree_id
            if isinstance(tree, (bytes, str)):
                # validate this tree is in the repo...
                tree = repo[safe_str(tree)].id

            if parents:
                # run via sha's and validate them in repo
                parents = [repo[c].id for c in parents]
            else:
                parents = []
                # ensure we COMMIT on top of given branch head
                # check if this repo has ANY branches, otherwise it's a new branch case we need to make
                if branch in repo.branches.local:
                    parents += [repo.branches[branch].target]
                elif [x for x in repo.branches.local]:
                    parents += [repo.head.target]
                #else:
                # in case we want to commit on new branch we create it on top of HEAD
                #repo.branches.local.create(branch, repo.revparse_single('HEAD'))

            # # Create a new commit
            commit_oid = repo.create_commit(
                f'refs/heads/{branch}',  # the name of the reference to update
                author,  # the author of the commit
                committer,  # the committer of the commit
                message,  # the commit message
                tree,  # the tree produced by the index
                parents  # list of parents for the new commit, usually just one,
            )

            new_commit_id = safe_str(commit_oid)

        return new_commit_id
634
634
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Create a commit on ``branch`` from explicit file additions/removals.

        :param commit_data: dict with 'author', 'committer', 'message',
            'commit_time', 'commit_timezone' and 'parents'
        :param commit_tree: truthy when the first parent's tree should seed
            the index before applying changes
        :param updated: list of {'path', 'content', 'mode'} entries to write
        :param removed: list of paths to delete from the index
        :returns: the new commit id as str
        """

        def mode2pygit(mode):
            """
            git only supports two filemode 644 and 755

            0o100755 -> 33261
            0o100644 -> 33188
            """
            return {
                0o100644: pygit2.GIT_FILEMODE_BLOB,
                0o100755: pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
                0o120000: pygit2.GIT_FILEMODE_LINK
            }.get(mode) or pygit2.GIT_FILEMODE_BLOB

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_index = repo.index

            commit_parents = None
            if commit_tree and commit_data['parents']:
                commit_parents = commit_data['parents']
                # seed the index from the first parent's tree
                parent_commit = repo[commit_parents[0]]
                repo_index.read_tree(parent_commit.tree)

            for pathspec in updated:
                blob_id = repo.create_blob(pathspec['content'])
                ie = pygit2.IndexEntry(pathspec['path'], blob_id, mode2pygit(pathspec['mode']))
                repo_index.add(ie)

            for pathspec in removed:
                repo_index.remove(pathspec)

            # Write changes to the index
            repo_index.write()

            # Create a tree from the updated index
            written_commit_tree = repo_index.write_tree()

            new_tree_id = written_commit_tree

            author = commit_data['author']
            committer = commit_data['committer']
            message = commit_data['message']

            date_args = [int(commit_data['commit_time']), int(commit_data['commit_timezone'])]

            new_commit_id = self.create_commit(wire, author, committer, message, branch,
                                               new_tree_id, date_args=date_args, parents=commit_parents)

            # libgit2, ensure the branch is there and exists
            self.create_branch(wire, branch, new_commit_id)

            # libgit2, set new ref to this created commit
            self.set_refs(wire, f'refs/heads/{branch}', new_commit_id)

            return new_commit_id
693
693
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch refs from a remote git repository (via dulwich) into the wire repo.

        :param wire: repo context dict (``path`` etc.) used by ``self._factory``.
        :param url: remote url; a local path (no ``://`` and not ``'default'``)
            uses ``LocalGitClient``, anything else goes through ``HttpGitClient``.
        :param apply_refs: when True, write the fetched refs into the local repo.
        :param refs: optional list of ref names to restrict the fetch to.
        :param update_after: when truthy, check out HEAD after fetching.
        :returns: dict of remote refs (unwrapped from ``FetchPackResult`` if needed).
        :raises exceptions.AbortException: when the remote is not a git repository.
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(safe_bytes(url))
            o = self._build_opener(url)
            # strip credentials from the url; auth is handled by the opener
            url = url_obj.authinfo()[0]
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        # default: want everything the remote advertises
        determine_wants = repo.object_store.determine_wants_all

        if refs:
            refs: list[bytes] = [ascii_bytes(x) for x in refs]

            def determine_wants_requested(_remote_refs):
                # pick only the sha hashes whose ref names were requested
                determined = []
                for ref_name, ref_hash in _remote_refs.items():
                    bytes_ref_name = safe_bytes(ref_name)

                    if bytes_ref_name in refs:
                        bytes_ref_hash = safe_bytes(ref_hash)
                        determined.append(bytes_ref_hash)
                return determined

            # swap with our custom requested wants
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)

        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # update to ref
                # mikhail: explicitly set the head to the last ref.
                update_to_ref = refs[-1]
                # NOTE(review): this branch only runs when update_after is falsy,
                # so a non-empty str never reaches here — looks dead; confirm intent
                if isinstance(update_after, str):
                    update_to_ref = update_after

                repo[HEAD_MARKER] = remote_refs[update_to_ref]

        if update_after:
            # we want to check out HEAD
            repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo[HEAD_MARKER].tree)

        # newer dulwich returns a FetchPackResult; callers expect a plain refs dict
        if isinstance(remote_refs, FetchPackResult):
            return remote_refs.refs
        return remote_refs
767
767
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False, **kwargs):
        """
        Fetch refs from remote ``url`` into the wire repository using the git CLI.

        Lists remote refs with ``ls-remote`` first, filters them (skip duplicates,
        peeled refs and HEAD), then fetches the selected refspecs in chunks of 128
        to stay under command-line length limits.

        :param wire: repo context dict used by ``self._factory`` / git commands.
        :param url: remote url to fetch from.
        :param refs: optional sha (or list of shas) — only refs pointing at these
            are fetched; note filtering is by sha, not by ref name.
        :param all_refs: when True, list every ref instead of only heads/tags.
        :param kwargs: ``sync_large_objects=True`` additionally runs ``git lfs fetch``.
        :returns: OrderedDict mapping ref name (bytes) -> sha (bytes).
        """
        self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            # ls-remote output: "<sha>\t<refname>"
            sha, ref = ref_line.split(b'\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in [HEAD_MARKER]:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
            elif not refs:
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # chunked to avoid exceeding OS argv length limits on huge repos
            for chunk in more_itertools.chunked(fetch_refs, 128):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})
        if kwargs.get('sync_large_objects'):
            # also pull down git-lfs objects referenced by the fetched refs
            self.run_git_command(
                wire, ['lfs', 'fetch', url, '--all'],
                fail_on_stderr=False,
                _copts=self._remote_conf(config),
            )

        return remote_refs
827
827
828 @reraise_safe_exceptions
828 @reraise_safe_exceptions
829 def sync_push(self, wire, url, refs=None, **kwargs):
829 def sync_push(self, wire, url, refs=None, **kwargs):
830 if not self.check_url(url, wire):
830 if not self.check_url(url, wire):
831 return
831 return
832 config = self._wire_to_config(wire)
832 config = self._wire_to_config(wire)
833 self._factory.repo(wire)
833 self._factory.repo(wire)
834 self.run_git_command(
834 self.run_git_command(
835 wire, ['push', url, '--mirror'], fail_on_stderr=False,
835 wire, ['push', url, '--mirror'], fail_on_stderr=False,
836 _copts=self._remote_conf(config),
836 _copts=self._remote_conf(config),
837 extra_env={'GIT_TERMINAL_PROMPT': '0'})
837 extra_env={'GIT_TERMINAL_PROMPT': '0'})
838 if kwargs.get('sync_large_objects'):
838 if kwargs.get('sync_large_objects'):
839 self.run_git_command(
839 self.run_git_command(
840 wire, ['lfs', 'push', url, '--all'],
840 wire, ['lfs', 'push', url, '--all'],
841 fail_on_stderr=False,
841 fail_on_stderr=False,
842 _copts=self._remote_conf(config),
842 _copts=self._remote_conf(config),
843 )
843 )
844
844
845 @reraise_safe_exceptions
845 @reraise_safe_exceptions
846 def get_remote_refs(self, wire, url):
846 def get_remote_refs(self, wire, url):
847 repo = Repo(url)
847 repo = Repo(url)
848 return repo.get_refs()
848 return repo.get_refs()
849
849
850 @reraise_safe_exceptions
850 @reraise_safe_exceptions
851 def get_description(self, wire):
851 def get_description(self, wire):
852 repo = self._factory.repo(wire)
852 repo = self._factory.repo(wire)
853 return repo.get_description()
853 return repo.get_description()
854
854
855 @reraise_safe_exceptions
855 @reraise_safe_exceptions
856 def get_missing_revs(self, wire, rev1, rev2, other_repo_path):
856 def get_missing_revs(self, wire, rev1, rev2, other_repo_path):
857 origin_repo_path = wire['path']
857 origin_repo_path = wire['path']
858 repo = self._factory.repo(wire)
858 repo = self._factory.repo(wire)
859 # fetch from other_repo_path to our origin repo
859 # fetch from other_repo_path to our origin repo
860 LocalGitClient(thin_packs=False).fetch(other_repo_path, repo)
860 LocalGitClient(thin_packs=False).fetch(other_repo_path, repo)
861
861
862 wire_remote = wire.copy()
862 wire_remote = wire.copy()
863 wire_remote['path'] = other_repo_path
863 wire_remote['path'] = other_repo_path
864 repo_remote = self._factory.repo(wire_remote)
864 repo_remote = self._factory.repo(wire_remote)
865
865
866 # fetch from origin_repo_path to our remote repo
866 # fetch from origin_repo_path to our remote repo
867 LocalGitClient(thin_packs=False).fetch(origin_repo_path, repo_remote)
867 LocalGitClient(thin_packs=False).fetch(origin_repo_path, repo_remote)
868
868
869 revs = [
869 revs = [
870 x.commit.id
870 x.commit.id
871 for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
871 for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
872 return revs
872 return revs
873
873
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """
        Resolve ``sha`` (a sha or ref-like revspec) to an object descriptor dict.

        Tags are peeled to their target commit. Unless the lookup went through a
        reference or ``maybe_unreachable`` is set, the commit must be reachable
        from at least one branch, otherwise it is treated as dangling.

        :returns: dict with ``id``, ``type``, ``commit_id`` and ``idx`` keys (cached).
        :raises exceptions.LookupException: when the object is missing or dangling.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                # peel annotated tags down to the commit they point at
                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_str = commit.type_str

                return {
                    'id': commit_id,
                    'type': type_str,
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
933
933
934 @reraise_safe_exceptions
934 @reraise_safe_exceptions
935 def get_refs(self, wire):
935 def get_refs(self, wire):
936 cache_on, context_uid, repo_id = self._cache_on(wire)
936 cache_on, context_uid, repo_id = self._cache_on(wire)
937 region = self._region(wire)
937 region = self._region(wire)
938
938
939 @region.conditional_cache_on_arguments(condition=cache_on)
939 @region.conditional_cache_on_arguments(condition=cache_on)
940 def _get_refs(_context_uid, _repo_id):
940 def _get_refs(_context_uid, _repo_id):
941
941
942 repo_init = self._factory.repo_libgit2(wire)
942 repo_init = self._factory.repo_libgit2(wire)
943 with repo_init as repo:
943 with repo_init as repo:
944 regex = re.compile('^refs/(heads|tags)/')
944 regex = re.compile('^refs/(heads|tags)/')
945 return {x.name: x.target.hex for x in
945 return {x.name: x.target.hex for x in
946 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
946 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
947
947
948 return _get_refs(context_uid, repo_id)
948 return _get_refs(context_uid, repo_id)
949
949
950 @reraise_safe_exceptions
950 @reraise_safe_exceptions
951 def get_branch_pointers(self, wire):
951 def get_branch_pointers(self, wire):
952 cache_on, context_uid, repo_id = self._cache_on(wire)
952 cache_on, context_uid, repo_id = self._cache_on(wire)
953 region = self._region(wire)
953 region = self._region(wire)
954
954
955 @region.conditional_cache_on_arguments(condition=cache_on)
955 @region.conditional_cache_on_arguments(condition=cache_on)
956 def _get_branch_pointers(_context_uid, _repo_id):
956 def _get_branch_pointers(_context_uid, _repo_id):
957
957
958 repo_init = self._factory.repo_libgit2(wire)
958 repo_init = self._factory.repo_libgit2(wire)
959 regex = re.compile('^refs/heads')
959 regex = re.compile('^refs/heads')
960 with repo_init as repo:
960 with repo_init as repo:
961 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
961 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
962 return {x.target.hex: x.shorthand for x in branches}
962 return {x.target.hex: x.shorthand for x in branches}
963
963
964 return _get_branch_pointers(context_uid, repo_id)
964 return _get_branch_pointers(context_uid, repo_id)
965
965
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        """
        Return the hex id of the commit HEAD points at (cached).

        :param show_exc: when False, resolution errors are swallowed and the
            function returns None instead of raising.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    # NOTE: reads the outer `show_exc`; the underscored
                    # parameter exists only to key the cache
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)
981
981
982 @reraise_safe_exceptions
982 @reraise_safe_exceptions
983 def init(self, wire):
983 def init(self, wire):
984 repo_path = safe_str(wire['path'])
984 repo_path = safe_str(wire['path'])
985 os.makedirs(repo_path, mode=0o755)
985 os.makedirs(repo_path, mode=0o755)
986 pygit2.init_repository(repo_path, bare=False)
986 pygit2.init_repository(repo_path, bare=False)
987
987
988 @reraise_safe_exceptions
988 @reraise_safe_exceptions
989 def init_bare(self, wire):
989 def init_bare(self, wire):
990 repo_path = safe_str(wire['path'])
990 repo_path = safe_str(wire['path'])
991 os.makedirs(repo_path, mode=0o755)
991 os.makedirs(repo_path, mode=0o755)
992 pygit2.init_repository(repo_path, bare=True)
992 pygit2.init_repository(repo_path, bare=True)
993
993
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """
        Return ``{'id': <hex>, 'tree': <hex>}`` for object ``rev`` (cached).

        The ``tree`` key is only present for objects that carry a ``tree_id``
        (i.e. commits; tree objects themselves do not).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            # NOTE: reads outer `rev`; underscored params only key the cache
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects itself don't have tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)
1014
1014
    @reraise_safe_exceptions
    def date(self, wire, commit_id):
        """
        Return ``[commit_time, commit_time_offset]`` for ``commit_id`` (cached).

        Works for both commit objects and tag-like objects (peeled via
        ``get_object()`` when the attribute is missing).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _date(_repo_id, _commit_id):
            # NOTE: reads outer `commit_id`; underscored params only key the cache
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'commit_time'):
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
                else:
                    # not a commit (e.g. a tag) — peel to the target object
                    commit = commit.get_object()
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset

                # TODO(marcink): check dulwich difference of offset vs timezone
                return [commit_time, commit_time_offset]
        return _date(repo_id, commit_id)
1035
1035
    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        """
        Return the author of ``commit_id`` as ``"Name <email>"`` (cached).

        Falls back to the bare name when no email is set, and to the raw
        (bytes) name when the name cannot be formatted.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            # NOTE: reads outer `commit_id`; underscored params only key the cache
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    # not a commit (e.g. a tag) — peel to the target object
                    author = commit.get_object().author

                if author.email:
                    return f"{author.name} <{author.email}>"

                try:
                    return f"{author.name}"
                except Exception:
                    # name not decodable — fall back to the raw bytes form
                    return f"{safe_str(author.raw_name)}"

        return _author(repo_id, commit_id)
1061
1061
    @reraise_safe_exceptions
    def message(self, wire, commit_id):
        """Return the commit message of ``commit_id`` (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _message(_repo_id, _commit_id):
            # NOTE: reads outer `commit_id`; underscored params only key the cache
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                return commit.message
        return _message(repo_id, commit_id)
1074
1074
    @reraise_safe_exceptions
    def parents(self, wire, commit_id):
        """Return the list of parent commit hex ids of ``commit_id`` (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _parents(_repo_id, _commit_id):
            # NOTE: reads outer `commit_id`; underscored params only key the cache
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                if hasattr(commit, 'parent_ids'):
                    parent_ids = commit.parent_ids
                else:
                    # not a commit (e.g. a tag) — peel to the target object
                    parent_ids = commit.get_object().parent_ids

                return [x.hex for x in parent_ids]
        return _parents(repo_id, commit_id)
1092
1092
    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        """
        Return the ids of direct children of ``commit_id`` (cached).

        Uses ``git rev-list --children`` over the range ``commit_id^..HEAD`` and
        picks the line starting with ``commit_id``; the remaining whitespace
        separated fields on that line are its children.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        # resolved before defining the cached function so HEAD is part of the range
        head = self.head(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):
            # NOTE: reads outer `commit_id`/`head`; underscored params only key the cache

            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])

            child_ids = []
            pat = re.compile(fr'^{commit_id}')
            for line in output.splitlines():
                line = safe_str(line)
                if pat.match(line):
                    # line format: "<commit_id> <child1> <child2> ..."
                    found_ids = line.split(' ')[1:]
                    child_ids.extend(found_ids)
                    break

            return child_ids
        return _children(repo_id, commit_id)
1117
1117
1118 @reraise_safe_exceptions
1118 @reraise_safe_exceptions
1119 def set_refs(self, wire, key, value):
1119 def set_refs(self, wire, key, value):
1120 repo_init = self._factory.repo_libgit2(wire)
1120 repo_init = self._factory.repo_libgit2(wire)
1121 with repo_init as repo:
1121 with repo_init as repo:
1122 repo.references.create(key, value, force=True)
1122 repo.references.create(key, value, force=True)
1123
1123
1124 @reraise_safe_exceptions
1124 @reraise_safe_exceptions
1125 def update_refs(self, wire, key, value):
1125 def update_refs(self, wire, key, value):
1126 repo_init = self._factory.repo_libgit2(wire)
1126 repo_init = self._factory.repo_libgit2(wire)
1127 with repo_init as repo:
1127 with repo_init as repo:
1128 if key not in repo.references:
1128 if key not in repo.references:
1129 raise ValueError(f'Reference {key} not found in the repository')
1129 raise ValueError(f'Reference {key} not found in the repository')
1130 repo.references.create(key, value, force=True)
1130 repo.references.create(key, value, force=True)
1131
1131
1132 @reraise_safe_exceptions
1132 @reraise_safe_exceptions
1133 def create_branch(self, wire, branch_name, commit_id, force=False):
1133 def create_branch(self, wire, branch_name, commit_id, force=False):
1134 repo_init = self._factory.repo_libgit2(wire)
1134 repo_init = self._factory.repo_libgit2(wire)
1135 with repo_init as repo:
1135 with repo_init as repo:
1136 if commit_id:
1136 if commit_id:
1137 commit = repo[commit_id]
1137 commit = repo[commit_id]
1138 else:
1138 else:
1139 # if commit is not given just use the HEAD
1139 # if commit is not given just use the HEAD
1140 commit = repo.head()
1140 commit = repo.head()
1141
1141
1142 if force:
1142 if force:
1143 repo.branches.local.create(branch_name, commit, force=force)
1143 repo.branches.local.create(branch_name, commit, force=force)
1144 elif not repo.branches.get(branch_name):
1144 elif not repo.branches.get(branch_name):
1145 # create only if that branch isn't existing
1145 # create only if that branch isn't existing
1146 repo.branches.local.create(branch_name, commit, force=force)
1146 repo.branches.local.create(branch_name, commit, force=force)
1147
1147
1148 @reraise_safe_exceptions
1148 @reraise_safe_exceptions
1149 def remove_ref(self, wire, key):
1149 def remove_ref(self, wire, key):
1150 repo_init = self._factory.repo_libgit2(wire)
1150 repo_init = self._factory.repo_libgit2(wire)
1151 with repo_init as repo:
1151 with repo_init as repo:
1152 repo.references.delete(key)
1152 repo.references.delete(key)
1153
1153
1154 @reraise_safe_exceptions
1154 @reraise_safe_exceptions
1155 def tag_remove(self, wire, tag_name):
1155 def tag_remove(self, wire, tag_name):
1156 repo_init = self._factory.repo_libgit2(wire)
1156 repo_init = self._factory.repo_libgit2(wire)
1157 with repo_init as repo:
1157 with repo_init as repo:
1158 key = f'refs/tags/{tag_name}'
1158 key = f'refs/tags/{tag_name}'
1159 repo.references.delete(key)
1159 repo.references.delete(key)
1160
1160
1161 @reraise_safe_exceptions
1161 @reraise_safe_exceptions
1162 def tree_changes(self, wire, source_id, target_id):
1162 def tree_changes(self, wire, source_id, target_id):
1163 repo = self._factory.repo(wire)
1163 repo = self._factory.repo(wire)
1164 # source can be empty
1164 # source can be empty
1165 source_id = safe_bytes(source_id if source_id else b'')
1165 source_id = safe_bytes(source_id if source_id else b'')
1166 target_id = safe_bytes(target_id)
1166 target_id = safe_bytes(target_id)
1167
1167
1168 source = repo[source_id].tree if source_id else None
1168 source = repo[source_id].tree if source_id else None
1169 target = repo[target_id].tree
1169 target = repo[target_id].tree
1170 result = repo.object_store.tree_changes(source, target)
1170 result = repo.object_store.tree_changes(source, target)
1171
1171
1172 added = set()
1172 added = set()
1173 modified = set()
1173 modified = set()
1174 deleted = set()
1174 deleted = set()
1175 for (old_path, new_path), (_, _), (_, _) in list(result):
1175 for (old_path, new_path), (_, _), (_, _) in list(result):
1176 if new_path and old_path:
1176 if new_path and old_path:
1177 modified.add(new_path)
1177 modified.add(new_path)
1178 elif new_path and not old_path:
1178 elif new_path and not old_path:
1179 added.add(new_path)
1179 added.add(new_path)
1180 elif not new_path and old_path:
1180 elif not new_path and old_path:
1181 deleted.add(old_path)
1181 deleted.add(old_path)
1182
1182
1183 return list(added), list(modified), list(deleted)
1183 return list(added), list(modified), list(deleted)
1184
1184
    @reraise_safe_exceptions
    def tree_and_type_for_path(self, wire, commit_id, path):
        """
        Look up ``path`` inside the tree of ``commit_id`` (cached).

        :returns: tuple ``(hex_id, type_str, filemode)`` of the tree entry,
            or ``(None, None, None)`` when the path does not exist.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
            # NOTE: reads outer `commit_id`/`path`; underscored params only key the cache
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]
                try:
                    tree = commit.tree[path]
                except KeyError:
                    return None, None, None

                return tree.id.hex, tree.type_str, tree.filemode
        return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1204
1204
@reraise_safe_exceptions
def tree_items(self, wire, tree_id):
    """
    List the direct entries of the tree object ``tree_id``.

    :return: list of ``(name, filemode, sha_hex, type_str)`` tuples.
    :raises ObjectMissing: when no tree with that id exists.
        Cached per repo/tree when caching is enabled for this wire.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _tree_items(_repo_id, _tree_id):

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            try:
                tree = repo[tree_id]
            except KeyError:
                raise ObjectMissing(f'No tree with id: {tree_id}')

            result = []
            for item in tree:
                item_sha = item.hex
                item_mode = item.filemode
                item_type = item.type_str

                if item_type == 'commit':
                    # NOTE(marcink): submodules we translate to 'link' for backward compat
                    item_type = 'link'

                result.append((item.name, item_mode, item_sha, item_type))
            return result
    return _tree_items(repo_id, tree_id)
1233
1233
@reraise_safe_exceptions
def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
    """
    Old version that uses subprocess to call diff.

    :param commit_id_1: base commit; ``self.EMPTY_COMMIT`` means diff
        against the empty tree (rendered via ``git show``).
    :param commit_id_2: target commit.
    :param file_filter: optional single path to restrict the diff to.
    :param opt_ignorews: ignore all whitespace changes when truthy.
    :param context: number of unified-diff context lines.
    :return: raw diff output as bytes.
    """

    flags = [
        f'-U{context}', '--patch',
        '--binary',
        '--find-renames',
        '--no-indent-heuristic',
    ]

    if opt_ignorews:
        flags.append('--ignore-all-space')

    if commit_id_1 == self.EMPTY_COMMIT:
        # no parent to diff against: render the whole commit via `show`
        cmd = ['show'] + flags + [commit_id_2]
    else:
        cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

    if file_filter:
        cmd.extend(['--', file_filter])

    diff, __ = self.run_git_command(wire, cmd)
    # If we used 'show' command, strip first few lines (until actual diff
    # starts)
    if commit_id_1 == self.EMPTY_COMMIT:
        lines = diff.splitlines()
        x = 0
        for line in lines:
            if line.startswith(b'diff'):
                break
            x += 1
        # BUG FIX: run_git_command returns bytes, so re-join with bytes
        # separators; joining bytes lines with str '\n' raised TypeError.
        # Append new line just like 'diff' command do
        diff = b'\n'.join(lines[x:]) + b'\n'
    return diff
1274
1274
@reraise_safe_exceptions
def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
    """
    Produce a diff between two commits using pygit2 (libgit2).

    :param commit_id_1: base commit; ``self.EMPTY_COMMIT`` means diff
        against the empty tree.
    :param commit_id_2: target commit.
    :param file_filter: optional single path; when given, only that file's
        patch is returned (empty envelope when it did not change).
    :param opt_ignorews: ignore whitespace changes when truthy.
    :param context: number of context lines in the patch.
    :return: BytesEnvelope with the patch text.
    """
    repo_init = self._factory.repo_libgit2(wire)

    with repo_init as repo:
        # swap=True reverses the tree-vs-tree orientation so the patch reads
        # base -> target
        swap = True
        flags = 0
        flags |= pygit2.GIT_DIFF_SHOW_BINARY

        if opt_ignorews:
            flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

        if commit_id_1 == self.EMPTY_COMMIT:
            # diff target tree against the empty tree (everything is an add)
            comm1 = repo[commit_id_2]
            diff_obj = comm1.tree.diff_to_tree(
                flags=flags, context_lines=context, swap=swap)

        else:
            comm1 = repo[commit_id_2]
            comm2 = repo[commit_id_1]
            diff_obj = comm1.tree.diff_to_tree(
                comm2.tree, flags=flags, context_lines=context, swap=swap)
        similar_flags = 0
        similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
        # post-process the diff to detect renamed files
        diff_obj.find_similar(flags=similar_flags)

        if file_filter:
            for p in diff_obj:
                if p.delta.old_file.path == file_filter:
                    return BytesEnvelope(p.data) or BytesEnvelope(b'')
            # no matching path == no diff
            return BytesEnvelope(b'')

        return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1309
1309
@reraise_safe_exceptions
def node_history(self, wire, commit_id, path, limit):
    """
    Return the ids of commits that touched ``path``, starting at ``commit_id``.

    :param limit: maximum number of commits; ``1`` takes a faster
        ``rev-list`` shortcut, falsy means unlimited.
    :return: list of 40-hex-char commit ids as bytes.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
        # consistently read the cache-key parameters (same values as the
        # enclosing scope) like the other cached closures in this class
        # optimize for n==1, rev-list is much faster for that use-case
        if _limit == 1:
            cmd = ['rev-list', '-1', _commit_id, '--', _path]
        else:
            cmd = ['log']
            if _limit:
                cmd.extend(['-n', str(safe_int(_limit, 0))])
            cmd.extend(['--pretty=format: %H', '-s', _commit_id, '--', _path])

        output, __ = self.run_git_command(wire, cmd)
        # findall already returns a fresh list of bytes shas; no copy needed
        return re.findall(rb'[0-9a-fA-F]{40}', output)

    return _node_history(context_uid, repo_id, commit_id, path, limit)
1331
1331
@reraise_safe_exceptions
def node_annotate_legacy(self, wire, commit_id, path):
    """
    Blame ``path`` at ``commit_id`` by shelling out to ``git blame``.

    :return: list of ``(line_no, blame_commit_id_bytes, line_bytes)`` tuples.
    """
    # note: replaced by pygit2 implementation
    cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
    # -l ==> outputs long shas (and we need all 40 characters)
    # --root ==> doesn't put '^' character for boundaries
    # -r commit_id ==> blames for the given commit
    output, __ = self.run_git_command(wire, cmd)

    result = []
    # NOTE(review): [:-1] drops the final splitlines() entry; since
    # splitlines() emits no trailing empty chunk for newline-terminated
    # output, this looks like it discards the last blame line — confirm
    # (legacy path, superseded by node_annotate).
    for i, blame_line in enumerate(output.splitlines()[:-1]):
        line_no = i + 1
        # plain bytes.split replaces re.split(rb' ', ..., 1): identical for
        # a single-space separator, and positional maxsplit for re.split is
        # deprecated since Python 3.13
        blame_commit_id, line = blame_line.split(b' ', 1)
        result.append((line_no, blame_commit_id, line))

    return result
1348
1348
@reraise_safe_exceptions
def node_annotate(self, wire, commit_id, path):
    """
    Blame ``path`` at ``commit_id`` using pygit2.

    :return: BinaryEnvelope of ``(line_no, blame_commit_id_hex, line_bytes)``
        tuples, one per line of the file content.
    """

    result_libgit = []
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        commit = repo[commit_id]
        blame_obj = repo.blame(path, newest_commit=commit_id)
        file_content = commit.tree[path].data
        for i, line in enumerate(splitnewlines(file_content)):
            line_no = i + 1
            # hunk covering this line; its final commit is the attribution
            hunk = blame_obj.for_line(line_no)
            blame_commit_id = hunk.final_commit_id.hex

            result_libgit.append((line_no, blame_commit_id, line))

    return BinaryEnvelope(result_libgit)
1366
1366
@reraise_safe_exceptions
def update_server_info(self, wire, force=False):
    """
    Run ``git update-server-info`` for the repository behind ``wire``.

    :param force: pass ``--force`` to rebuild the info files unconditionally.
    :return: the command's output split into lines.
    """
    cmd = ['update-server-info', '--force'] if force else ['update-server-info']
    output, _stderr = self.run_git_command(wire, cmd)
    return output.splitlines()
1374
1374
@reraise_safe_exceptions
def get_all_commit_ids(self, wire):
    """
    Return all commit ids reachable from branches and tags, oldest first.

    Uses ``git rev-list``; an empty repository yields an empty list.
    Results are cached when caching is enabled for this wire.
    """

    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_all_commit_ids(_context_uid, _repo_id):

        cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
        try:
            output, __ = self.run_git_command(wire, cmd)
            return output.splitlines()
        except Exception:
            # Can be raised for empty repositories
            return []

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
        # alternative libgit2-based implementation (currently not selected)
        repo_init = self._factory.repo_libgit2(wire)
        from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
        results = []
        with repo_init as repo:
            for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
                results.append(commit.id.hex)
        # BUG FIX: the collected ids were built but never returned,
        # so the function implicitly returned None
        return results

    return _get_all_commit_ids(context_uid, repo_id)
1402
1402
@reraise_safe_exceptions
def run_git_command(self, wire, cmd, **opts):
    """
    Execute the git binary for the repository behind ``wire``.

    :param wire: connection descriptor; ``wire['path']`` (when an existing
        directory) becomes the subprocess working directory.
    :param cmd: git arguments, without the leading executable name.
    :param opts: subprocess options; recognizes the special keys
        ``_bare`` (skip the default ``-c`` config flags),
        ``_safe`` (return ``('', err)`` instead of raising on OSError),
        ``_copts`` (extra ``-c``-style config options) and
        ``extra_env`` (environment overrides).
    :return: ``(stdout, stderr)`` tuple of the consumed output streams.
    :raises exceptions.VcsException: when the process cannot be started
        and ``_safe`` was not requested.
    """
    path = wire.get('path', None)
    debug_mode = vcsserver.ConfigGet().get_bool('debug')

    if path and os.path.isdir(path):
        opts['cwd'] = path

    if '_bare' in opts:
        _copts = []
        del opts['_bare']
    else:
        _copts = ['-c', 'core.quotepath=false', '-c', 'advice.diverging=false']
    safe_call = False
    if '_safe' in opts:
        # no exc on failure
        del opts['_safe']
        safe_call = True

    if '_copts' in opts:
        _copts.extend(opts['_copts'] or [])
        del opts['_copts']

    gitenv = os.environ.copy()
    gitenv.update(opts.pop('extra_env', {}))
    # need to clean fix GIT_DIR !
    if 'GIT_DIR' in gitenv:
        del gitenv['GIT_DIR']
    # isolate from user/global git config and allow cross-filesystem repos
    gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
    gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

    cmd = [settings.GIT_EXECUTABLE()] + _copts + cmd
    _opts = {'env': gitenv, 'shell': False}

    proc = None
    try:
        _opts.update(opts)
        proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

        # joining the chunker fully consumes stdout/stderr
        return b''.join(proc), b''.join(proc.stderr)
    except OSError as err:
        cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
        call_opts = {}
        if debug_mode:
            call_opts = _opts

        tb_err = ("Couldn't run git command ({}).\n"
                  "Original error was:{}\n"
                  "Call options:{}\n"
                  .format(cmd, err, call_opts))
        log.exception(tb_err)
        if safe_call:
            return '', err
        else:
            raise exceptions.VcsException()(tb_err)
    finally:
        if proc:
            proc.close()
1461
1461
@reraise_safe_exceptions
def install_hooks(self, wire, force=False):
    """
    Install RhodeCode git hooks into the repository at ``wire['path']``.

    :param force: re-create the hook files even when they already exist.
    :return: whatever ``install_git_hooks`` reports about the installation.
    """
    from vcsserver.hook_utils import install_git_hooks
    bare = self.bare(wire)
    path = wire['path']
    binary_dir = settings.BINARY_DIR
    if binary_dir:
        # NOTE(review): this joined path is computed but never assigned or
        # used — looks like dead code left from an earlier refactor; confirm
        # before removing.
        os.path.join(binary_dir, 'python3')
    return install_git_hooks(path, bare, force_create=force)
1471
1471
@reraise_safe_exceptions
def get_hooks_info(self, wire):
    """
    Report the installed pre/post hook versions for the repo behind ``wire``.

    :return: dict with ``pre_version`` and ``post_version`` keys.
    """
    from vcsserver.hook_utils import (
        get_git_pre_hook_version, get_git_post_hook_version)
    repo_path = wire['path']
    is_bare = self.bare(wire)
    pre_version = get_git_pre_hook_version(repo_path, is_bare)
    post_version = get_git_post_hook_version(repo_path, is_bare)
    return {
        'pre_version': pre_version,
        'post_version': post_version,
    }
1482
1482
@reraise_safe_exceptions
def set_head_ref(self, wire, head_name):
    """
    Point the repository HEAD at ``refs/heads/<head_name>``.

    :return: list of ``[head_name, status_message]``.
    """
    log.debug('Setting refs/head to `%s`', head_name)
    target_ref = f'refs/heads/{head_name}'
    with self._factory.repo_libgit2(wire) as repo:
        repo.set_head(target_ref)

    return [head_name, f'set HEAD to {target_ref}']
1491
1491
@reraise_safe_exceptions
def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, cache_config):
    """
    Build (and cache) an archive of the repository content at ``commit_id``.

    :param archive_name_key: cache key for the generated archive.
    :param kind: archive format, interpreted by ``store_archive_in_cache``.
    :param mtime: modification time stamped into the archive.
    :param archive_at_path: sub-path inside the repo to archive
        (``''`` or ``'/'`` means the repository root).
    :param archive_dir_name: top-level directory name inside the archive.
    :param cache_config: configuration for the archive cache backend.
    """

    def file_walker(_commit_id, path):
        # generator yielding an ArchiveNode for every file under `path`
        repo_init = self._factory.repo_libgit2(wire)

        with repo_init as repo:
            commit = repo[commit_id]

            if path in ['', '/']:
                tree = commit.tree
            else:
                tree = commit.tree[path.rstrip('/')]
            tree_id = tree.id.hex
            try:
                tree = repo[tree_id]
            except KeyError:
                raise ObjectMissing(f'No tree with id: {tree_id}')

            # read the tree into an in-memory index to get a flat file list
            index = LibGit2Index.Index()
            index.read_tree(tree)
            file_iter = index

            for file_node in file_iter:
                file_path = file_node.path
                mode = file_node.mode
                is_link = stat.S_ISLNK(mode)
                if mode == pygit2.GIT_FILEMODE_COMMIT:
                    # submodule entries have no blob content to archive
                    log.debug('Skipping path %s as a commit node', file_path)
                    continue
                yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)

    return store_archive_in_cache(
        file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
@@ -1,1236 +1,1236 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import binascii
18 import binascii
19 import io
19 import io
20 import logging
20 import logging
21 import stat
21 import stat
22 import sys
22 import sys
23 import urllib.request
23 import urllib.request
24 import urllib.parse
24 import urllib.parse
25 import hashlib
25 import hashlib
26 import traceback
26 import traceback
27
27
28 from hgext import largefiles, rebase
28 from hgext import largefiles, rebase
29
29
30 from mercurial import commands
30 from mercurial import commands
31 from mercurial import unionrepo
31 from mercurial import unionrepo
32 from mercurial import verify
32 from mercurial import verify
33 from mercurial import repair
33 from mercurial import repair
34 from mercurial.error import AmbiguousPrefixLookupError
34 from mercurial.error import AmbiguousPrefixLookupError
35 from mercurial.utils.urlutil import path as hg_path
35 from mercurial.utils.urlutil import path as hg_path
36
36
37 import vcsserver
37 import vcsserver
38 from vcsserver import exceptions
38 from vcsserver import exceptions
39 from vcsserver.base import (
39 from vcsserver.base import (
40 RepoFactory,
40 RepoFactory,
41 obfuscate_qs,
41 obfuscate_qs,
42 raise_from_original,
42 raise_from_original,
43 store_archive_in_cache,
43 store_archive_in_cache,
44 ArchiveNode,
44 ArchiveNode,
45 BytesEnvelope,
45 BytesEnvelope,
46 BinaryEnvelope,
46 BinaryEnvelope,
47 )
47 )
48 from vcsserver.hgcompat import (
48 from vcsserver.hgcompat import (
49 archival,
49 archival,
50 bin,
50 bin,
51 clone,
51 clone,
52 config as hgconfig,
52 config as hgconfig,
53 diffopts,
53 diffopts,
54 hex,
54 hex,
55 get_ctx,
55 get_ctx,
56 hg_url as url_parser,
56 hg_url as url_parser,
57 httpbasicauthhandler,
57 httpbasicauthhandler,
58 httpdigestauthhandler,
58 httpdigestauthhandler,
59 make_peer,
59 make_peer,
60 instance,
60 instance,
61 match,
61 match,
62 memctx,
62 memctx,
63 exchange,
63 exchange,
64 memfilectx,
64 memfilectx,
65 nullrev,
65 nullrev,
66 hg_merge,
66 hg_merge,
67 patch,
67 patch,
68 peer,
68 peer,
69 revrange,
69 revrange,
70 ui,
70 ui,
71 hg_tag,
71 hg_tag,
72 Abort,
72 Abort,
73 LookupError,
73 LookupError,
74 RepoError,
74 RepoError,
75 RepoLookupError,
75 RepoLookupError,
76 InterventionRequired,
76 InterventionRequired,
77 RequirementError,
77 RequirementError,
78 alwaysmatcher,
78 alwaysmatcher,
79 patternmatcher,
79 patternmatcher,
80 hgext_strip,
80 hgext_strip,
81 )
81 )
82 from vcsserver.lib.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes, convert_to_str
82 from vcsserver.lib.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes, convert_to_str
83 from vcsserver.vcs_base import RemoteBase
83 from vcsserver.vcs_base import RemoteBase
84 from vcsserver.config import hooks as hooks_config
84 from vcsserver.config import hooks as hooks_config
85 from vcsserver.lib.exc_tracking import format_exc
85 from vcsserver.lib.exc_tracking import format_exc
86
86
87 log = logging.getLogger(__name__)
87 log = logging.getLogger(__name__)
88
88
89
89
def make_ui_from_config(repo_config, interactive=True):
    """
    Build a Mercurial ``ui`` object from ``(section, option, value)`` tuples,
    routing all of its output channels through the Python logger.

    :param repo_config: iterable of ``(section, option, value)`` triples.
    :param interactive: whether the ui is marked interactive.
    :return: configured ``LoggingUI`` (a ``ui.ui`` subclass) instance.
    """

    class LoggingUI(ui.ui):
        # redirect every ui output channel to our logger instead of stdio

        def status(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info(' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig(b'ui', b'quiet', b'true')

    baseui.setconfig(b'ui', b'paginate', b'never')
    # for better Error reporting of Mercurial
    baseui.setconfig(b'ui', b'message-output', b'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig(b'worker', b'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig(b'extensions', b'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig(b'extensions', b'largefiles', b'!')

    baseui.setconfig(b'ui', b'interactive', b'true' if interactive else b'false')
    return baseui
150
150
151
151
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            # user-facing aborts and interactive prompts
            raise_from_original(exceptions.AbortException(e), e)
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e), e)
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e), e)
        except LookupError as e:
            # NOTE: this is mercurial's LookupError (imported above),
            # not the builtin of the same name
            raise_from_original(exceptions.LookupException(e), e)
        except Exception as e:
            # exceptions already tagged with _vcs_kind are ours: re-raise as-is
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e), e)

            raise
    return wrapper
175
175
176
176
class MercurialFactory(RepoFactory):
    """Factory producing mercurial repository objects for a given wire."""

    # backend identifier used by the RepoFactory machinery
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """
        Build a Mercurial ui object from ``config``; when ``hooks`` is False,
        the RhodeCode-managed hook entries are stripped from the config first.
        """
        if not hooks:

            hooks_to_clean = {

                hooks_config.HOOK_REPO_SIZE,
                hooks_config.HOOK_PRE_PULL,
                hooks_config.HOOK_PULL,

                hooks_config.HOOK_PRE_PUSH,
                # TODO: what about PRETXT, this was disabled in pre 5.0.0
                hooks_config.HOOK_PRETX_PUSH,

            }
            new_config = []
            for section, option, value in config:
                if section == 'hooks' and option in hooks_to_clean:
                    continue
                new_config.append((section, option, value))
            config = new_config

        baseui = make_ui_from_config(config)
        return baseui

    def _create_repo(self, wire, create):
        """Instantiate a mercurial repository object for ``wire['path']``."""
        baseui = self._create_config(wire["config"])
        repo = instance(baseui, safe_bytes(wire["path"]), create)
        log.debug('repository created: got HG object: %s', repo)
        return repo

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
215
215
216
216
def patch_ui_message_output(baseui):
    """Redirect every output channel of *baseui* into a single BytesIO buffer.

    Returns the patched ui together with the buffer so callers can inspect
    everything mercurial would have printed.
    """
    baseui.setconfig(b'ui', b'quiet', b'false')
    captured = io.BytesIO()

    def _capture(data, **unused_kwargs):
        captured.write(data)

    # route all four ui channels through the same sink
    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, _capture)

    return baseui, captured
230
230
231
231
def get_obfuscated_url(url_obj):
    """Return *url_obj* rendered as a string with secrets masked.

    The password (when present) and the query string values are obfuscated
    in-place on *url_obj* before stringifying.
    """
    if url_obj.passwd:
        url_obj.passwd = b'*****'
    url_obj.query = obfuscate_qs(url_obj.query)
    return str(url_obj)
237
237
238
238
def normalize_url_for_hg(url: str):
    """Split a compound scheme like ``svn+http://`` off *url*.

    Returns ``(url_without_prefix, prefix_or_None)``, e.g.
    ``'svn+http://h/r'`` -> ``('http://h/r', 'svn')`` and a plain URL is
    returned unchanged with ``None``.
    """
    scheme_end = url.find('://')  # may be -1; slice semantics kept on purpose
    if '+' in url[:scheme_end]:
        plus_at = url.find('+')
        return url[plus_at + 1:], url[:plus_at]
    return url, None
246
246
247
247
248 class HgRemote(RemoteBase):
248 class HgRemote(RemoteBase):
249
249
250 def __init__(self, factory):
250 def __init__(self, factory):
251 self._factory = factory
251 self._factory = factory
252 self._bulk_methods = {
252 self._bulk_methods = {
253 "affected_files": self.ctx_files,
253 "affected_files": self.ctx_files,
254 "author": self.ctx_user,
254 "author": self.ctx_user,
255 "branch": self.ctx_branch,
255 "branch": self.ctx_branch,
256 "children": self.ctx_children,
256 "children": self.ctx_children,
257 "date": self.ctx_date,
257 "date": self.ctx_date,
258 "message": self.ctx_description,
258 "message": self.ctx_description,
259 "parents": self.ctx_parents,
259 "parents": self.ctx_parents,
260 "status": self.ctx_status,
260 "status": self.ctx_status,
261 "obsolete": self.ctx_obsolete,
261 "obsolete": self.ctx_obsolete,
262 "phase": self.ctx_phase,
262 "phase": self.ctx_phase,
263 "hidden": self.ctx_hidden,
263 "hidden": self.ctx_hidden,
264 "_file_paths": self.ctx_list,
264 "_file_paths": self.ctx_list,
265 }
265 }
266 self._bulk_file_methods = {
266 self._bulk_file_methods = {
267 "size": self.fctx_size,
267 "size": self.fctx_size,
268 "data": self.fctx_node_data,
268 "data": self.fctx_node_data,
269 "flags": self.fctx_flags,
269 "flags": self.fctx_flags,
270 "is_binary": self.is_binary,
270 "is_binary": self.is_binary,
271 "md5": self.md5_hash,
271 "md5": self.md5_hash,
272 }
272 }
273
273
    def _get_ctx(self, repo, ref):
        """Resolve *ref* (commit id / revision) to a mercurial changectx
        through the shared ``get_ctx`` helper."""
        return get_ctx(repo, ref)
276
276
    @reraise_safe_exceptions
    def discover_hg_version(self):
        """Return the version string of the mercurial library in use."""
        from mercurial import util
        return safe_str(util.version())
281
281
282 @reraise_safe_exceptions
282 @reraise_safe_exceptions
283 def is_empty(self, wire):
283 def is_empty(self, wire):
284 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
285
285
286 try:
286 try:
287 return len(repo) == 0
287 return len(repo) == 0
288 except Exception:
288 except Exception:
289 log.exception("failed to read object_store")
289 log.exception("failed to read object_store")
290 return False
290 return False
291
291
    @reraise_safe_exceptions
    def bookmarks(self, wire):
        """Return a mapping of bookmark name -> hex commit id.

        Cached in the dogpile region when caching is enabled for this wire;
        the inner closure's name and arguments form the cache key, so they
        must not be renamed casually.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}

        return _bookmarks(context_uid, repo_id)
303
303
    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        """Return branch name -> tip hex id, filtered by the flags.

        :param normal: include open branches.
        :param closed: include closed branches.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip_node, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
                if closed and is_closed:
                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))

            return bt

        return _branches(context_uid, repo_id, normal, closed)
323
323
    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        """Resolve several commit attributes for *commit_id* in one call.

        :param pre_load: iterable of attribute names; each must be a key of
            ``self._bulk_methods``, otherwise ``exceptions.VcsException``
            is raised.
        :returns: dict mapping attribute name -> resolved value.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        f'Unknown bulk attribute: "{attr}"')
            return result

        # sorted() keeps the cache key stable regardless of caller ordering
        return _bulk_request(repo_id, commit_id, sorted(pre_load))
343
343
    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        """Return the branch name of *commit_id* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()
        return _ctx_branch(repo_id, commit_id)
355
355
    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        """Return the commit date of *commit_id* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)
367
367
368 @reraise_safe_exceptions
368 @reraise_safe_exceptions
369 def ctx_description(self, wire, revision):
369 def ctx_description(self, wire, revision):
370 repo = self._factory.repo(wire)
370 repo = self._factory.repo(wire)
371 ctx = self._get_ctx(repo, revision)
371 ctx = self._get_ctx(repo, revision)
372 return ctx.description()
372 return ctx.description()
373
373
    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        """Return the list of files touched by *commit_id* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)
386
386
387 @reraise_safe_exceptions
387 @reraise_safe_exceptions
388 def ctx_list(self, path, revision):
388 def ctx_list(self, path, revision):
389 repo = self._factory.repo(path)
389 repo = self._factory.repo(path)
390 ctx = self._get_ctx(repo, revision)
390 ctx = self._get_ctx(repo, revision)
391 return list(ctx)
391 return list(ctx)
392
392
    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        """Return hex ids of the visible parents of *commit_id* (cached).

        Hidden and obsolete parents are filtered out.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)
406
406
    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        """Return hex ids of the visible children of *commit_id* (cached).

        Hidden and obsolete children are filtered out.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)
420
420
    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        """Return the mercurial phase of *commit_id* as an int (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=3
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)
433
433
    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        """Return True when *commit_id* is marked obsolete (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)
445
445
    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        """Return True when *commit_id* is hidden (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)
457
457
458 @reraise_safe_exceptions
458 @reraise_safe_exceptions
459 def ctx_substate(self, wire, revision):
459 def ctx_substate(self, wire, revision):
460 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
461 ctx = self._get_ctx(repo, revision)
461 ctx = self._get_ctx(repo, revision)
462 return ctx.substate
462 return ctx.substate
463
463
464 @reraise_safe_exceptions
464 @reraise_safe_exceptions
465 def ctx_status(self, wire, revision):
465 def ctx_status(self, wire, revision):
466 repo = self._factory.repo(wire)
466 repo = self._factory.repo(wire)
467 ctx = self._get_ctx(repo, revision)
467 ctx = self._get_ctx(repo, revision)
468 status = repo[ctx.p1().node()].status(other=ctx.node())
468 status = repo[ctx.p1().node()].status(other=ctx.node())
469 # object of status (odd, custom named tuple in mercurial) is not
469 # object of status (odd, custom named tuple in mercurial) is not
470 # correctly serializable, we make it a list, as the underling
470 # correctly serializable, we make it a list, as the underling
471 # API expects this to be a list
471 # API expects this to be a list
472 return list(status)
472 return list(status)
473
473
474 @reraise_safe_exceptions
474 @reraise_safe_exceptions
475 def ctx_user(self, wire, revision):
475 def ctx_user(self, wire, revision):
476 repo = self._factory.repo(wire)
476 repo = self._factory.repo(wire)
477 ctx = self._get_ctx(repo, revision)
477 ctx = self._get_ctx(repo, revision)
478 return ctx.user()
478 return ctx.user()
479
479
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Validate that *url* points at a reachable mercurial repository.

        Opens the URL over HTTP (with basic/digest auth when credentials are
        embedded), then -- unless the compound scheme prefix was ``svn`` --
        performs an hg peer lookup of ``tip`` to confirm it really is a
        mercurial repo. Raises ``exceptions.URLError`` on any failure and
        returns True on success.
        """
        url, _proto = normalize_url_for_hg(url)
        url_obj = url_parser(safe_bytes(url))

        # authinfo must be extracted before get_obfuscated_url() mutates
        # url_obj (it overwrites passwd/query in-place)
        test_uri = safe_str(url_obj.authinfo()[0])
        authinfo = url_obj.authinfo()[1]
        obfuscated_uri = get_obfuscated_url(url_obj)
        log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)

        handlers = []
        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*convert_to_str(authinfo))

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib.request.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # minimal valid hgweb request: 'between' over the null revision pair
        q = {"cmd": 'between'}
        q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
        qs = f'?{urllib.parse.urlencode(q)}'
        cu = f"{test_uri}{qs}"

        try:
            req = urllib.request.Request(cu, None, {})
            log.debug("Trying to open URL %s", obfuscated_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
                # Create repo path with custom mercurial path object
                ui = make_ui_from_config(config, interactive=False)
                repo_path = hg_path(ui=ui, rawloc=safe_bytes(url))
                peer_checker = make_peer(ui, repo_path, False)
                peer_checker.lookup(b'tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        obfuscated_uri)
            raise exceptions.URLError(e)(
                f"url [{obfuscated_uri}] does not look like an hg repo org_exc: {e}")

        log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
        return True
540
540
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
        """Return the diff between two commits as a bytes envelope.

        :param file_filter: optional ``(repo_path, node_path)`` pair limiting
            the diff to a single path; falsy diffs the whole changeset.
        :param opt_git: emit git-extended diff format.
        :param opt_ignorews: ignore whitespace-only changes.
        :param context: number of context lines around hunks.
        :raises exceptions.LookupException: when a commit id cannot be found.
        """
        repo = self._factory.repo(wire)

        if file_filter:
            # unpack the file-filter
            repo_path, node_path = file_filter
            match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
        else:
            match_filter = file_filter
        opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)

        try:
            diff_iter = patch.diff(
                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
            return BytesEnvelope(b"".join(diff_iter))
        except RepoLookupError as e:
            raise exceptions.LookupException(e)()
559
559
    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        """Return hex commit ids touching *path*, newest first, up to *limit*.

        Hidden and obsolete changesets are skipped, and only file revisions
        at or below the starting file revision are reported. Cached in the
        dogpile region when caching is enabled for this wire.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))

            def history_iter():
                limit_rev = fctx.rev()

                # walk the file's revlog from newest to oldest
                for fctx_candidate in reversed(list(fctx.filelog())):
                    f_obj = fctx.filectx(fctx_candidate)

                    # NOTE: This can be problematic...we can hide ONLY history node resulting in empty history
                    _ctx = f_obj.changectx()
                    if _ctx.hidden() or _ctx.obsolete():
                        continue

                    if limit_rev >= f_obj.rev():
                        yield f_obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return [x for x in history]
        return _node_history(context_uid, repo_id, revision, path, limit)
594
594
595 @reraise_safe_exceptions
595 @reraise_safe_exceptions
596 def node_history_until(self, wire, revision, path, limit):
596 def node_history_until(self, wire, revision, path, limit):
597 cache_on, context_uid, repo_id = self._cache_on(wire)
597 cache_on, context_uid, repo_id = self._cache_on(wire)
598 region = self._region(wire)
598 region = self._region(wire)
599
599
600 @region.conditional_cache_on_arguments(condition=cache_on)
600 @region.conditional_cache_on_arguments(condition=cache_on)
601 def _node_history_until(_context_uid, _repo_id):
601 def _node_history_until(_context_uid, _repo_id):
602 repo = self._factory.repo(wire)
602 repo = self._factory.repo(wire)
603 ctx = self._get_ctx(repo, revision)
603 ctx = self._get_ctx(repo, revision)
604 fctx = ctx.filectx(safe_bytes(path))
604 fctx = ctx.filectx(safe_bytes(path))
605
605
606 file_log = list(fctx.filelog())
606 file_log = list(fctx.filelog())
607 if limit:
607 if limit:
608 # Limit to the last n items
608 # Limit to the last n items
609 file_log = file_log[-limit:]
609 file_log = file_log[-limit:]
610
610
611 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
611 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
612 return _node_history_until(context_uid, repo_id, revision, path, limit)
612 return _node_history_until(context_uid, repo_id, revision, path, limit)
613
613
    @reraise_safe_exceptions
    def bulk_file_request(self, wire, commit_id, path, pre_load):
        """Resolve several file attributes for *path* at *commit_id* in one call.

        :param pre_load: iterable of attribute names; each must be a key of
            ``self._bulk_file_methods``, otherwise ``exceptions.VcsException``
            is raised.
        :returns: BinaryEnvelope wrapping a dict of attribute name -> value.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_file_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, _commit_id, _path)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
            return result

        return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
632
632
633 @reraise_safe_exceptions
633 @reraise_safe_exceptions
634 def fctx_annotate(self, wire, revision, path):
634 def fctx_annotate(self, wire, revision, path):
635 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
636 ctx = self._get_ctx(repo, revision)
636 ctx = self._get_ctx(repo, revision)
637 fctx = ctx.filectx(safe_bytes(path))
637 fctx = ctx.filectx(safe_bytes(path))
638
638
639 result = []
639 result = []
640 for i, annotate_obj in enumerate(fctx.annotate(), 1):
640 for i, annotate_obj in enumerate(fctx.annotate(), 1):
641 ln_no = i
641 ln_no = i
642 sha = hex(annotate_obj.fctx.node())
642 sha = hex(annotate_obj.fctx.node())
643 content = annotate_obj.text
643 content = annotate_obj.text
644 result.append((ln_no, ascii_str(sha), content))
644 result.append((ln_no, ascii_str(sha), content))
645 return BinaryEnvelope(result)
645 return BinaryEnvelope(result)
646
646
647 @reraise_safe_exceptions
647 @reraise_safe_exceptions
648 def fctx_node_data(self, wire, revision, path):
648 def fctx_node_data(self, wire, revision, path):
649 repo = self._factory.repo(wire)
649 repo = self._factory.repo(wire)
650 ctx = self._get_ctx(repo, revision)
650 ctx = self._get_ctx(repo, revision)
651 fctx = ctx.filectx(safe_bytes(path))
651 fctx = ctx.filectx(safe_bytes(path))
652 return BytesEnvelope(fctx.data())
652 return BytesEnvelope(fctx.data())
653
653
    @reraise_safe_exceptions
    def fctx_flags(self, wire, commit_id, path):
        """Return the mercurial flags of *path* at *commit_id* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_flags(_repo_id, _commit_id, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(safe_bytes(path))
            return fctx.flags()

        return _fctx_flags(repo_id, commit_id, path)
667
667
@reraise_safe_exceptions
def fctx_size(self, wire, commit_id, path):
    """Return the size in bytes of *path* at *commit_id*, cached.

    NOTE(review): the inner cache-key parameter was renamed
    ``_revision`` -> ``_commit_id`` to match the value actually passed
    and the sibling ``fctx_flags``; dogpile keys are built from argument
    values, not names, so existing cache entries are unaffected.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _fctx_size(_repo_id, _commit_id, _path):
        # underscore args only shape the cache key; real inputs come
        # from the closure
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, commit_id)
        return ctx.filectx(safe_bytes(path)).size()

    return _fctx_size(repo_id, commit_id, path)
680
680
@reraise_safe_exceptions
def get_all_commit_ids(self, wire, name):
    """Return hex ids of every visible commit, in changelog order, cached."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_all_commit_ids(_context_uid, _repo_id, _name):
        repo = self._factory.repo(wire)
        changelog = repo.filtered(b'visible').changelog
        return [ascii_str(repo[rev].hex()) for rev in changelog.revs()]

    return _get_all_commit_ids(context_uid, repo_id, name)
692
692
@reraise_safe_exceptions
def get_config_value(self, wire, section, name, untrusted=False):
    """Read a single hgrc value from *section*/*name* via the repo ui."""
    repo = self._factory.repo(wire)
    return repo.ui.config(
        ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
697
697
@reraise_safe_exceptions
def is_large_file(self, wire, commit_id, path):
    """Return whether *path* is a largefiles standin path, cached."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
        # purely a path-name check; the repository content is not consulted
        return largefiles.lfutil.isstandin(safe_bytes(path))

    return _is_large_file(context_uid, repo_id, commit_id, path)
708
708
@reraise_safe_exceptions
def is_binary(self, wire, revision, path):
    """Return whether the file *path* at *revision* is binary, cached."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _is_binary(_repo_id, _sha, _path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.filectx(safe_bytes(path)).isbinary()

    return _is_binary(repo_id, revision, path)
722
722
@reraise_safe_exceptions
def md5_hash(self, wire, revision, path):
    """Return the md5 hex digest of the file content, cached.

    md5 is used here as a fast content checksum, not for security.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _md5_hash(_repo_id, _sha, _path):
        repo = self._factory.repo(wire)
        fctx = self._get_ctx(repo, revision).filectx(safe_bytes(path))
        return hashlib.md5(fctx.data()).hexdigest()

    return _md5_hash(repo_id, revision, path)
736
736
@reraise_safe_exceptions
def in_largefiles_store(self, wire, sha):
    """Return whether the largefile *sha* exists in the repo-local store."""
    repo = self._factory.repo(wire)
    return largefiles.lfutil.instore(repo, sha)
741
741
@reraise_safe_exceptions
def in_user_cache(self, wire, sha):
    """Return whether the largefile *sha* exists in the per-user cache."""
    repo = self._factory.repo(wire)
    return largefiles.lfutil.inusercache(repo.ui, sha)
746
746
@reraise_safe_exceptions
def store_path(self, wire, sha):
    """Return the filesystem path of largefile *sha* in the repo store."""
    repo = self._factory.repo(wire)
    return largefiles.lfutil.storepath(repo, sha)
751
751
@reraise_safe_exceptions
def link(self, wire, sha, path):
    """Link the largefile *sha* from the user cache into *path*."""
    repo = self._factory.repo(wire)
    user_cache_path = largefiles.lfutil.usercachepath(repo.ui, sha)
    largefiles.lfutil.link(user_cache_path, path)
757
757
@reraise_safe_exceptions
def localrepository(self, wire, create=False):
    """Open (or, with *create*, initialize) the repository; result discarded."""
    self._factory.repo(wire, create=create)
761
761
@reraise_safe_exceptions
def assert_correct_path(self, wire):
    """Return True when *wire* points at an openable Mercurial repo, cached."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _assert_correct_path(_context_uid, _repo_id):
        try:
            self._factory.repo(wire, create=False)
        except Exception:
            # any open failure means the path is not a valid hg repo
            repo_path = wire.get('path')
            tb = traceback.format_exc()
            log.debug("Invalid Mercurial path `%s`, tb: %s", repo_path, tb)
            return False
        return True

    return _assert_correct_path(context_uid, repo_id)
779
779
@reraise_safe_exceptions
def lookup(self, wire, revision, both):
    """Resolve *revision* to a hex id, or ``(hex, rev_no)`` when *both*, cached.

    Raises LookupException (wrapping the original error) when the
    revision cannot be resolved.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _lookup(_context_uid, _repo_id, _revision, _both):
        repo = self._factory.repo(wire)
        rev = _revision
        # NOTE(marcink): Mercurial doesn't support negative indexes
        # properly, so shift non-positive integer revs by one:
        # repo[-1] => repo[-2], repo[0] => repo[-1]
        if isinstance(rev, int) and rev <= 0:
            rev -= 1
        try:
            ctx = self._get_ctx(repo, rev)
        except AmbiguousPrefixLookupError:
            e = RepoLookupError(rev)
            e._org_exc_tb = format_exc(sys.exc_info())
            raise exceptions.LookupException(e)(rev)
        except (TypeError, RepoLookupError, binascii.Error) as e:
            e._org_exc_tb = format_exc(sys.exc_info())
            raise exceptions.LookupException(e)(rev)
        except LookupError as e:
            e._org_exc_tb = format_exc(sys.exc_info())
            raise exceptions.LookupException(e)(e.name)

        if not both:
            return ctx.hex()

        ctx = repo[ctx.hex()]
        return ctx.hex(), ctx.rev()

    return _lookup(context_uid, repo_id, revision, both)
817
817
@reraise_safe_exceptions
def sync_push(self, wire, url):
    """Push all changesets and bookmarks to *url*; no-op if the URL check fails."""
    if not self.check_url(url, wire['config']):
        return

    repo = self._factory.repo(wire)
    # disable prompts on the local ui
    repo.ui.setconfig(b'ui', b'interactive', b'false', b'-y')

    bookmarks = list(dict(repo._bookmarks))
    remote = peer(repo, {}, safe_bytes(url))
    # ... and on the remote ui as well
    remote.ui.setconfig(b'ui', b'interactive', b'false', b'-y')

    return exchange.push(
        repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
835
835
@reraise_safe_exceptions
def revision(self, wire, rev):
    """Return the local (integer) revision number of *rev*."""
    repo = self._factory.repo(wire)
    return self._get_ctx(repo, rev).rev()
841
841
@reraise_safe_exceptions
def rev_range(self, wire, commit_filter):
    """Return hex ids of revisions matching the *commit_filter* revset, cached."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _rev_range(_context_uid, _repo_id, _filter):
        repo = self._factory.repo(wire)
        spec = list(map(ascii_bytes, commit_filter))
        return [ascii_str(repo[rev].hex()) for rev in revrange(repo, spec)]

    # sorted() normalizes the cache key; the revset evaluation itself uses
    # the original order through the closure
    return _rev_range(context_uid, repo_id, sorted(commit_filter))
857
857
@reraise_safe_exceptions
def rev_range_hash(self, wire, node):
    """Return hex ids for every revision from *node* up to tip, inclusive."""
    repo = self._factory.repo(wire)

    def get_revs(repo, rev_opt):
        # resolve the option to an inclusive (stop, start) pair
        if not rev_opt:
            return len(repo) - 1, 0
        revs = revrange(repo, rev_opt)
        if not revs:
            return nullrev, nullrev
        return max(revs), min(revs)

    stop, start = get_revs(repo, [node + ':'])
    return [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
874
874
@reraise_safe_exceptions
def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
    """Evaluate the revset *rev_spec* and return matching revision numbers.

    When ``other_path`` names a different repository, the revset is
    evaluated against a union of both repositories (used to compare two
    independent repos).
    """
    org_path = safe_bytes(wire["path"])
    other_path = safe_bytes(kwargs.pop('other_path', ''))

    # BUG FIX: previously ``other_path`` (bytes) was compared with
    # ``wire["path"]`` (str), which is never equal on py3, so a union
    # repository was built even when both paths were identical.
    # Compare bytes with bytes instead.
    if other_path and other_path != org_path:
        # case when we want to compare two independent repositories
        baseui = self._factory._create_config(wire["config"])
        repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
    else:
        repo = self._factory.repo(wire)
    return list(repo.revs(rev_spec, *args))
887
887
@reraise_safe_exceptions
def verify(self, wire):
    """Run ``hg verify`` on the repository and return the captured output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui
    verify.verify(repo)
    return output.getvalue()
898
898
@reraise_safe_exceptions
def hg_update_cache(self, wire):
    """Fully rebuild Mercurial caches under repo locks; return captured output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)
    repo.ui = baseui

    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

    return output.getvalue()
910
910
@reraise_safe_exceptions
def hg_rebuild_fn_cache(self, wire):
    """Rebuild the fncache store file; return captured command output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)
    repo.ui = baseui

    repair.rebuildfncache(baseui, repo)

    return output.getvalue()
922
922
@reraise_safe_exceptions
def tags(self, wire):
    """Return a mapping of tag name -> commit hash, cached."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _tags(_context_uid, _repo_id):
        repo = self._factory.repo(wire)
        tag_map = {}
        for name, sha in repo.tags().items():
            tag_map[safe_str(name)] = ascii_str(hex(sha))
        return tag_map

    return _tags(context_uid, repo_id)
934
934
@reraise_safe_exceptions
def update(self, wire, node='', clean=False):
    """Run ``hg update`` to *node*, discarding local changes when *clean*."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    commands.update(baseui, repo, node=safe_bytes(node), clean=clean)
942
942
@reraise_safe_exceptions
def identify(self, wire):
    """Return the ``hg identify`` output (full node id) as bytes."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    captured = io.BytesIO()
    baseui.write = captured.write
    # debugflag makes identify print the unabbreviated node id
    baseui.debugflag = True
    commands.identify(baseui, repo, id=True)

    return captured.getvalue()
954
954
@reraise_safe_exceptions
def heads(self, wire, branch=None):
    """Return space-separated head node ids (optionally of one branch) as bytes."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    collected = io.BytesIO()

    def capture(data, **unused_kwargs):
        collected.write(data)

    baseui.write = capture
    args = [safe_bytes(branch)] if branch else []
    commands.heads(baseui, repo, template=b'{node} ', *args)

    return collected.getvalue()
972
972
@reraise_safe_exceptions
def ancestor(self, wire, revision1, revision2):
    """Return the hex hash of the common ancestor of the two revisions."""
    repo = self._factory.repo(wire)
    lookup = repo.lookup
    node1 = lookup(safe_bytes(revision1))
    node2 = lookup(safe_bytes(revision2))
    return hex(repo.changelog.ancestor(node1, node2))
980
980
@reraise_safe_exceptions
def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
    """Clone *source* into *dest*; check out a working copy only on request."""
    baseui = self._factory._create_config(wire["config"], hooks=hooks)
    clone(baseui, safe_bytes(source), safe_bytes(dest),
          noupdate=not update_after_clone)
985
985
@reraise_safe_exceptions
def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
    """Create an in-memory commit from the given file changes and return its hex id."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    publishing = baseui.configbool(b'phases', b'publish')

    def _filectxfn(_repo, ctx, path: bytes):
        """
        Marks given path as added/changed/removed in a given _repo. This is
        for internal mercurial commit function.
        """

        # removed path: returning None is how memctx marks a node for removal
        if safe_str(path) in removed:
            return None

        # added/changed path: build a memfilectx with the new content
        for node in updated:
            if safe_bytes(node['path']) == path:
                return memfilectx(
                    _repo,
                    changectx=ctx,
                    path=safe_bytes(node['path']),
                    data=safe_bytes(node['content']),
                    islink=False,
                    isexec=bool(node['mode'] & stat.S_IXUSR),
                    copysource=False)

        abort_exc = exceptions.AbortException()
        raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")

    # phase of the new commit follows the repo's publish setting
    new_commit_phase = b'public' if publishing else b'draft'

    with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
        kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=safe_bytes(message),
            files=[safe_bytes(x) for x in files],
            filectxfn=_filectxfn,
            user=safe_bytes(user),
            date=(commit_time, commit_timezone),
            extra=kwargs)

        new_id = hex(repo.commitctx(commit_ctx))

    return new_id
1038
1038
@reraise_safe_exceptions
def pull(self, wire, url, commit_ids=None):
    """Pull from *url* (optionally only *commit_ids*) via the exchange API."""
    repo = self._factory.repo(wire)
    # disable prompts on the local ui
    repo.ui.setconfig(b'ui', b'interactive', b'false', b'-y')

    remote = peer(repo, {}, safe_bytes(url))
    # ... and on the remote ui as well
    remote.ui.setconfig(b'ui', b'interactive', b'false', b'-y')

    heads = [bin(commit_id) for commit_id in commit_ids] if commit_ids else commit_ids

    return exchange.pull(repo, remote, heads=heads, force=None).cgresult
1054
1054
@reraise_safe_exceptions
def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
    """Run ``hg pull`` from *source*, optionally limited to bookmarks, branches or revs."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)
    source = safe_bytes(source)

    # Mercurial internally checks only whether an option key is *present*,
    # so each option is added only when it was actually requested.
    opts = {"remote_hidden": False}

    def _to_bytes(val):
        if isinstance(val, list):
            return [safe_bytes(x) for x in val]
        return safe_bytes(val)

    if bookmark:
        opts['bookmark'] = _to_bytes(bookmark)
    if branch:
        opts['branch'] = _to_bytes(branch)
    if revision:
        opts['rev'] = _to_bytes(revision)

    commands.pull(baseui, repo, source, **opts)
1079
1079
@reraise_safe_exceptions
def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
    """Run ``hg push`` of *revisions* to *dest_path*."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)

    if isinstance(revisions, list):
        revisions = [safe_bytes(x) for x in revisions]
    else:
        revisions = safe_bytes(revisions)

    commands.push(baseui, repo, safe_bytes(dest_path),
                  rev=revisions,
                  new_branch=push_branches)
1091
1091
@reraise_safe_exceptions
def strip(self, wire, revision, update, backup):
    """Strip *revision* (and its descendants) from repository history."""
    repo = self._factory.repo(wire)
    node = self._get_ctx(repo, revision).node()
    hgext_strip.strip(repo.baseui, repo, node, update=update, backup=backup)
1098
1098
@reraise_safe_exceptions
def get_unresolved_files(self, wire):
    """Return the raw ``hg resolve --list`` output lines (unresolved merge files)."""
    repo = self._factory.repo(wire)

    log.debug('Calculating unresolved files for repo: %s', repo)
    captured = io.BytesIO()

    def capture(data, **unused_kwargs):
        captured.write(data)

    baseui = self._factory._create_config(wire['config'])
    baseui.write = capture

    commands.resolve(baseui, repo, list=True)
    return captured.getvalue().splitlines(False)
1115
1115
@reraise_safe_exceptions
def merge(self, wire, revision):
    """Merge *revision* into the working directory without prompting."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

    # With subrepositories Mercurial would prompt on merge conflicts or
    # differing subrepo sources; non-interactive mode makes it fall back
    # to default answers instead.
    repo.ui.setconfig(b'ui', b'interactive', b'false')
    commands.merge(baseui, repo, rev=safe_bytes(revision))
1128
1128
@reraise_safe_exceptions
def merge_state(self, wire):
    """Return the list of files still unresolved in the current merge."""
    repo = self._factory.repo(wire)
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

    # With subrepositories Mercurial would prompt on merge conflicts or
    # differing subrepo sources; non-interactive mode makes it fall back
    # to default answers instead.
    repo.ui.setconfig(b'ui', b'interactive', b'false')
    return list(hg_merge.mergestate(repo).unresolved())
1141
1141
1142 @reraise_safe_exceptions
1142 @reraise_safe_exceptions
1143 def commit(self, wire, message, username, close_branch=False):
1143 def commit(self, wire, message, username, close_branch=False):
1144 repo = self._factory.repo(wire)
1144 repo = self._factory.repo(wire)
1145 baseui = self._factory._create_config(wire['config'])
1145 baseui = self._factory._create_config(wire['config'])
1146 repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
1146 repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
1147 commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)
1147 commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)
1148
1148
1149 @reraise_safe_exceptions
1149 @reraise_safe_exceptions
1150 def rebase(self, wire, source='', dest='', abort=False):
1150 def rebase(self, wire, source='', dest='', abort=False):
1151
1151
1152 repo = self._factory.repo(wire)
1152 repo = self._factory.repo(wire)
1153 baseui = self._factory._create_config(wire['config'])
1153 baseui = self._factory._create_config(wire['config'])
1154 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1154 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1155 # In case of sub repositories are used mercurial prompts the user in
1155 # In case of sub repositories are used mercurial prompts the user in
1156 # case of merge conflicts or different sub repository sources. By
1156 # case of merge conflicts or different sub repository sources. By
1157 # setting the interactive flag to `False` mercurial doesn't prompt the
1157 # setting the interactive flag to `False` mercurial doesn't prompt the
1158 # used but instead uses a default value.
1158 # used but instead uses a default value.
1159 repo.ui.setconfig(b'ui', b'interactive', b'false')
1159 repo.ui.setconfig(b'ui', b'interactive', b'false')
1160
1160
1161 rebase_kws = dict(
1161 rebase_kws = dict(
1162 keep=not abort,
1162 keep=not abort,
1163 abort=abort
1163 abort=abort
1164 )
1164 )
1165
1165
1166 if source:
1166 if source:
1167 source = repo[source]
1167 source = repo[source]
1168 rebase_kws['base'] = [source.hex()]
1168 rebase_kws['base'] = [source.hex()]
1169 if dest:
1169 if dest:
1170 dest = repo[dest]
1170 dest = repo[dest]
1171 rebase_kws['dest'] = dest.hex()
1171 rebase_kws['dest'] = dest.hex()
1172
1172
1173 rebase.rebase(baseui, repo, **rebase_kws)
1173 rebase.rebase(baseui, repo, **rebase_kws)
1174
1174
1175 @reraise_safe_exceptions
1175 @reraise_safe_exceptions
1176 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1176 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1177 repo = self._factory.repo(wire)
1177 repo = self._factory.repo(wire)
1178 ctx = self._get_ctx(repo, revision)
1178 ctx = self._get_ctx(repo, revision)
1179 node = ctx.node()
1179 node = ctx.node()
1180
1180
1181 date = (tag_time, tag_timezone)
1181 date = (tag_time, tag_timezone)
1182 try:
1182 try:
1183 hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
1183 hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
1184 except Abort as e:
1184 except Abort as e:
1185 log.exception("Tag operation aborted")
1185 log.exception("Tag operation aborted")
1186 # Exception can contain unicode which we convert
1186 # Exception can contain unicode which we convert
1187 raise exceptions.AbortException(e)(repr(e))
1187 raise exceptions.AbortException(e)(repr(e))
1188
1188
1189 @reraise_safe_exceptions
1189 @reraise_safe_exceptions
1190 def bookmark(self, wire, bookmark, revision=''):
1190 def bookmark(self, wire, bookmark, revision=''):
1191 repo = self._factory.repo(wire)
1191 repo = self._factory.repo(wire)
1192 baseui = self._factory._create_config(wire['config'])
1192 baseui = self._factory._create_config(wire['config'])
1193 revision = revision or ''
1193 revision = revision or ''
1194 commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1194 commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1195
1195
1196 @reraise_safe_exceptions
1196 @reraise_safe_exceptions
1197 def install_hooks(self, wire, force=False):
1197 def install_hooks(self, wire, force=False):
1198 # we don't need any special hooks for Mercurial
1198 # we don't need any special hooks for Mercurial
1199 pass
1199 pass
1200
1200
1201 @reraise_safe_exceptions
1201 @reraise_safe_exceptions
1202 def get_hooks_info(self, wire):
1202 def get_hooks_info(self, wire):
1203 return {
1203 return {
1204 'pre_version': vcsserver.get_version(),
1204 'pre_version': vcsserver.get_version(),
1205 'post_version': vcsserver.get_version(),
1205 'post_version': vcsserver.get_version(),
1206 }
1206 }
1207
1207
1208 @reraise_safe_exceptions
1208 @reraise_safe_exceptions
1209 def set_head_ref(self, wire, head_name):
1209 def set_head_ref(self, wire, head_name):
1210 pass
1210 pass
1211
1211
1212 @reraise_safe_exceptions
1212 @reraise_safe_exceptions
1213 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1213 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1214 archive_dir_name, commit_id, cache_config):
1214 archive_dir_name, commit_id, cache_config):
1215
1215
1216 def file_walker(_commit_id, path):
1216 def file_walker(_commit_id, path):
1217 repo = self._factory.repo(wire)
1217 repo = self._factory.repo(wire)
1218 ctx = repo[_commit_id]
1218 ctx = repo[_commit_id]
1219 is_root = path in ['', '/']
1219 is_root = path in ['', '/']
1220 if is_root:
1220 if is_root:
1221 matcher = alwaysmatcher(badfn=None)
1221 matcher = alwaysmatcher(badfn=None)
1222 else:
1222 else:
1223 matcher = patternmatcher('', [(b'glob', safe_bytes(path)+b'/**', b'')], badfn=None)
1223 matcher = patternmatcher('', [(b'glob', safe_bytes(path)+b'/**', b'')], badfn=None)
1224 file_iter = ctx.manifest().walk(matcher)
1224 file_iter = ctx.manifest().walk(matcher)
1225
1225
1226 for fn in file_iter:
1226 for fn in file_iter:
1227 file_path = fn
1227 file_path = fn
1228 flags = ctx.flags(fn)
1228 flags = ctx.flags(fn)
1229 mode = b'x' in flags and 0o755 or 0o644
1229 mode = b'x' in flags and 0o755 or 0o644
1230 is_link = b'l' in flags
1230 is_link = b'l' in flags
1231
1231
1232 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1232 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1233
1233
1234 return store_archive_in_cache(
1234 return store_archive_in_cache(
1235 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1235 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1236
1236
@@ -1,959 +1,959 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31 import svn.client # noqa
31 import svn.client # noqa
32 import svn.core # noqa
32 import svn.core # noqa
33 import svn.delta # noqa
33 import svn.delta # noqa
34 import svn.diff # noqa
34 import svn.diff # noqa
35 import svn.fs # noqa
35 import svn.fs # noqa
36 import svn.repos # noqa
36 import svn.repos # noqa
37
37
38 import vcsserver
38 import vcsserver
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import (
40 from vcsserver.base import (
41 RepoFactory,
41 RepoFactory,
42 raise_from_original,
42 raise_from_original,
43 ArchiveNode,
43 ArchiveNode,
44 store_archive_in_cache,
44 store_archive_in_cache,
45 BytesEnvelope,
45 BytesEnvelope,
46 BinaryEnvelope,
46 BinaryEnvelope,
47 )
47 )
48 from vcsserver.exceptions import NoContentException
48 from vcsserver.exceptions import NoContentException
49 from vcsserver.vcs_base import RemoteBase
49 from vcsserver.vcs_base import RemoteBase
50 from vcsserver.lib.str_utils import safe_str, safe_bytes
50 from vcsserver.lib.str_utils import safe_str, safe_bytes
51 from vcsserver.lib.type_utils import assert_bytes
51 from vcsserver.lib.type_utils import assert_bytes
52 from vcsserver.lib.svnremoterepo import svnremoterepo
52 from vcsserver.lib.svnremoterepo import svnremoterepo
53 from vcsserver.lib.svn_txn_utils import store_txn_id_data
53 from vcsserver.lib.svn_txn_utils import store_txn_id_data
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 svn_compatible_versions_map = {
58 svn_compatible_versions_map = {
59 'pre-1.4-compatible': '1.3',
59 'pre-1.4-compatible': '1.3',
60 'pre-1.5-compatible': '1.4',
60 'pre-1.5-compatible': '1.4',
61 'pre-1.6-compatible': '1.5',
61 'pre-1.6-compatible': '1.5',
62 'pre-1.8-compatible': '1.7',
62 'pre-1.8-compatible': '1.7',
63 'pre-1.9-compatible': '1.8',
63 'pre-1.9-compatible': '1.8',
64 }
64 }
65
65
66 current_compatible_version = '1.14'
66 current_compatible_version = '1.14'
67
67
68
68
69 def reraise_safe_exceptions(func):
69 def reraise_safe_exceptions(func):
70 """Decorator for converting svn exceptions to something neutral."""
70 """Decorator for converting svn exceptions to something neutral."""
71 def wrapper(*args, **kwargs):
71 def wrapper(*args, **kwargs):
72 try:
72 try:
73 return func(*args, **kwargs)
73 return func(*args, **kwargs)
74 except Exception as e:
74 except Exception as e:
75 if not hasattr(e, '_vcs_kind'):
75 if not hasattr(e, '_vcs_kind'):
76 log.exception("Unhandled exception in svn remote call")
76 log.exception("Unhandled exception in svn remote call")
77 raise_from_original(exceptions.UnhandledException(e), e)
77 raise_from_original(exceptions.UnhandledException(e), e)
78 raise
78 raise
79 return wrapper
79 return wrapper
80
80
81
81
82 class SubversionFactory(RepoFactory):
82 class SubversionFactory(RepoFactory):
83 repo_type = 'svn'
83 repo_type = 'svn'
84
84
85 def _create_repo(self, wire, create, compatible_version):
85 def _create_repo(self, wire, create, compatible_version):
86 path = svn.core.svn_path_canonicalize(wire['path'])
86 path = svn.core.svn_path_canonicalize(wire['path'])
87 if create:
87 if create:
88 fs_config = {'compatible-version': current_compatible_version}
88 fs_config = {'compatible-version': current_compatible_version}
89 if compatible_version:
89 if compatible_version:
90
90
91 compatible_version_string = \
91 compatible_version_string = \
92 svn_compatible_versions_map.get(compatible_version) \
92 svn_compatible_versions_map.get(compatible_version) \
93 or compatible_version
93 or compatible_version
94 fs_config['compatible-version'] = compatible_version_string
94 fs_config['compatible-version'] = compatible_version_string
95
95
96 log.debug('Create SVN repo with config `%s`', fs_config)
96 log.debug('Create SVN repo with config `%s`', fs_config)
97 repo = svn.repos.create(path, "", "", None, fs_config)
97 repo = svn.repos.create(path, "", "", None, fs_config)
98 else:
98 else:
99 repo = svn.repos.open(path)
99 repo = svn.repos.open(path)
100
100
101 log.debug('repository created: got SVN object: %s', repo)
101 log.debug('repository created: got SVN object: %s', repo)
102 return repo
102 return repo
103
103
104 def repo(self, wire, create=False, compatible_version=None):
104 def repo(self, wire, create=False, compatible_version=None):
105 """
105 """
106 Get a repository instance for the given path.
106 Get a repository instance for the given path.
107 """
107 """
108 return self._create_repo(wire, create, compatible_version)
108 return self._create_repo(wire, create, compatible_version)
109
109
110
110
111 NODE_TYPE_MAPPING = {
111 NODE_TYPE_MAPPING = {
112 svn.core.svn_node_file: 'file',
112 svn.core.svn_node_file: 'file',
113 svn.core.svn_node_dir: 'dir',
113 svn.core.svn_node_dir: 'dir',
114 }
114 }
115
115
116
116
117 class SvnRemote(RemoteBase):
117 class SvnRemote(RemoteBase):
118
118
119 def __init__(self, factory, hg_factory=None):
119 def __init__(self, factory, hg_factory=None):
120 self._factory = factory
120 self._factory = factory
121
121
122 self._bulk_methods = {
122 self._bulk_methods = {
123 # NOT supported in SVN ATM...
123 # NOT supported in SVN ATM...
124 }
124 }
125 self._bulk_file_methods = {
125 self._bulk_file_methods = {
126 "size": self.get_file_size,
126 "size": self.get_file_size,
127 "data": self.get_file_content,
127 "data": self.get_file_content,
128 "flags": self.get_node_type,
128 "flags": self.get_node_type,
129 "is_binary": self.is_binary,
129 "is_binary": self.is_binary,
130 "md5": self.md5_hash
130 "md5": self.md5_hash
131 }
131 }
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def bulk_file_request(self, wire, commit_id, path, pre_load):
134 def bulk_file_request(self, wire, commit_id, path, pre_load):
135 cache_on, context_uid, repo_id = self._cache_on(wire)
135 cache_on, context_uid, repo_id = self._cache_on(wire)
136 region = self._region(wire)
136 region = self._region(wire)
137
137
138 # since we use unified API, we need to cast from str to in for SVN
138 # since we use unified API, we need to cast from str to in for SVN
139 commit_id = int(commit_id)
139 commit_id = int(commit_id)
140
140
141 @region.conditional_cache_on_arguments(condition=cache_on)
141 @region.conditional_cache_on_arguments(condition=cache_on)
142 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
142 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
143 result = {}
143 result = {}
144 for attr in pre_load:
144 for attr in pre_load:
145 try:
145 try:
146 method = self._bulk_file_methods[attr]
146 method = self._bulk_file_methods[attr]
147 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
147 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
148 result[attr] = method(wire, _commit_id, _path)
148 result[attr] = method(wire, _commit_id, _path)
149 except KeyError as e:
149 except KeyError as e:
150 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
150 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
151 return result
151 return result
152
152
153 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
153 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
154
154
155 @reraise_safe_exceptions
155 @reraise_safe_exceptions
156 def discover_svn_version(self):
156 def discover_svn_version(self):
157 try:
157 try:
158 import svn.core
158 import svn.core
159 svn_ver = svn.core.SVN_VERSION
159 svn_ver = svn.core.SVN_VERSION
160 except ImportError:
160 except ImportError:
161 svn_ver = None
161 svn_ver = None
162 return safe_str(svn_ver)
162 return safe_str(svn_ver)
163
163
164 @reraise_safe_exceptions
164 @reraise_safe_exceptions
165 def is_empty(self, wire):
165 def is_empty(self, wire):
166 try:
166 try:
167 return self.lookup(wire, -1) == 0
167 return self.lookup(wire, -1) == 0
168 except Exception:
168 except Exception:
169 log.exception("failed to read object_store")
169 log.exception("failed to read object_store")
170 return False
170 return False
171
171
172 def check_url(self, url, config):
172 def check_url(self, url, config):
173
173
174 # uuid function gets only valid UUID from proper repo, else
174 # uuid function gets only valid UUID from proper repo, else
175 # throws exception
175 # throws exception
176 username, password, src_url = self.get_url_and_credentials(url)
176 username, password, src_url = self.get_url_and_credentials(url)
177 try:
177 try:
178 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
178 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
179 except Exception:
179 except Exception:
180 tb = traceback.format_exc()
180 tb = traceback.format_exc()
181 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
181 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
182 raise URLError(f'"{url}" is not a valid Subversion source url.')
182 raise URLError(f'"{url}" is not a valid Subversion source url.')
183 return True
183 return True
184
184
185 def is_path_valid_repository(self, wire, path):
185 def is_path_valid_repository(self, wire, path):
186 # NOTE(marcink): short circuit the check for SVN repo
186 # NOTE(marcink): short circuit the check for SVN repo
187 # the repos.open might be expensive to check, but we have one cheap
187 # the repos.open might be expensive to check, but we have one cheap
188 # pre-condition that we can use, to check for 'format' file
188 # pre-condition that we can use, to check for 'format' file
189 if not os.path.isfile(os.path.join(path, 'format')):
189 if not os.path.isfile(os.path.join(path, 'format')):
190 return False
190 return False
191
191
192 cache_on, context_uid, repo_id = self._cache_on(wire)
192 cache_on, context_uid, repo_id = self._cache_on(wire)
193 region = self._region(wire)
193 region = self._region(wire)
194
194
195 @region.conditional_cache_on_arguments(condition=cache_on)
195 @region.conditional_cache_on_arguments(condition=cache_on)
196 def _assert_correct_path(_context_uid, _repo_id, fast_check):
196 def _assert_correct_path(_context_uid, _repo_id, fast_check):
197
197
198 try:
198 try:
199 svn.repos.open(path)
199 svn.repos.open(path)
200 except svn.core.SubversionException:
200 except svn.core.SubversionException:
201 tb = traceback.format_exc()
201 tb = traceback.format_exc()
202 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
202 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
203 return False
203 return False
204 return True
204 return True
205
205
206 return _assert_correct_path(context_uid, repo_id, True)
206 return _assert_correct_path(context_uid, repo_id, True)
207
207
208 @reraise_safe_exceptions
208 @reraise_safe_exceptions
209 def verify(self, wire,):
209 def verify(self, wire,):
210 repo_path = wire['path']
210 repo_path = wire['path']
211 if not self.is_path_valid_repository(wire, repo_path):
211 if not self.is_path_valid_repository(wire, repo_path):
212 raise Exception(
212 raise Exception(
213 f"Path {repo_path} is not a valid Subversion repository.")
213 f"Path {repo_path} is not a valid Subversion repository.")
214
214
215 cmd = ['svnadmin', 'info', repo_path]
215 cmd = ['svnadmin', 'info', repo_path]
216 stdout, stderr = subprocessio.run_command(cmd)
216 stdout, stderr = subprocessio.run_command(cmd)
217 return stdout
217 return stdout
218
218
219 @reraise_safe_exceptions
219 @reraise_safe_exceptions
220 def lookup(self, wire, revision):
220 def lookup(self, wire, revision):
221 if revision not in [-1, None, 'HEAD']:
221 if revision not in [-1, None, 'HEAD']:
222 raise NotImplementedError
222 raise NotImplementedError
223 repo = self._factory.repo(wire)
223 repo = self._factory.repo(wire)
224 fs_ptr = svn.repos.fs(repo)
224 fs_ptr = svn.repos.fs(repo)
225 head = svn.fs.youngest_rev(fs_ptr)
225 head = svn.fs.youngest_rev(fs_ptr)
226 return head
226 return head
227
227
228 @reraise_safe_exceptions
228 @reraise_safe_exceptions
229 def lookup_interval(self, wire, start_ts, end_ts):
229 def lookup_interval(self, wire, start_ts, end_ts):
230 repo = self._factory.repo(wire)
230 repo = self._factory.repo(wire)
231 fsobj = svn.repos.fs(repo)
231 fsobj = svn.repos.fs(repo)
232 start_rev = None
232 start_rev = None
233 end_rev = None
233 end_rev = None
234 if start_ts:
234 if start_ts:
235 start_ts_svn = apr_time_t(start_ts)
235 start_ts_svn = apr_time_t(start_ts)
236 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
236 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
237 else:
237 else:
238 start_rev = 1
238 start_rev = 1
239 if end_ts:
239 if end_ts:
240 end_ts_svn = apr_time_t(end_ts)
240 end_ts_svn = apr_time_t(end_ts)
241 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
241 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
242 else:
242 else:
243 end_rev = svn.fs.youngest_rev(fsobj)
243 end_rev = svn.fs.youngest_rev(fsobj)
244 return start_rev, end_rev
244 return start_rev, end_rev
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def revision_properties(self, wire, revision):
247 def revision_properties(self, wire, revision):
248
248
249 cache_on, context_uid, repo_id = self._cache_on(wire)
249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 region = self._region(wire)
250 region = self._region(wire)
251
251
252 @region.conditional_cache_on_arguments(condition=cache_on)
252 @region.conditional_cache_on_arguments(condition=cache_on)
253 def _revision_properties(_repo_id, _revision):
253 def _revision_properties(_repo_id, _revision):
254 repo = self._factory.repo(wire)
254 repo = self._factory.repo(wire)
255 fs_ptr = svn.repos.fs(repo)
255 fs_ptr = svn.repos.fs(repo)
256 return svn.fs.revision_proplist(fs_ptr, revision)
256 return svn.fs.revision_proplist(fs_ptr, revision)
257 return _revision_properties(repo_id, revision)
257 return _revision_properties(repo_id, revision)
258
258
259 def revision_changes(self, wire, revision):
259 def revision_changes(self, wire, revision):
260
260
261 repo = self._factory.repo(wire)
261 repo = self._factory.repo(wire)
262 fsobj = svn.repos.fs(repo)
262 fsobj = svn.repos.fs(repo)
263 rev_root = svn.fs.revision_root(fsobj, revision)
263 rev_root = svn.fs.revision_root(fsobj, revision)
264
264
265 editor = svn.repos.ChangeCollector(fsobj, rev_root)
265 editor = svn.repos.ChangeCollector(fsobj, rev_root)
266 editor_ptr, editor_baton = svn.delta.make_editor(editor)
266 editor_ptr, editor_baton = svn.delta.make_editor(editor)
267 base_dir = ""
267 base_dir = ""
268 send_deltas = False
268 send_deltas = False
269 svn.repos.replay2(
269 svn.repos.replay2(
270 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
270 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
271 editor_ptr, editor_baton, None)
271 editor_ptr, editor_baton, None)
272
272
273 added = []
273 added = []
274 changed = []
274 changed = []
275 removed = []
275 removed = []
276
276
277 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
277 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
278 for path, change in editor.changes.items():
278 for path, change in editor.changes.items():
279 # TODO: Decide what to do with directory nodes. Subversion can add
279 # TODO: Decide what to do with directory nodes. Subversion can add
280 # empty directories.
280 # empty directories.
281
281
282 if change.item_kind == svn.core.svn_node_dir:
282 if change.item_kind == svn.core.svn_node_dir:
283 continue
283 continue
284 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
284 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
285 added.append(path)
285 added.append(path)
286 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
286 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
287 svn.repos.CHANGE_ACTION_REPLACE]:
287 svn.repos.CHANGE_ACTION_REPLACE]:
288 changed.append(path)
288 changed.append(path)
289 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
289 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
290 removed.append(path)
290 removed.append(path)
291 else:
291 else:
292 raise NotImplementedError(
292 raise NotImplementedError(
293 "Action {} not supported on path {}".format(
293 "Action {} not supported on path {}".format(
294 change.action, path))
294 change.action, path))
295
295
296 changes = {
296 changes = {
297 'added': added,
297 'added': added,
298 'changed': changed,
298 'changed': changed,
299 'removed': removed,
299 'removed': removed,
300 }
300 }
301 return changes
301 return changes
302
302
303 @reraise_safe_exceptions
303 @reraise_safe_exceptions
304 def node_history(self, wire, path, revision, limit):
304 def node_history(self, wire, path, revision, limit):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
306 region = self._region(wire)
306 region = self._region(wire)
307
307
308 @region.conditional_cache_on_arguments(condition=cache_on)
308 @region.conditional_cache_on_arguments(condition=cache_on)
309 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
309 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
310 cross_copies = False
310 cross_copies = False
311 repo = self._factory.repo(wire)
311 repo = self._factory.repo(wire)
312 fsobj = svn.repos.fs(repo)
312 fsobj = svn.repos.fs(repo)
313 rev_root = svn.fs.revision_root(fsobj, revision)
313 rev_root = svn.fs.revision_root(fsobj, revision)
314
314
315 history_revisions = []
315 history_revisions = []
316 history = svn.fs.node_history(rev_root, path)
316 history = svn.fs.node_history(rev_root, path)
317 history = svn.fs.history_prev(history, cross_copies)
317 history = svn.fs.history_prev(history, cross_copies)
318 while history:
318 while history:
319 __, node_revision = svn.fs.history_location(history)
319 __, node_revision = svn.fs.history_location(history)
320 history_revisions.append(node_revision)
320 history_revisions.append(node_revision)
321 if limit and len(history_revisions) >= limit:
321 if limit and len(history_revisions) >= limit:
322 break
322 break
323 history = svn.fs.history_prev(history, cross_copies)
323 history = svn.fs.history_prev(history, cross_copies)
324 return history_revisions
324 return history_revisions
325 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
325 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
326
326
327 @reraise_safe_exceptions
327 @reraise_safe_exceptions
328 def node_properties(self, wire, path, revision):
328 def node_properties(self, wire, path, revision):
329 cache_on, context_uid, repo_id = self._cache_on(wire)
329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 region = self._region(wire)
330 region = self._region(wire)
331
331
332 @region.conditional_cache_on_arguments(condition=cache_on)
332 @region.conditional_cache_on_arguments(condition=cache_on)
333 def _node_properties(_repo_id, _path, _revision):
333 def _node_properties(_repo_id, _path, _revision):
334 repo = self._factory.repo(wire)
334 repo = self._factory.repo(wire)
335 fsobj = svn.repos.fs(repo)
335 fsobj = svn.repos.fs(repo)
336 rev_root = svn.fs.revision_root(fsobj, revision)
336 rev_root = svn.fs.revision_root(fsobj, revision)
337 return svn.fs.node_proplist(rev_root, path)
337 return svn.fs.node_proplist(rev_root, path)
338 return _node_properties(repo_id, path, revision)
338 return _node_properties(repo_id, path, revision)
339
339
340 def file_annotate(self, wire, path, revision):
340 def file_annotate(self, wire, path, revision):
341 abs_path = 'file://' + urllib.request.pathname2url(
341 abs_path = 'file://' + urllib.request.pathname2url(
342 vcspath.join(wire['path'], path))
342 vcspath.join(wire['path'], path))
343 file_uri = svn.core.svn_path_canonicalize(abs_path)
343 file_uri = svn.core.svn_path_canonicalize(abs_path)
344
344
345 start_rev = svn_opt_revision_value_t(0)
345 start_rev = svn_opt_revision_value_t(0)
346 peg_rev = svn_opt_revision_value_t(revision)
346 peg_rev = svn_opt_revision_value_t(revision)
347 end_rev = peg_rev
347 end_rev = peg_rev
348
348
349 annotations = []
349 annotations = []
350
350
351 def receiver(line_no, revision, author, date, line, pool):
351 def receiver(line_no, revision, author, date, line, pool):
352 annotations.append((line_no, revision, line))
352 annotations.append((line_no, revision, line))
353
353
354 # TODO: Cannot use blame5, missing typemap function in the swig code
354 # TODO: Cannot use blame5, missing typemap function in the swig code
355 try:
355 try:
356 svn.client.blame2(
356 svn.client.blame2(
357 file_uri, peg_rev, start_rev, end_rev,
357 file_uri, peg_rev, start_rev, end_rev,
358 receiver, svn.client.create_context())
358 receiver, svn.client.create_context())
359 except svn.core.SubversionException as exc:
359 except svn.core.SubversionException as exc:
360 log.exception("Error during blame operation.")
360 log.exception("Error during blame operation.")
361 raise Exception(
361 raise Exception(
362 f"Blame not supported or file does not exist at path {path}. "
362 f"Blame not supported or file does not exist at path {path}. "
363 f"Error {exc}.")
363 f"Error {exc}.")
364
364
365 return BinaryEnvelope(annotations)
365 return BinaryEnvelope(annotations)
366
366
367 @reraise_safe_exceptions
367 @reraise_safe_exceptions
368 def get_node_type(self, wire, revision=None, path=''):
368 def get_node_type(self, wire, revision=None, path=''):
369
369
370 cache_on, context_uid, repo_id = self._cache_on(wire)
370 cache_on, context_uid, repo_id = self._cache_on(wire)
371 region = self._region(wire)
371 region = self._region(wire)
372
372
373 @region.conditional_cache_on_arguments(condition=cache_on)
373 @region.conditional_cache_on_arguments(condition=cache_on)
374 def _get_node_type(_repo_id, _revision, _path):
374 def _get_node_type(_repo_id, _revision, _path):
375 repo = self._factory.repo(wire)
375 repo = self._factory.repo(wire)
376 fs_ptr = svn.repos.fs(repo)
376 fs_ptr = svn.repos.fs(repo)
377 if _revision is None:
377 if _revision is None:
378 _revision = svn.fs.youngest_rev(fs_ptr)
378 _revision = svn.fs.youngest_rev(fs_ptr)
379 root = svn.fs.revision_root(fs_ptr, _revision)
379 root = svn.fs.revision_root(fs_ptr, _revision)
380 node = svn.fs.check_path(root, path)
380 node = svn.fs.check_path(root, path)
381 return NODE_TYPE_MAPPING.get(node, None)
381 return NODE_TYPE_MAPPING.get(node, None)
382 return _get_node_type(repo_id, revision, path)
382 return _get_node_type(repo_id, revision, path)
383
383
384 @reraise_safe_exceptions
384 @reraise_safe_exceptions
385 def get_nodes(self, wire, revision=None, path=''):
385 def get_nodes(self, wire, revision=None, path=''):
386
386
387 cache_on, context_uid, repo_id = self._cache_on(wire)
387 cache_on, context_uid, repo_id = self._cache_on(wire)
388 region = self._region(wire)
388 region = self._region(wire)
389
389
390 @region.conditional_cache_on_arguments(condition=cache_on)
390 @region.conditional_cache_on_arguments(condition=cache_on)
391 def _get_nodes(_repo_id, _path, _revision):
391 def _get_nodes(_repo_id, _path, _revision):
392 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
393 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
394 if _revision is None:
394 if _revision is None:
395 _revision = svn.fs.youngest_rev(fsobj)
395 _revision = svn.fs.youngest_rev(fsobj)
396 root = svn.fs.revision_root(fsobj, _revision)
396 root = svn.fs.revision_root(fsobj, _revision)
397 entries = svn.fs.dir_entries(root, path)
397 entries = svn.fs.dir_entries(root, path)
398 result = []
398 result = []
399 for entry_path, entry_info in entries.items():
399 for entry_path, entry_info in entries.items():
400 result.append(
400 result.append(
401 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
401 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
402 return result
402 return result
403 return _get_nodes(repo_id, path, revision)
403 return _get_nodes(repo_id, path, revision)
404
404
405 @reraise_safe_exceptions
405 @reraise_safe_exceptions
406 def get_file_content(self, wire, rev=None, path=''):
406 def get_file_content(self, wire, rev=None, path=''):
407 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
408 fsobj = svn.repos.fs(repo)
408 fsobj = svn.repos.fs(repo)
409
409
410 if rev is None:
410 if rev is None:
411 rev = svn.fs.youngest_rev(fsobj)
411 rev = svn.fs.youngest_rev(fsobj)
412
412
413 root = svn.fs.revision_root(fsobj, rev)
413 root = svn.fs.revision_root(fsobj, rev)
414 content = svn.core.Stream(svn.fs.file_contents(root, path))
414 content = svn.core.Stream(svn.fs.file_contents(root, path))
415 return BytesEnvelope(content.read())
415 return BytesEnvelope(content.read())
416
416
417 @reraise_safe_exceptions
417 @reraise_safe_exceptions
418 def get_file_size(self, wire, revision=None, path=''):
418 def get_file_size(self, wire, revision=None, path=''):
419
419
420 cache_on, context_uid, repo_id = self._cache_on(wire)
420 cache_on, context_uid, repo_id = self._cache_on(wire)
421 region = self._region(wire)
421 region = self._region(wire)
422
422
423 @region.conditional_cache_on_arguments(condition=cache_on)
423 @region.conditional_cache_on_arguments(condition=cache_on)
424 def _get_file_size(_repo_id, _revision, _path):
424 def _get_file_size(_repo_id, _revision, _path):
425 repo = self._factory.repo(wire)
425 repo = self._factory.repo(wire)
426 fsobj = svn.repos.fs(repo)
426 fsobj = svn.repos.fs(repo)
427 if _revision is None:
427 if _revision is None:
428 _revision = svn.fs.youngest_revision(fsobj)
428 _revision = svn.fs.youngest_revision(fsobj)
429 root = svn.fs.revision_root(fsobj, _revision)
429 root = svn.fs.revision_root(fsobj, _revision)
430 size = svn.fs.file_length(root, path)
430 size = svn.fs.file_length(root, path)
431 return size
431 return size
432 return _get_file_size(repo_id, revision, path)
432 return _get_file_size(repo_id, revision, path)
433
433
434 def create_repository(self, wire, compatible_version=None):
434 def create_repository(self, wire, compatible_version=None):
435 log.info('Creating Subversion repository in path "%s"', wire['path'])
435 log.info('Creating Subversion repository in path "%s"', wire['path'])
436 self._factory.repo(wire, create=True,
436 self._factory.repo(wire, create=True,
437 compatible_version=compatible_version)
437 compatible_version=compatible_version)
438
438
439 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
439 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
440 obj = urllib.parse.urlparse(src_url)
440 obj = urllib.parse.urlparse(src_url)
441 username = obj.username or ''
441 username = obj.username or ''
442 password = obj.password or ''
442 password = obj.password or ''
443 return username, password, src_url
443 return username, password, src_url
444
444
445 def import_remote_repository(self, wire, src_url):
445 def import_remote_repository(self, wire, src_url):
446 repo_path = wire['path']
446 repo_path = wire['path']
447 if not self.is_path_valid_repository(wire, repo_path):
447 if not self.is_path_valid_repository(wire, repo_path):
448 raise Exception(
448 raise Exception(
449 f"Path {repo_path} is not a valid Subversion repository.")
449 f"Path {repo_path} is not a valid Subversion repository.")
450
450
451 username, password, src_url = self.get_url_and_credentials(src_url)
451 username, password, src_url = self.get_url_and_credentials(src_url)
452 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
452 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
453 '--trust-server-cert-failures=unknown-ca']
453 '--trust-server-cert-failures=unknown-ca']
454 if username and password:
454 if username and password:
455 rdump_cmd += ['--username', username, '--password', password]
455 rdump_cmd += ['--username', username, '--password', password]
456 rdump_cmd += [src_url]
456 rdump_cmd += [src_url]
457
457
458 rdump = subprocess.Popen(
458 rdump = subprocess.Popen(
459 rdump_cmd,
459 rdump_cmd,
460 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
460 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
461 load = subprocess.Popen(
461 load = subprocess.Popen(
462 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
462 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
463
463
464 # TODO: johbo: This can be a very long operation, might be better
464 # TODO: johbo: This can be a very long operation, might be better
465 # to track some kind of status and provide an api to check if the
465 # to track some kind of status and provide an api to check if the
466 # import is done.
466 # import is done.
467 rdump.wait()
467 rdump.wait()
468 load.wait()
468 load.wait()
469
469
470 log.debug('Return process ended with code: %s', rdump.returncode)
470 log.debug('Return process ended with code: %s', rdump.returncode)
471 if rdump.returncode != 0:
471 if rdump.returncode != 0:
472 errors = rdump.stderr.read()
472 errors = rdump.stderr.read()
473 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
473 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
474
474
475 reason = 'UNKNOWN'
475 reason = 'UNKNOWN'
476 if b'svnrdump: E230001:' in errors:
476 if b'svnrdump: E230001:' in errors:
477 reason = 'INVALID_CERTIFICATE'
477 reason = 'INVALID_CERTIFICATE'
478
478
479 if reason == 'UNKNOWN':
479 if reason == 'UNKNOWN':
480 reason = f'UNKNOWN:{safe_str(errors)}'
480 reason = f'UNKNOWN:{safe_str(errors)}'
481
481
482 raise Exception(
482 raise Exception(
483 'Failed to dump the remote repository from {}. Reason:{}'.format(
483 'Failed to dump the remote repository from {}. Reason:{}'.format(
484 src_url, reason))
484 src_url, reason))
485 if load.returncode != 0:
485 if load.returncode != 0:
486 raise Exception(
486 raise Exception(
487 f'Failed to load the dump of remote repository from {src_url}.')
487 f'Failed to load the dump of remote repository from {src_url}.')
488
488
489 def commit(self, wire, message, author, timestamp, updated, removed):
489 def commit(self, wire, message, author, timestamp, updated, removed):
490
490
491 message = safe_bytes(message)
491 message = safe_bytes(message)
492 author = safe_bytes(author)
492 author = safe_bytes(author)
493
493
494 repo = self._factory.repo(wire)
494 repo = self._factory.repo(wire)
495 fsobj = svn.repos.fs(repo)
495 fsobj = svn.repos.fs(repo)
496
496
497 rev = svn.fs.youngest_rev(fsobj)
497 rev = svn.fs.youngest_rev(fsobj)
498 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
498 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
499 txn_root = svn.fs.txn_root(txn)
499 txn_root = svn.fs.txn_root(txn)
500
500
501 for node in updated:
501 for node in updated:
502 TxnNodeProcessor(node, txn_root).update()
502 TxnNodeProcessor(node, txn_root).update()
503 for node in removed:
503 for node in removed:
504 TxnNodeProcessor(node, txn_root).remove()
504 TxnNodeProcessor(node, txn_root).remove()
505
505
506 svn_txn_id = safe_str(svn.fs.svn_fs_txn_name(txn))
506 svn_txn_id = safe_str(svn.fs.svn_fs_txn_name(txn))
507 full_repo_path = wire['path']
507 full_repo_path = wire['path']
508 txn_id_data = {'svn_txn_id': svn_txn_id, 'rc_internal_commit': True}
508 txn_id_data = {'svn_txn_id': svn_txn_id, 'rc_internal_commit': True}
509
509
510 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
510 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
511 commit_id = svn.repos.fs_commit_txn(repo, txn)
511 commit_id = svn.repos.fs_commit_txn(repo, txn)
512
512
513 if timestamp:
513 if timestamp:
514 apr_time = apr_time_t(timestamp)
514 apr_time = apr_time_t(timestamp)
515 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
515 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
516 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
516 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
517
517
518 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
518 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
519 return commit_id
519 return commit_id
520
520
521 @reraise_safe_exceptions
521 @reraise_safe_exceptions
522 def diff(self, wire, rev1, rev2, path1=None, path2=None,
522 def diff(self, wire, rev1, rev2, path1=None, path2=None,
523 ignore_whitespace=False, context=3):
523 ignore_whitespace=False, context=3):
524
524
525 wire.update(cache=False)
525 wire.update(cache=False)
526 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
527 diff_creator = SvnDiffer(
527 diff_creator = SvnDiffer(
528 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
528 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
529 try:
529 try:
530 return BytesEnvelope(diff_creator.generate_diff())
530 return BytesEnvelope(diff_creator.generate_diff())
531 except svn.core.SubversionException as e:
531 except svn.core.SubversionException as e:
532 log.exception(
532 log.exception(
533 "Error during diff operation operation. "
533 "Error during diff operation operation. "
534 "Path might not exist %s, %s", path1, path2)
534 "Path might not exist %s, %s", path1, path2)
535 return BytesEnvelope(b'')
535 return BytesEnvelope(b'')
536
536
537 @reraise_safe_exceptions
537 @reraise_safe_exceptions
538 def is_large_file(self, wire, path):
538 def is_large_file(self, wire, path):
539 return False
539 return False
540
540
541 @reraise_safe_exceptions
541 @reraise_safe_exceptions
542 def is_binary(self, wire, rev, path):
542 def is_binary(self, wire, rev, path):
543 cache_on, context_uid, repo_id = self._cache_on(wire)
543 cache_on, context_uid, repo_id = self._cache_on(wire)
544 region = self._region(wire)
544 region = self._region(wire)
545
545
546 @region.conditional_cache_on_arguments(condition=cache_on)
546 @region.conditional_cache_on_arguments(condition=cache_on)
547 def _is_binary(_repo_id, _rev, _path):
547 def _is_binary(_repo_id, _rev, _path):
548 raw_bytes = self.get_file_content(wire, rev, path)
548 raw_bytes = self.get_file_content(wire, rev, path)
549 if not raw_bytes:
549 if not raw_bytes:
550 return False
550 return False
551 return b'\0' in raw_bytes
551 return b'\0' in raw_bytes
552
552
553 return _is_binary(repo_id, rev, path)
553 return _is_binary(repo_id, rev, path)
554
554
555 @reraise_safe_exceptions
555 @reraise_safe_exceptions
556 def md5_hash(self, wire, rev, path):
556 def md5_hash(self, wire, rev, path):
557 cache_on, context_uid, repo_id = self._cache_on(wire)
557 cache_on, context_uid, repo_id = self._cache_on(wire)
558 region = self._region(wire)
558 region = self._region(wire)
559
559
560 @region.conditional_cache_on_arguments(condition=cache_on)
560 @region.conditional_cache_on_arguments(condition=cache_on)
561 def _md5_hash(_repo_id, _rev, _path):
561 def _md5_hash(_repo_id, _rev, _path):
562 return ''
562 return ''
563
563
564 return _md5_hash(repo_id, rev, path)
564 return _md5_hash(repo_id, rev, path)
565
565
566 @reraise_safe_exceptions
566 @reraise_safe_exceptions
567 def run_svn_command(self, wire, cmd, **opts):
567 def run_svn_command(self, wire, cmd, **opts):
568 path = wire.get('path', None)
568 path = wire.get('path', None)
569 debug_mode = vcsserver.ConfigGet().get_bool('debug')
569 debug_mode = vcsserver.ConfigGet().get_bool('debug')
570
570
571 if path and os.path.isdir(path):
571 if path and os.path.isdir(path):
572 opts['cwd'] = path
572 opts['cwd'] = path
573
573
574 safe_call = opts.pop('_safe', False)
574 safe_call = opts.pop('_safe', False)
575
575
576 svnenv = os.environ.copy()
576 svnenv = os.environ.copy()
577 svnenv.update(opts.pop('extra_env', {}))
577 svnenv.update(opts.pop('extra_env', {}))
578
578
579 _opts = {'env': svnenv, 'shell': False}
579 _opts = {'env': svnenv, 'shell': False}
580
580
581 try:
581 try:
582 _opts.update(opts)
582 _opts.update(opts)
583 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
583 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
584
584
585 return b''.join(proc), b''.join(proc.stderr)
585 return b''.join(proc), b''.join(proc.stderr)
586 except OSError as err:
586 except OSError as err:
587 if safe_call:
587 if safe_call:
588 return '', safe_str(err).strip()
588 return '', safe_str(err).strip()
589 else:
589 else:
590 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
590 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
591 call_opts = {}
591 call_opts = {}
592 if debug_mode:
592 if debug_mode:
593 call_opts = _opts
593 call_opts = _opts
594
594
595 tb_err = ("Couldn't run svn command ({}).\n"
595 tb_err = ("Couldn't run svn command ({}).\n"
596 "Original error was:{}\n"
596 "Original error was:{}\n"
597 "Call options:{}\n"
597 "Call options:{}\n"
598 .format(cmd, err, call_opts))
598 .format(cmd, err, call_opts))
599 log.exception(tb_err)
599 log.exception(tb_err)
600 raise exceptions.VcsException()(tb_err)
600 raise exceptions.VcsException()(tb_err)
601
601
602 @reraise_safe_exceptions
602 @reraise_safe_exceptions
603 def install_hooks(self, wire, force=False):
603 def install_hooks(self, wire, force=False):
604 from vcsserver.hook_utils import install_svn_hooks
604 from vcsserver.hook_utils import install_svn_hooks
605 repo_path = wire['path']
605 repo_path = wire['path']
606 binary_dir = settings.BINARY_DIR
606 binary_dir = settings.BINARY_DIR
607 executable = None
607 executable = None
608 if binary_dir:
608 if binary_dir:
609 executable = os.path.join(binary_dir, 'python3')
609 executable = os.path.join(binary_dir, 'python3')
610 return install_svn_hooks(repo_path, force_create=force)
610 return install_svn_hooks(repo_path, force_create=force)
611
611
612 @reraise_safe_exceptions
612 @reraise_safe_exceptions
613 def get_hooks_info(self, wire):
613 def get_hooks_info(self, wire):
614 from vcsserver.hook_utils import (
614 from vcsserver.hook_utils import (
615 get_svn_pre_hook_version, get_svn_post_hook_version)
615 get_svn_pre_hook_version, get_svn_post_hook_version)
616 repo_path = wire['path']
616 repo_path = wire['path']
617 return {
617 return {
618 'pre_version': get_svn_pre_hook_version(repo_path),
618 'pre_version': get_svn_pre_hook_version(repo_path),
619 'post_version': get_svn_post_hook_version(repo_path),
619 'post_version': get_svn_post_hook_version(repo_path),
620 }
620 }
621
621
622 @reraise_safe_exceptions
622 @reraise_safe_exceptions
623 def set_head_ref(self, wire, head_name):
623 def set_head_ref(self, wire, head_name):
624 pass
624 pass
625
625
626 @reraise_safe_exceptions
626 @reraise_safe_exceptions
627 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
627 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
628 archive_dir_name, commit_id, cache_config):
628 archive_dir_name, commit_id, cache_config):
629
629
630 def walk_tree(root, root_dir, _commit_id):
630 def walk_tree(root, root_dir, _commit_id):
631 """
631 """
632 Special recursive svn repo walker
632 Special recursive svn repo walker
633 """
633 """
634 root_dir = safe_bytes(root_dir)
634 root_dir = safe_bytes(root_dir)
635
635
636 filemode_default = 0o100644
636 filemode_default = 0o100644
637 filemode_executable = 0o100755
637 filemode_executable = 0o100755
638
638
639 file_iter = svn.fs.dir_entries(root, root_dir)
639 file_iter = svn.fs.dir_entries(root, root_dir)
640 for f_name in file_iter:
640 for f_name in file_iter:
641 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
641 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
642
642
643 if f_type == 'dir':
643 if f_type == 'dir':
644 # return only DIR, and then all entries in that dir
644 # return only DIR, and then all entries in that dir
645 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
645 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
646 new_root = os.path.join(root_dir, f_name)
646 new_root = os.path.join(root_dir, f_name)
647 yield from walk_tree(root, new_root, _commit_id)
647 yield from walk_tree(root, new_root, _commit_id)
648 else:
648 else:
649
649
650 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
650 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
651 prop_list = svn.fs.node_proplist(root, f_path)
651 prop_list = svn.fs.node_proplist(root, f_path)
652
652
653 f_mode = filemode_default
653 f_mode = filemode_default
654 if prop_list.get('svn:executable'):
654 if prop_list.get('svn:executable'):
655 f_mode = filemode_executable
655 f_mode = filemode_executable
656
656
657 f_is_link = False
657 f_is_link = False
658 if prop_list.get('svn:special'):
658 if prop_list.get('svn:special'):
659 f_is_link = True
659 f_is_link = True
660
660
661 data = {
661 data = {
662 'is_link': f_is_link,
662 'is_link': f_is_link,
663 'mode': f_mode,
663 'mode': f_mode,
664 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
664 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
665 }
665 }
666
666
667 yield f_path, data, f_type
667 yield f_path, data, f_type
668
668
669 def file_walker(_commit_id, path):
669 def file_walker(_commit_id, path):
670 repo = self._factory.repo(wire)
670 repo = self._factory.repo(wire)
671 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
671 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
672
672
673 def no_content():
673 def no_content():
674 raise NoContentException()
674 raise NoContentException()
675
675
676 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
676 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
677 file_path = f_name
677 file_path = f_name
678
678
679 if f_type == 'dir':
679 if f_type == 'dir':
680 mode = f_data['mode']
680 mode = f_data['mode']
681 yield ArchiveNode(file_path, mode, False, no_content)
681 yield ArchiveNode(file_path, mode, False, no_content)
682 else:
682 else:
683 mode = f_data['mode']
683 mode = f_data['mode']
684 is_link = f_data['is_link']
684 is_link = f_data['is_link']
685 data_stream = f_data['content_stream']
685 data_stream = f_data['content_stream']
686 yield ArchiveNode(file_path, mode, is_link, data_stream)
686 yield ArchiveNode(file_path, mode, is_link, data_stream)
687
687
688 return store_archive_in_cache(
688 return store_archive_in_cache(
689 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
689 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
690
690
691
691
class SvnDiffer:
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set per node while generating a diff; when True, content lines are
    # suppressed for files whose svn:mime-type is not text/*.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # an empty source path falls back to the target path (same node,
        # different revisions)
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffs between existing nodes of different kinds (file vs dir)."""
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self) -> bytes:
        """Return the full diff as bytes; dispatches on target node kind."""
        buf = io.BytesIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf: io.BytesIO):
        """Collect changed paths via a delta editor, then diff each node."""
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        # sorted for deterministic diff ordering
        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf: io.BytesIO):
        """Diff a single file node; classifies the change as add/delete/modify."""
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
        """
        Write a git-style diff header plus unified-diff body for one node
        into *buf*. *change* is 'add', 'delete', or None (modification).
        """
        tgt_path_bytes = safe_bytes(tgt_path)
        tgt_path = safe_str(tgt_path)

        src_path_bytes = safe_bytes(src_path)
        src_path = safe_str(src_path)

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith(b'text'):
            self.binary_content = True
            buf.write(b"=" * 67 + b'\n')
            buf.write(b"Cannot display: file marked as a binary type.\n")
            buf.write(b"svn:mime-type = %s\n" % mime_type)
            buf.write(b"Index: %b\n" % tgt_path_bytes)
            buf.write(b"=" * 67 + b'\n')
        buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write(b"new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write(b"deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
            tgt_lines = []
        else:
            buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        # we made our diff header, time to generate the diff content into our buffer

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)

            buf.writelines(udiff)

    def _get_mime_type(self, path) -> bytes:
        """
        Return the svn:mime-type property of *path*, preferring the target
        root and falling back to the source root when the node is missing
        there.
        """
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """
        Return the content of *node_path* split into lines (keeping line
        endings); empty for binary content and non-file nodes.
        """
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()

        return content.splitlines(True)
858
858
859
859
class DiffChangeEditor(svn.delta.Editor):
    """
    Delta editor that records every change between two revisions as
    ``(path, node_kind_or_None, action)`` tuples in ``self.changes``.
    """

    def __init__(self):
        # populated by the editor callbacks during svn.repos.dir_delta2
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        """Record a deleted node; its kind is unknown at delete time."""
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        """Record a newly added file."""
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        """Record a modified file."""
        self.changes.append((path, 'file', 'change'))
878
878
879
879
880 def authorization_callback_allow_all(root, path, pool):
880 def authorization_callback_allow_all(root, path, pool):
881 return True
881 return True
882
882
883
883
884 class TxnNodeProcessor:
884 class TxnNodeProcessor:
885 """
885 """
886 Utility to process the change of one node within a transaction root.
886 Utility to process the change of one node within a transaction root.
887
887
888 It encapsulates the knowledge of how to add, update or remove
888 It encapsulates the knowledge of how to add, update or remove
889 a node for a given transaction root. The purpose is to support the method
889 a node for a given transaction root. The purpose is to support the method
890 `SvnRemote.commit`.
890 `SvnRemote.commit`.
891 """
891 """
892
892
893 def __init__(self, node, txn_root):
893 def __init__(self, node, txn_root):
894 assert_bytes(node['path'])
894 assert_bytes(node['path'])
895
895
896 self.node = node
896 self.node = node
897 self.txn_root = txn_root
897 self.txn_root = txn_root
898
898
899 def update(self):
899 def update(self):
900 self._ensure_parent_dirs()
900 self._ensure_parent_dirs()
901 self._add_file_if_node_does_not_exist()
901 self._add_file_if_node_does_not_exist()
902 self._update_file_content()
902 self._update_file_content()
903 self._update_file_properties()
903 self._update_file_properties()
904
904
905 def remove(self):
905 def remove(self):
906 svn.fs.delete(self.txn_root, self.node['path'])
906 svn.fs.delete(self.txn_root, self.node['path'])
907 # TODO: Clean up directory if empty
907 # TODO: Clean up directory if empty
908
908
909 def _ensure_parent_dirs(self):
909 def _ensure_parent_dirs(self):
910 curdir = vcspath.dirname(self.node['path'])
910 curdir = vcspath.dirname(self.node['path'])
911 dirs_to_create = []
911 dirs_to_create = []
912 while not self._svn_path_exists(curdir):
912 while not self._svn_path_exists(curdir):
913 dirs_to_create.append(curdir)
913 dirs_to_create.append(curdir)
914 curdir = vcspath.dirname(curdir)
914 curdir = vcspath.dirname(curdir)
915
915
916 for curdir in reversed(dirs_to_create):
916 for curdir in reversed(dirs_to_create):
917 log.debug('Creating missing directory "%s"', curdir)
917 log.debug('Creating missing directory "%s"', curdir)
918 svn.fs.make_dir(self.txn_root, curdir)
918 svn.fs.make_dir(self.txn_root, curdir)
919
919
920 def _svn_path_exists(self, path):
920 def _svn_path_exists(self, path):
921 path_status = svn.fs.check_path(self.txn_root, path)
921 path_status = svn.fs.check_path(self.txn_root, path)
922 return path_status != svn.core.svn_node_none
922 return path_status != svn.core.svn_node_none
923
923
924 def _add_file_if_node_does_not_exist(self):
924 def _add_file_if_node_does_not_exist(self):
925 kind = svn.fs.check_path(self.txn_root, self.node['path'])
925 kind = svn.fs.check_path(self.txn_root, self.node['path'])
926 if kind == svn.core.svn_node_none:
926 if kind == svn.core.svn_node_none:
927 svn.fs.make_file(self.txn_root, self.node['path'])
927 svn.fs.make_file(self.txn_root, self.node['path'])
928
928
929 def _update_file_content(self):
929 def _update_file_content(self):
930 assert_bytes(self.node['content'])
930 assert_bytes(self.node['content'])
931
931
932 handler, baton = svn.fs.apply_textdelta(
932 handler, baton = svn.fs.apply_textdelta(
933 self.txn_root, self.node['path'], None, None)
933 self.txn_root, self.node['path'], None, None)
934 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
934 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
935
935
936 def _update_file_properties(self):
936 def _update_file_properties(self):
937 properties = self.node.get('properties', {})
937 properties = self.node.get('properties', {})
938 for key, value in properties.items():
938 for key, value in properties.items():
939 svn.fs.change_node_prop(
939 svn.fs.change_node_prop(
940 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
940 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
941
941
942
942
943 def apr_time_t(timestamp):
943 def apr_time_t(timestamp):
944 """
944 """
945 Convert a Python timestamp into APR timestamp type apr_time_t
945 Convert a Python timestamp into APR timestamp type apr_time_t
946 """
946 """
947 return int(timestamp * 1E6)
947 return int(timestamp * 1E6)
948
948
949
949
950 def svn_opt_revision_value_t(num):
950 def svn_opt_revision_value_t(num):
951 """
951 """
952 Put `num` into a `svn_opt_revision_value_t` structure.
952 Put `num` into a `svn_opt_revision_value_t` structure.
953 """
953 """
954 value = svn.core.svn_opt_revision_value_t()
954 value = svn.core.svn_opt_revision_value_t()
955 value.number = num
955 value.number = num
956 revision = svn.core.svn_opt_revision_t()
956 revision = svn.core.svn_opt_revision_t()
957 revision.kind = svn.core.svn_opt_revision_number
957 revision.kind = svn.core.svn_opt_revision_number
958 revision.value = value
958 revision.value = value
959 return revision
959 return revision
@@ -1,34 +1,34 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver import scm_app, wsgi_app_caller
18 from vcsserver import scm_app, wsgi_app_caller
19
19
20
20
21 class GitRemoteWsgi:
21 class GitRemoteWsgi:
22 def handle(self, environ, input_data, *args, **kwargs):
22 def handle(self, environ, input_data, *args, **kwargs):
23 app = wsgi_app_caller.WSGIAppCaller(
23 app = wsgi_app_caller.WSGIAppCaller(
24 scm_app.create_git_wsgi_app(*args, **kwargs))
24 scm_app.create_git_wsgi_app(*args, **kwargs))
25
25
26 return app.handle(environ, input_data)
26 return app.handle(environ, input_data)
27
27
28
28
29 class HgRemoteWsgi:
29 class HgRemoteWsgi:
30 def handle(self, environ, input_data, *args, **kwargs):
30 def handle(self, environ, input_data, *args, **kwargs):
31 app = wsgi_app_caller.WSGIAppCaller(
31 app = wsgi_app_caller.WSGIAppCaller(
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
33
33
34 return app.handle(environ, input_data)
34 return app.handle(environ, input_data)
@@ -1,258 +1,258 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.wireprotoserver
25 import mercurial.hgweb.common
25 import mercurial.hgweb.common
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 from vcsserver.lib.str_utils import ascii_bytes, safe_bytes
30 from vcsserver.lib.str_utils import ascii_bytes, safe_bytes
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request,
61 This code is unreachable because we guarantee that the HTTP request,
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible url.
63 never going to get a user-visible url.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 from mercurial.hgweb import request as requestmod
76 from mercurial.hgweb import request as requestmod
77 req = requestmod.parserequestfromenv(environ)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
79 gen = self.run_wsgi(req, res)
80
80
81 first_chunk = None
81 first_chunk = None
82
82
83 try:
83 try:
84 data = next(gen)
84 data = next(gen)
85
85
86 def first_chunk():
86 def first_chunk():
87 yield data
87 yield data
88 except StopIteration:
88 except StopIteration:
89 pass
89 pass
90
90
91 if first_chunk:
91 if first_chunk:
92 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
93 return gen
93 return gen
94
94
95 def _runwsgi(self, req, res, repo):
95 def _runwsgi(self, req, res, repo):
96
96
97 cmd = req.qsparams.get(b'cmd', '')
97 cmd = req.qsparams.get(b'cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
100 # internally from HG
101 log.warning('cmd: `%s` is not supported by the mercurial wireprotocol v1', cmd)
101 log.warning('cmd: `%s` is not supported by the mercurial wireprotocol v1', cmd)
102 from mercurial.hgweb.common import statusmessage
102 from mercurial.hgweb.common import statusmessage
103 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
104 res.setbodybytes(b'')
104 res.setbodybytes(b'')
105 return res.sendresponse()
105 return res.sendresponse()
106
106
107 return super()._runwsgi(req, res, repo)
107 return super()._runwsgi(req, res, repo)
108
108
109
109
110 def sanitize_hg_ui(baseui):
110 def sanitize_hg_ui(baseui):
111 # NOTE(marcink): since python3 hgsubversion is deprecated.
111 # NOTE(marcink): since python3 hgsubversion is deprecated.
112 # From old installations we might still have this set enabled
112 # From old installations we might still have this set enabled
113 # we explicitly remove this now here to make sure it wont propagate further
113 # we explicitly remove this now here to make sure it wont propagate further
114
114
115 if baseui.config(b'extensions', b'hgsubversion') is not None:
115 if baseui.config(b'extensions', b'hgsubversion') is not None:
116 for cfg in (baseui._ocfg, baseui._tcfg, baseui._ucfg):
116 for cfg in (baseui._ocfg, baseui._tcfg, baseui._ucfg):
117 if b'extensions' in cfg:
117 if b'extensions' in cfg:
118 if b'hgsubversion' in cfg[b'extensions']:
118 if b'hgsubversion' in cfg[b'extensions']:
119 del cfg[b'extensions'][b'hgsubversion']
119 del cfg[b'extensions'][b'hgsubversion']
120
120
121
121
122 def make_hg_ui_from_config(repo_config):
122 def make_hg_ui_from_config(repo_config):
123 baseui = mercurial.ui.ui()
123 baseui = mercurial.ui.ui()
124
124
125 # clean the baseui object
125 # clean the baseui object
126 baseui._ocfg = mercurial.config.config()
126 baseui._ocfg = mercurial.config.config()
127 baseui._ucfg = mercurial.config.config()
127 baseui._ucfg = mercurial.config.config()
128 baseui._tcfg = mercurial.config.config()
128 baseui._tcfg = mercurial.config.config()
129
129
130 for section, option, value in repo_config:
130 for section, option, value in repo_config:
131 baseui.setconfig(
131 baseui.setconfig(
132 ascii_bytes(section, allow_bytes=True),
132 ascii_bytes(section, allow_bytes=True),
133 ascii_bytes(option, allow_bytes=True),
133 ascii_bytes(option, allow_bytes=True),
134 ascii_bytes(value, allow_bytes=True))
134 ascii_bytes(value, allow_bytes=True))
135
135
136 # make our hgweb quiet so it doesn't print output
136 # make our hgweb quiet so it doesn't print output
137 baseui.setconfig(b'ui', b'quiet', b'true')
137 baseui.setconfig(b'ui', b'quiet', b'true')
138
138
139 # use POST requests with args instead of GET with headers - fixes issues with big repos with lots of branches
139 # use POST requests with args instead of GET with headers - fixes issues with big repos with lots of branches
140 baseui.setconfig(b'experimental', b'httppostargs', b'false')
140 baseui.setconfig(b'experimental', b'httppostargs', b'false')
141
141
142 return baseui
142 return baseui
143
143
144
144
145 def update_hg_ui_from_hgrc(baseui, repo_path):
145 def update_hg_ui_from_hgrc(baseui, repo_path):
146 path = os.path.join(repo_path, '.hg', 'hgrc')
146 path = os.path.join(repo_path, '.hg', 'hgrc')
147
147
148 if not os.path.isfile(path):
148 if not os.path.isfile(path):
149 log.debug('hgrc file is not present at %s, skipping...', path)
149 log.debug('hgrc file is not present at %s, skipping...', path)
150 return
150 return
151 log.debug('reading hgrc from %s', path)
151 log.debug('reading hgrc from %s', path)
152 cfg = mercurial.config.config()
152 cfg = mercurial.config.config()
153 cfg.read(ascii_bytes(path))
153 cfg.read(ascii_bytes(path))
154 for section in HG_UI_SECTIONS:
154 for section in HG_UI_SECTIONS:
155 for k, v in cfg.items(section):
155 for k, v in cfg.items(section):
156 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
156 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
157 baseui.setconfig(
157 baseui.setconfig(
158 ascii_bytes(section, allow_bytes=True),
158 ascii_bytes(section, allow_bytes=True),
159 ascii_bytes(k, allow_bytes=True),
159 ascii_bytes(k, allow_bytes=True),
160 ascii_bytes(v, allow_bytes=True))
160 ascii_bytes(v, allow_bytes=True))
161
161
162
162
163 def create_hg_wsgi_app(repo_path, repo_name, config):
163 def create_hg_wsgi_app(repo_path, repo_name, config):
164 """
164 """
165 Prepares a WSGI application to handle Mercurial requests.
165 Prepares a WSGI application to handle Mercurial requests.
166
166
167 :param config: is a list of 3-item tuples representing a ConfigObject
167 :param config: is a list of 3-item tuples representing a ConfigObject
168 (it is the serialized version of the config object).
168 (it is the serialized version of the config object).
169 """
169 """
170 log.debug("Creating Mercurial WSGI application")
170 log.debug("Creating Mercurial WSGI application")
171
171
172 baseui = make_hg_ui_from_config(config)
172 baseui = make_hg_ui_from_config(config)
173 update_hg_ui_from_hgrc(baseui, repo_path)
173 update_hg_ui_from_hgrc(baseui, repo_path)
174 sanitize_hg_ui(baseui)
174 sanitize_hg_ui(baseui)
175
175
176 try:
176 try:
177 return HgWeb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
177 return HgWeb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
178 except mercurial.error.RequirementError as e:
178 except mercurial.error.RequirementError as e:
179 raise exceptions.RequirementException(e)(e)
179 raise exceptions.RequirementException(e)(e)
180
180
181
181
182 class GitHandler:
182 class GitHandler:
183 """
183 """
184 Handler for Git operations like push/pull etc
184 Handler for Git operations like push/pull etc
185 """
185 """
186 def __init__(self, repo_location, repo_name, git_path, update_server_info,
186 def __init__(self, repo_location, repo_name, git_path, update_server_info,
187 extras):
187 extras):
188 if not os.path.isdir(repo_location):
188 if not os.path.isdir(repo_location):
189 raise OSError(repo_location)
189 raise OSError(repo_location)
190 self.content_path = repo_location
190 self.content_path = repo_location
191 self.repo_name = repo_name
191 self.repo_name = repo_name
192 self.repo_location = repo_location
192 self.repo_location = repo_location
193 self.extras = extras
193 self.extras = extras
194 self.git_path = git_path
194 self.git_path = git_path
195 self.update_server_info = update_server_info
195 self.update_server_info = update_server_info
196
196
197 def __call__(self, environ, start_response):
197 def __call__(self, environ, start_response):
198 app = webob.exc.HTTPNotFound()
198 app = webob.exc.HTTPNotFound()
199 candidate_paths = (
199 candidate_paths = (
200 self.content_path, os.path.join(self.content_path, '.git'))
200 self.content_path, os.path.join(self.content_path, '.git'))
201
201
202 for content_path in candidate_paths:
202 for content_path in candidate_paths:
203 try:
203 try:
204 app = pygrack.GitRepository(
204 app = pygrack.GitRepository(
205 self.repo_name, content_path, self.git_path,
205 self.repo_name, content_path, self.git_path,
206 self.update_server_info, self.extras)
206 self.update_server_info, self.extras)
207 break
207 break
208 except OSError:
208 except OSError:
209 continue
209 continue
210
210
211 return app(environ, start_response)
211 return app(environ, start_response)
212
212
213
213
214 def create_git_wsgi_app(repo_path, repo_name, config):
214 def create_git_wsgi_app(repo_path, repo_name, config):
215 """
215 """
216 Creates a WSGI application to handle Git requests.
216 Creates a WSGI application to handle Git requests.
217
217
218 :param config: is a dictionary holding the extras.
218 :param config: is a dictionary holding the extras.
219 """
219 """
220 git_path = settings.GIT_EXECUTABLE()
220 git_path = settings.GIT_EXECUTABLE()
221 update_server_info = config.pop('git_update_server_info')
221 update_server_info = config.pop('git_update_server_info')
222 app = GitHandler(
222 app = GitHandler(
223 repo_path, repo_name, git_path, update_server_info, config)
223 repo_path, repo_name, git_path, update_server_info, config)
224
224
225 return app
225 return app
226
226
227
227
228 class GitLFSHandler:
228 class GitLFSHandler:
229 """
229 """
230 Handler for Git LFS operations
230 Handler for Git LFS operations
231 """
231 """
232
232
233 def __init__(self, repo_location, repo_name, git_path, update_server_info,
233 def __init__(self, repo_location, repo_name, git_path, update_server_info,
234 extras):
234 extras):
235 if not os.path.isdir(repo_location):
235 if not os.path.isdir(repo_location):
236 raise OSError(repo_location)
236 raise OSError(repo_location)
237 self.content_path = repo_location
237 self.content_path = repo_location
238 self.repo_name = repo_name
238 self.repo_name = repo_name
239 self.repo_location = repo_location
239 self.repo_location = repo_location
240 self.extras = extras
240 self.extras = extras
241 self.git_path = git_path
241 self.git_path = git_path
242 self.update_server_info = update_server_info
242 self.update_server_info = update_server_info
243
243
244 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
244 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
245 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
245 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
246 return app
246 return app
247
247
248
248
249 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
249 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
250 git_path = settings.GIT_EXECUTABLE()
250 git_path = settings.GIT_EXECUTABLE()
251 update_server_info = config.pop('git_update_server_info')
251 update_server_info = config.pop('git_update_server_info')
252 git_lfs_enabled = config.pop('git_lfs_enabled')
252 git_lfs_enabled = config.pop('git_lfs_enabled')
253 git_lfs_store_path = config.pop('git_lfs_store_path')
253 git_lfs_store_path = config.pop('git_lfs_store_path')
254 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
254 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
255 app = GitLFSHandler(
255 app = GitLFSHandler(
256 repo_path, repo_name, git_path, update_server_info, config)
256 repo_path, repo_name, git_path, update_server_info, config)
257
257
258 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
258 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
@@ -1,78 +1,78 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import gc
18 import gc
19 import logging
19 import logging
20 import os
20 import os
21 import time
21 import time
22
22
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class VcsServer:
27 class VcsServer:
28 """
28 """
29 Exposed remote interface of the vcsserver itself.
29 Exposed remote interface of the vcsserver itself.
30
30
31 This object can be used to manage the server remotely. Right now the main
31 This object can be used to manage the server remotely. Right now the main
32 use case is to allow to shut down the server.
32 use case is to allow to shut down the server.
33 """
33 """
34
34
35 _shutdown = False
35 _shutdown = False
36
36
37 def shutdown(self):
37 def shutdown(self):
38 self._shutdown = True
38 self._shutdown = True
39
39
40 def ping(self):
40 def ping(self):
41 """
41 """
42 Utility to probe a server connection.
42 Utility to probe a server connection.
43 """
43 """
44 log.debug("Received server ping.")
44 log.debug("Received server ping.")
45
45
46 def echo(self, data):
46 def echo(self, data):
47 """
47 """
48 Utility for performance testing.
48 Utility for performance testing.
49
49
50 Allows to pass in arbitrary data and will return this data.
50 Allows to pass in arbitrary data and will return this data.
51 """
51 """
52 log.debug("Received server echo.")
52 log.debug("Received server echo.")
53 return data
53 return data
54
54
55 def sleep(self, seconds):
55 def sleep(self, seconds):
56 """
56 """
57 Utility to simulate long running server interaction.
57 Utility to simulate long running server interaction.
58 """
58 """
59 log.debug("Sleeping %s seconds", seconds)
59 log.debug("Sleeping %s seconds", seconds)
60 time.sleep(seconds)
60 time.sleep(seconds)
61
61
62 def get_pid(self):
62 def get_pid(self):
63 """
63 """
64 Allows to discover the PID based on a proxy object.
64 Allows to discover the PID based on a proxy object.
65 """
65 """
66 return os.getpid()
66 return os.getpid()
67
67
68 def run_gc(self):
68 def run_gc(self):
69 """
69 """
70 Allows to trigger the garbage collector.
70 Allows to trigger the garbage collector.
71
71
72 Main intention is to support statistics gathering during test runs.
72 Main intention is to support statistics gathering during test runs.
73 """
73 """
74 freed_objects = gc.collect()
74 freed_objects = gc.collect()
75 return {
75 return {
76 'freed_objects': freed_objects,
76 'freed_objects': freed_objects,
77 'garbage': len(gc.garbage),
77 'garbage': len(gc.garbage),
78 }
78 }
@@ -1,31 +1,31 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18
18
19 WIRE_ENCODING = 'UTF-8'
19 WIRE_ENCODING = 'UTF-8'
20
20
21 # Path where we can find binary dir
21 # Path where we can find binary dir
22 BINARY_DIR = ''
22 BINARY_DIR = ''
23
23
24 def GIT_EXECUTABLE() -> str:
24 def GIT_EXECUTABLE() -> str:
25 return os.environ.get('RC_GIT_EXECUTABLE') or os.path.join(BINARY_DIR, 'git')
25 return os.environ.get('RC_GIT_EXECUTABLE') or os.path.join(BINARY_DIR, 'git')
26
26
27 def SVN_EXECUTABLE() -> str:
27 def SVN_EXECUTABLE() -> str:
28 return os.environ.get('RC_SVN_EXECUTABLE') or os.path.join(BINARY_DIR, 'svn')
28 return os.environ.get('RC_SVN_EXECUTABLE') or os.path.join(BINARY_DIR, 'svn')
29
29
30 def SVNLOOK_EXECUTABLE() -> str:
30 def SVNLOOK_EXECUTABLE() -> str:
31 return os.environ.get('RC_SVNLOOK_EXECUTABLE') or os.path.join(BINARY_DIR, 'svnlook')
31 return os.environ.get('RC_SVNLOOK_EXECUTABLE') or os.path.join(BINARY_DIR, 'svnlook')
@@ -1,212 +1,212 b''
1 #
1 #
2 # Copyright (C) 2004-2009 Edgewall Software
2 # Copyright (C) 2004-2009 Edgewall Software
3 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
3 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
4 # Copyright (C) 2014-2023 RhodeCode GmbH
4 # Copyright (C) 2014-2024 RhodeCode GmbH
5 # All rights reserved.
5 # All rights reserved.
6 #
6 #
7 # This software is licensed as described in the file COPYING, which
7 # This software is licensed as described in the file COPYING, which
8 # you should have received as part of this distribution. The terms
8 # you should have received as part of this distribution. The terms
9 # are also available at http://trac.edgewall.org/wiki/TracLicense.
9 # are also available at http://trac.edgewall.org/wiki/TracLicense.
10 #
10 #
11 # This software consists of voluntary contributions made by many
11 # This software consists of voluntary contributions made by many
12 # individuals. For the exact contribution history, see the revision
12 # individuals. For the exact contribution history, see the revision
13 # history and logs, available at http://trac.edgewall.org/log/.
13 # history and logs, available at http://trac.edgewall.org/log/.
14 #
14 #
15 # Author: Christopher Lenz <cmlenz@gmx.de>
15 # Author: Christopher Lenz <cmlenz@gmx.de>
16
16
17 import difflib
17 import difflib
18
18
19
19
20 def get_filtered_hunks(from_lines, to_lines, context=None,
20 def get_filtered_hunks(from_lines, to_lines, context=None,
21 ignore_blank_lines: bool = False, ignore_case: bool = False,
21 ignore_blank_lines: bool = False, ignore_case: bool = False,
22 ignore_space_changes: bool = False):
22 ignore_space_changes: bool = False):
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
24 opcodes, grouped according to the ``context`` and ``ignore_*``
24 opcodes, grouped according to the ``context`` and ``ignore_*``
25 parameters.
25 parameters.
26
26
27 :param from_lines: list of lines corresponding to the old content
27 :param from_lines: list of lines corresponding to the old content
28 :param to_lines: list of lines corresponding to the new content
28 :param to_lines: list of lines corresponding to the new content
29 :param ignore_blank_lines: differences about empty lines only are ignored
29 :param ignore_blank_lines: differences about empty lines only are ignored
30 :param ignore_case: upper case / lower case only differences are ignored
30 :param ignore_case: upper case / lower case only differences are ignored
31 :param ignore_space_changes: differences in amount of spaces are ignored
31 :param ignore_space_changes: differences in amount of spaces are ignored
32 :param context: the number of "equal" lines kept for representing
32 :param context: the number of "equal" lines kept for representing
33 the context of the change
33 the context of the change
34 :return: generator of grouped `difflib.SequenceMatcher` opcodes
34 :return: generator of grouped `difflib.SequenceMatcher` opcodes
35
35
36 If none of the ``ignore_*`` parameters is `True`, there's nothing
36 If none of the ``ignore_*`` parameters is `True`, there's nothing
37 to filter out the results will come straight from the
37 to filter out the results will come straight from the
38 SequenceMatcher.
38 SequenceMatcher.
39 """
39 """
40 hunks = get_hunks(from_lines, to_lines, context)
40 hunks = get_hunks(from_lines, to_lines, context)
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
42 hunks = filter_ignorable_lines(hunks, from_lines, to_lines, context,
42 hunks = filter_ignorable_lines(hunks, from_lines, to_lines, context,
43 ignore_blank_lines, ignore_case,
43 ignore_blank_lines, ignore_case,
44 ignore_space_changes)
44 ignore_space_changes)
45 return hunks
45 return hunks
46
46
47
47
48 def get_hunks(from_lines, to_lines, context=None):
48 def get_hunks(from_lines, to_lines, context=None):
49 """Generator yielding grouped opcodes describing differences .
49 """Generator yielding grouped opcodes describing differences .
50
50
51 See `get_filtered_hunks` for the parameter descriptions.
51 See `get_filtered_hunks` for the parameter descriptions.
52 """
52 """
53 matcher = difflib.SequenceMatcher(None, from_lines, to_lines)
53 matcher = difflib.SequenceMatcher(None, from_lines, to_lines)
54 if context is None:
54 if context is None:
55 return (hunk for hunk in [matcher.get_opcodes()])
55 return (hunk for hunk in [matcher.get_opcodes()])
56 else:
56 else:
57 return matcher.get_grouped_opcodes(context)
57 return matcher.get_grouped_opcodes(context)
58
58
59
59
60 def filter_ignorable_lines(hunks, from_lines, to_lines, context,
60 def filter_ignorable_lines(hunks, from_lines, to_lines, context,
61 ignore_blank_lines, ignore_case,
61 ignore_blank_lines, ignore_case,
62 ignore_space_changes):
62 ignore_space_changes):
63 """Detect line changes that should be ignored and emits them as
63 """Detect line changes that should be ignored and emits them as
64 tagged as "equal", possibly joined with the preceding and/or
64 tagged as "equal", possibly joined with the preceding and/or
65 following "equal" block.
65 following "equal" block.
66
66
67 See `get_filtered_hunks` for the parameter descriptions.
67 See `get_filtered_hunks` for the parameter descriptions.
68 """
68 """
69 def is_ignorable(tag, fromlines, tolines):
69 def is_ignorable(tag, fromlines, tolines):
70
70
71 if tag == 'delete' and ignore_blank_lines:
71 if tag == 'delete' and ignore_blank_lines:
72 if b''.join(fromlines) == b'':
72 if b''.join(fromlines) == b'':
73 return True
73 return True
74 elif tag == 'insert' and ignore_blank_lines:
74 elif tag == 'insert' and ignore_blank_lines:
75 if b''.join(tolines) == b'':
75 if b''.join(tolines) == b'':
76 return True
76 return True
77 elif tag == 'replace' and (ignore_case or ignore_space_changes):
77 elif tag == 'replace' and (ignore_case or ignore_space_changes):
78 if len(fromlines) != len(tolines):
78 if len(fromlines) != len(tolines):
79 return False
79 return False
80
80
81 def f(input_str):
81 def f(input_str):
82 if ignore_case:
82 if ignore_case:
83 input_str = input_str.lower()
83 input_str = input_str.lower()
84 if ignore_space_changes:
84 if ignore_space_changes:
85 input_str = b' '.join(input_str.split())
85 input_str = b' '.join(input_str.split())
86 return input_str
86 return input_str
87
87
88 for i in range(len(fromlines)):
88 for i in range(len(fromlines)):
89 if f(fromlines[i]) != f(tolines[i]):
89 if f(fromlines[i]) != f(tolines[i]):
90 return False
90 return False
91 return True
91 return True
92
92
93 hunks = list(hunks)
93 hunks = list(hunks)
94 opcodes = []
94 opcodes = []
95 ignored_lines = False
95 ignored_lines = False
96 prev = None
96 prev = None
97 for hunk in hunks:
97 for hunk in hunks:
98 for tag, i1, i2, j1, j2 in hunk:
98 for tag, i1, i2, j1, j2 in hunk:
99 if tag == 'equal':
99 if tag == 'equal':
100 if prev:
100 if prev:
101 prev = (tag, prev[1], i2, prev[3], j2)
101 prev = (tag, prev[1], i2, prev[3], j2)
102 else:
102 else:
103 prev = (tag, i1, i2, j1, j2)
103 prev = (tag, i1, i2, j1, j2)
104 else:
104 else:
105 if is_ignorable(tag, from_lines[i1:i2], to_lines[j1:j2]):
105 if is_ignorable(tag, from_lines[i1:i2], to_lines[j1:j2]):
106 ignored_lines = True
106 ignored_lines = True
107 if prev:
107 if prev:
108 prev = 'equal', prev[1], i2, prev[3], j2
108 prev = 'equal', prev[1], i2, prev[3], j2
109 else:
109 else:
110 prev = 'equal', i1, i2, j1, j2
110 prev = 'equal', i1, i2, j1, j2
111 continue
111 continue
112 if prev:
112 if prev:
113 opcodes.append(prev)
113 opcodes.append(prev)
114 opcodes.append((tag, i1, i2, j1, j2))
114 opcodes.append((tag, i1, i2, j1, j2))
115 prev = None
115 prev = None
116 if prev:
116 if prev:
117 opcodes.append(prev)
117 opcodes.append(prev)
118
118
119 if ignored_lines:
119 if ignored_lines:
120 if context is None:
120 if context is None:
121 yield opcodes
121 yield opcodes
122 else:
122 else:
123 # we leave at most n lines with the tag 'equal' before and after
123 # we leave at most n lines with the tag 'equal' before and after
124 # every change
124 # every change
125 n = context
125 n = context
126 nn = n + n
126 nn = n + n
127
127
128 group = []
128 group = []
129
129
130 def all_equal():
130 def all_equal():
131 all(op[0] == 'equal' for op in group)
131 all(op[0] == 'equal' for op in group)
132 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
132 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
133 if idx == 0 and tag == 'equal': # Fixup leading unchanged block
133 if idx == 0 and tag == 'equal': # Fixup leading unchanged block
134 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
134 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
135 elif tag == 'equal' and i2 - i1 > nn:
135 elif tag == 'equal' and i2 - i1 > nn:
136 group.append((tag, i1, min(i2, i1 + n), j1,
136 group.append((tag, i1, min(i2, i1 + n), j1,
137 min(j2, j1 + n)))
137 min(j2, j1 + n)))
138 if not all_equal():
138 if not all_equal():
139 yield group
139 yield group
140 group = []
140 group = []
141 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
141 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
142 group.append((tag, i1, i2, j1, j2))
142 group.append((tag, i1, i2, j1, j2))
143
143
144 if group and not (len(group) == 1 and group[0][0] == 'equal'):
144 if group and not (len(group) == 1 and group[0][0] == 'equal'):
145 if group[-1][0] == 'equal': # Fixup trailing unchanged block
145 if group[-1][0] == 'equal': # Fixup trailing unchanged block
146 tag, i1, i2, j1, j2 = group[-1]
146 tag, i1, i2, j1, j2 = group[-1]
147 group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
147 group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
148 if not all_equal():
148 if not all_equal():
149 yield group
149 yield group
150 else:
150 else:
151 for hunk in hunks:
151 for hunk in hunks:
152 yield hunk
152 yield hunk
153
153
154
154
155 NO_NEWLINE_AT_END = b'\\ No newline at end of file'
155 NO_NEWLINE_AT_END = b'\\ No newline at end of file'
156 LINE_TERM = b'\n'
156 LINE_TERM = b'\n'
157
157
158
158
159 def unified_diff(from_lines, to_lines, context=None, ignore_blank_lines: bool = False,
159 def unified_diff(from_lines, to_lines, context=None, ignore_blank_lines: bool = False,
160 ignore_case: bool = False, ignore_space_changes: bool = False, lineterm=LINE_TERM) -> bytes:
160 ignore_case: bool = False, ignore_space_changes: bool = False, lineterm=LINE_TERM) -> bytes:
161 """
161 """
162 Generator producing lines corresponding to a textual diff.
162 Generator producing lines corresponding to a textual diff.
163
163
164 See `get_filtered_hunks` for the parameter descriptions.
164 See `get_filtered_hunks` for the parameter descriptions.
165 """
165 """
166 # TODO: johbo: Check if this can be nicely integrated into the matching
166 # TODO: johbo: Check if this can be nicely integrated into the matching
167
167
168 if ignore_space_changes:
168 if ignore_space_changes:
169 from_lines = [l.strip() for l in from_lines]
169 from_lines = [l.strip() for l in from_lines]
170 to_lines = [l.strip() for l in to_lines]
170 to_lines = [l.strip() for l in to_lines]
171
171
172 def _hunk_range(start, length) -> bytes:
172 def _hunk_range(start, length) -> bytes:
173 if length != 1:
173 if length != 1:
174 return b'%d,%d' % (start, length)
174 return b'%d,%d' % (start, length)
175 else:
175 else:
176 return b'%d' % (start,)
176 return b'%d' % (start,)
177
177
178 for group in get_filtered_hunks(from_lines, to_lines, context,
178 for group in get_filtered_hunks(from_lines, to_lines, context,
179 ignore_blank_lines, ignore_case,
179 ignore_blank_lines, ignore_case,
180 ignore_space_changes):
180 ignore_space_changes):
181 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
181 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
182 if i1 == 0 and i2 == 0:
182 if i1 == 0 and i2 == 0:
183 i1, i2 = -1, -1 # support for Add changes
183 i1, i2 = -1, -1 # support for Add changes
184 if j1 == 0 and j2 == 0:
184 if j1 == 0 and j2 == 0:
185 j1, j2 = -1, -1 # support for Delete changes
185 j1, j2 = -1, -1 # support for Delete changes
186 yield b'@@ -%b +%b @@%b' % (
186 yield b'@@ -%b +%b @@%b' % (
187 _hunk_range(i1 + 1, i2 - i1),
187 _hunk_range(i1 + 1, i2 - i1),
188 _hunk_range(j1 + 1, j2 - j1),
188 _hunk_range(j1 + 1, j2 - j1),
189 lineterm)
189 lineterm)
190 for tag, i1, i2, j1, j2 in group:
190 for tag, i1, i2, j1, j2 in group:
191 if tag == 'equal':
191 if tag == 'equal':
192 for line in from_lines[i1:i2]:
192 for line in from_lines[i1:i2]:
193 if not line.endswith(lineterm):
193 if not line.endswith(lineterm):
194 yield b' ' + line + lineterm
194 yield b' ' + line + lineterm
195 yield NO_NEWLINE_AT_END + lineterm
195 yield NO_NEWLINE_AT_END + lineterm
196 else:
196 else:
197 yield b' ' + line
197 yield b' ' + line
198 else:
198 else:
199 if tag in ('replace', 'delete'):
199 if tag in ('replace', 'delete'):
200 for line in from_lines[i1:i2]:
200 for line in from_lines[i1:i2]:
201 if not line.endswith(lineterm):
201 if not line.endswith(lineterm):
202 yield b'-' + line + lineterm
202 yield b'-' + line + lineterm
203 yield NO_NEWLINE_AT_END + lineterm
203 yield NO_NEWLINE_AT_END + lineterm
204 else:
204 else:
205 yield b'-' + line
205 yield b'-' + line
206 if tag in ('replace', 'insert'):
206 if tag in ('replace', 'insert'):
207 for line in to_lines[j1:j2]:
207 for line in to_lines[j1:j2]:
208 if not line.endswith(lineterm):
208 if not line.endswith(lineterm):
209 yield b'+' + line + lineterm
209 yield b'+' + line + lineterm
210 yield NO_NEWLINE_AT_END + lineterm
210 yield NO_NEWLINE_AT_END + lineterm
211 else:
211 else:
212 yield b'+' + line
212 yield b'+' + line
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,85 +1,85 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21 import configparser
21 import configparser
22
22
23
23
24 class ContextINI:
24 class ContextINI:
25 """
25 """
26 Allows to create a new test.ini file as a copy of existing one with edited
26 Allows to create a new test.ini file as a copy of existing one with edited
27 data. If existing file is not present, it creates a new one. Example usage::
27 data. If existing file is not present, it creates a new one. Example usage::
28
28
29 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
29 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 print 'vcsserver --config=%s' % new_test_ini
30 print 'vcsserver --config=%s' % new_test_ini
31 """
31 """
32
32
33 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
33 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 destroy=True):
34 destroy=True):
35 self.ini_file_path = ini_file_path
35 self.ini_file_path = ini_file_path
36 self.ini_params = ini_params
36 self.ini_params = ini_params
37 self.new_path = None
37 self.new_path = None
38 self.new_path_prefix = new_file_prefix or 'test'
38 self.new_path_prefix = new_file_prefix or 'test'
39 self.destroy = destroy
39 self.destroy = destroy
40
40
41 def __enter__(self):
41 def __enter__(self):
42 _, pref = tempfile.mkstemp()
42 _, pref = tempfile.mkstemp()
43 loc = tempfile.gettempdir()
43 loc = tempfile.gettempdir()
44 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
44 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 pref, self.new_path_prefix, self.ini_file_path))
45 pref, self.new_path_prefix, self.ini_file_path))
46
46
47 # copy ini file and modify according to the params, if we re-use a file
47 # copy ini file and modify according to the params, if we re-use a file
48 if os.path.isfile(self.ini_file_path):
48 if os.path.isfile(self.ini_file_path):
49 shutil.copy(self.ini_file_path, self.new_path)
49 shutil.copy(self.ini_file_path, self.new_path)
50 else:
50 else:
51 # create new dump file for configObj to write to.
51 # create new dump file for configObj to write to.
52 with open(self.new_path, 'wb'):
52 with open(self.new_path, 'wb'):
53 pass
53 pass
54
54
55 parser = configparser.ConfigParser()
55 parser = configparser.ConfigParser()
56 parser.read(self.ini_file_path)
56 parser.read(self.ini_file_path)
57
57
58 for data in self.ini_params:
58 for data in self.ini_params:
59 section, ini_params = list(data.items())[0]
59 section, ini_params = list(data.items())[0]
60 key, val = list(ini_params.items())[0]
60 key, val = list(ini_params.items())[0]
61 if section not in parser:
61 if section not in parser:
62 parser[section] = {}
62 parser[section] = {}
63 parser[section][key] = val
63 parser[section][key] = val
64 with open(self.ini_file_path, 'w') as f:
64 with open(self.ini_file_path, 'w') as f:
65 parser.write(f)
65 parser.write(f)
66 return self.new_path
66 return self.new_path
67
67
68 def __exit__(self, exc_type, exc_val, exc_tb):
68 def __exit__(self, exc_type, exc_val, exc_tb):
69 if self.destroy:
69 if self.destroy:
70 os.remove(self.new_path)
70 os.remove(self.new_path)
71
71
72
72
73 def no_newline_id_generator(test_name):
73 def no_newline_id_generator(test_name):
74 """
74 """
75 Generates a test name without spaces or newlines characters. Used for
75 Generates a test name without spaces or newlines characters. Used for
76 nicer output of progress of test
76 nicer output of progress of test
77 """
77 """
78 org_name = test_name
78 org_name = test_name
79 test_name = str(test_name)\
79 test_name = str(test_name)\
80 .replace('\n', '_N') \
80 .replace('\n', '_N') \
81 .replace('\r', '_N') \
81 .replace('\r', '_N') \
82 .replace('\t', '_T') \
82 .replace('\t', '_T') \
83 .replace(' ', '_S')
83 .replace(' ', '_S')
84
84
85 return test_name or 'test-with-empty-name'
85 return test_name or 'test-with-empty-name'
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver.remote import git_remote
24 from vcsserver.remote import git_remote
25
25
26 SAMPLE_REFS = {
26 SAMPLE_REFS = {
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 }
32 }
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture
36 def git_remote_fix():
36 def git_remote_fix():
37 """
37 """
38 A GitRemote instance with a mock factory.
38 A GitRemote instance with a mock factory.
39 """
39 """
40 factory = Mock()
40 factory = Mock()
41 remote = git_remote.GitRemote(factory)
41 remote = git_remote.GitRemote(factory)
42 return remote
42 return remote
43
43
44
44
45 def test_discover_git_version(git_remote_fix):
45 def test_discover_git_version(git_remote_fix):
46 version = git_remote_fix.discover_git_version()
46 version = git_remote_fix.discover_git_version()
47 assert version
47 assert version
48
48
49
49
50 class TestGitFetch:
50 class TestGitFetch:
51 def setup_method(self):
51 def setup_method(self):
52 self.mock_repo = Mock()
52 self.mock_repo = Mock()
53 factory = Mock()
53 factory = Mock()
54 factory.repo = Mock(return_value=self.mock_repo)
54 factory.repo = Mock(return_value=self.mock_repo)
55 self.remote_git = git_remote.GitRemote(factory)
55 self.remote_git = git_remote.GitRemote(factory)
56
56
57 def test_fetches_all_when_no_commit_ids_specified(self):
57 def test_fetches_all_when_no_commit_ids_specified(self):
58 def side_effect(determine_wants, *args, **kwargs):
58 def side_effect(determine_wants, *args, **kwargs):
59 determine_wants(SAMPLE_REFS)
59 determine_wants(SAMPLE_REFS)
60
60
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 mock_fetch.side_effect = side_effect
62 mock_fetch.side_effect = side_effect
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 determine_wants = self.mock_repo.object_store.determine_wants_all
64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66
66
67 def test_fetches_specified_commits(self):
67 def test_fetches_specified_commits(self):
68 selected_refs = {
68 selected_refs = {
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 }
71 }
72
72
73 def side_effect(determine_wants, *args, **kwargs):
73 def side_effect(determine_wants, *args, **kwargs):
74 result = determine_wants(SAMPLE_REFS)
74 result = determine_wants(SAMPLE_REFS)
75 assert sorted(result) == sorted(selected_refs.values())
75 assert sorted(result) == sorted(selected_refs.values())
76 return result
76 return result
77
77
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 mock_fetch.side_effect = side_effect
79 mock_fetch.side_effect = side_effect
80 self.remote_git.pull(
80 self.remote_git.pull(
81 wire={}, url='/tmp/', apply_refs=False,
81 wire={}, url='/tmp/', apply_refs=False,
82 refs=list(selected_refs.keys()))
82 refs=list(selected_refs.keys()))
83 determine_wants = self.mock_repo.object_store.determine_wants_all
83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 assert determine_wants.call_count == 0
84 assert determine_wants.call_count == 0
85
85
86 def test_get_remote_refs(self):
86 def test_get_remote_refs(self):
87 factory = Mock()
87 factory = Mock()
88 remote_git = git_remote.GitRemote(factory)
88 remote_git = git_remote.GitRemote(factory)
89 url = 'https://example.com/test/test.git'
89 url = 'https://example.com/test/test.git'
90 sample_refs = {
90 sample_refs = {
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 }
93 }
94
94
95 with patch('vcsserver.remote.git_remote.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git_remote.Repo', create=False) as mock_repo:
96 mock_repo().get_refs.return_value = sample_refs
96 mock_repo().get_refs.return_value = sample_refs
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 mock_repo().get_refs.assert_called_once_with()
98 mock_repo().get_refs.assert_called_once_with()
99 assert remote_refs == sample_refs
99 assert remote_refs == sample_refs
100
100
101
101
102 class TestReraiseSafeExceptions:
102 class TestReraiseSafeExceptions:
103
103
104 def test_method_decorated_with_reraise_safe_exceptions(self):
104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 factory = Mock()
105 factory = Mock()
106 git_remote_instance = git_remote.GitRemote(factory)
106 git_remote_instance = git_remote.GitRemote(factory)
107
107
108 def fake_function():
108 def fake_function():
109 return None
109 return None
110
110
111 decorator = git_remote.reraise_safe_exceptions(fake_function)
111 decorator = git_remote.reraise_safe_exceptions(fake_function)
112
112
113 methods = inspect.getmembers(git_remote_instance, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote_instance, predicate=inspect.ismethod)
114 for method_name, method in methods:
114 for method_name, method in methods:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 assert method.__func__.__code__ == decorator.__code__
116 assert method.__func__.__code__ == decorator.__code__
117
117
118 @pytest.mark.parametrize('side_effect, expected_type', [
118 @pytest.mark.parametrize('side_effect, expected_type', [
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.HangupException(), 'error'),
123 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 ])
125 ])
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 @git_remote.reraise_safe_exceptions
127 @git_remote.reraise_safe_exceptions
128 def fake_method():
128 def fake_method():
129 raise side_effect
129 raise side_effect
130
130
131 with pytest.raises(Exception) as exc_info:
131 with pytest.raises(Exception) as exc_info:
132 fake_method()
132 fake_method()
133 assert type(exc_info.value) == Exception
133 assert type(exc_info.value) == Exception
134 assert exc_info.value._vcs_kind == expected_type
134 assert exc_info.value._vcs_kind == expected_type
135
135
136
136
137 class TestDulwichRepoWrapper:
137 class TestDulwichRepoWrapper:
138 def test_calls_close_on_delete(self):
138 def test_calls_close_on_delete(self):
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git_remote.Repo, 'close') as close_mock:
140 with patch.object(git_remote.Repo, 'close') as close_mock:
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git_remote.Repo('/tmp/abcde')
142 repo = git_remote.Repo('/tmp/abcde')
143 assert repo is not None
143 assert repo is not None
144 repo.__del__()
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
146
147 close_mock.assert_called_once_with()
147 close_mock.assert_called_once_with()
148
148
149
149
150 class TestGitFactory:
150 class TestGitFactory:
151 def test_create_repo_returns_dulwich_wrapper(self):
151 def test_create_repo_returns_dulwich_wrapper(self):
152
152
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 mock.side_effect = {'repo_objects': ''}
154 mock.side_effect = {'repo_objects': ''}
155 factory = git_remote.GitFactory()
155 factory = git_remote.GitFactory()
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git_remote.Repo)
162 assert isinstance(result, git_remote.Repo)
@@ -1,112 +1,112 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, patch
24 from mock import Mock, patch
25
25
26 from vcsserver import exceptions, hgcompat
26 from vcsserver import exceptions, hgcompat
27 from vcsserver.remote import hg_remote
27 from vcsserver.remote import hg_remote
28
28
29
29
30 class TestDiff:
30 class TestDiff:
31 def test_raising_safe_exception_when_lookup_failed(self):
31 def test_raising_safe_exception_when_lookup_failed(self):
32
32
33 factory = Mock()
33 factory = Mock()
34 hg_remote_instance = hg_remote.HgRemote(factory)
34 hg_remote_instance = hg_remote.HgRemote(factory)
35 with patch('mercurial.patch.diff') as diff_mock:
35 with patch('mercurial.patch.diff') as diff_mock:
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
37
37
38 with pytest.raises(Exception) as exc_info:
38 with pytest.raises(Exception) as exc_info:
39 hg_remote_instance.diff(
39 hg_remote_instance.diff(
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
41 file_filter=None, opt_git=True, opt_ignorews=True,
41 file_filter=None, opt_git=True, opt_ignorews=True,
42 context=3)
42 context=3)
43 assert type(exc_info.value) == Exception
43 assert type(exc_info.value) == Exception
44 assert exc_info.value._vcs_kind == 'lookup'
44 assert exc_info.value._vcs_kind == 'lookup'
45
45
46
46
47 class TestReraiseSafeExceptions:
47 class TestReraiseSafeExceptions:
48 original_traceback = None
48 original_traceback = None
49
49
50 def test_method_decorated_with_reraise_safe_exceptions(self):
50 def test_method_decorated_with_reraise_safe_exceptions(self):
51 factory = Mock()
51 factory = Mock()
52 hg_remote_instance = hg_remote.HgRemote(factory)
52 hg_remote_instance = hg_remote.HgRemote(factory)
53 methods = inspect.getmembers(hg_remote_instance, predicate=inspect.ismethod)
53 methods = inspect.getmembers(hg_remote_instance, predicate=inspect.ismethod)
54 decorator = hg_remote.reraise_safe_exceptions(None)
54 decorator = hg_remote.reraise_safe_exceptions(None)
55 for method_name, method in methods:
55 for method_name, method in methods:
56 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
56 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
57 assert method.__func__.__code__ == decorator.__code__
57 assert method.__func__.__code__ == decorator.__code__
58
58
59 @pytest.mark.parametrize('side_effect, expected_type', [
59 @pytest.mark.parametrize('side_effect, expected_type', [
60 (hgcompat.Abort(b'failed-abort'), 'abort'),
60 (hgcompat.Abort(b'failed-abort'), 'abort'),
61 (hgcompat.InterventionRequired(b'intervention-required'), 'abort'),
61 (hgcompat.InterventionRequired(b'intervention-required'), 'abort'),
62 (hgcompat.RepoLookupError(), 'lookup'),
62 (hgcompat.RepoLookupError(), 'lookup'),
63 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
63 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
64 (hgcompat.RepoError(), 'error'),
64 (hgcompat.RepoError(), 'error'),
65 (hgcompat.RequirementError(), 'requirement'),
65 (hgcompat.RequirementError(), 'requirement'),
66 ])
66 ])
67 def test_safe_exceptions_reraised(self, side_effect, expected_type):
67 def test_safe_exceptions_reraised(self, side_effect, expected_type):
68 @hg_remote.reraise_safe_exceptions
68 @hg_remote.reraise_safe_exceptions
69 def fake_method():
69 def fake_method():
70 raise side_effect
70 raise side_effect
71
71
72 with pytest.raises(Exception) as exc_info:
72 with pytest.raises(Exception) as exc_info:
73 fake_method()
73 fake_method()
74 assert type(exc_info.value) == Exception
74 assert type(exc_info.value) == Exception
75 assert exc_info.value._vcs_kind == expected_type
75 assert exc_info.value._vcs_kind == expected_type
76
76
77 def test_keeps_original_traceback(self):
77 def test_keeps_original_traceback(self):
78
78
79 @hg_remote.reraise_safe_exceptions
79 @hg_remote.reraise_safe_exceptions
80 def fake_method():
80 def fake_method():
81 try:
81 try:
82 raise hgcompat.Abort(b'test-abort')
82 raise hgcompat.Abort(b'test-abort')
83 except:
83 except:
84 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
84 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
85 raise
85 raise
86
86
87 new_traceback = None
87 new_traceback = None
88 try:
88 try:
89 fake_method()
89 fake_method()
90 except Exception:
90 except Exception:
91 new_traceback = traceback.format_tb(sys.exc_info()[2])
91 new_traceback = traceback.format_tb(sys.exc_info()[2])
92
92
93 new_traceback_tail = new_traceback[-len(self.original_traceback):]
93 new_traceback_tail = new_traceback[-len(self.original_traceback):]
94 assert new_traceback_tail == self.original_traceback
94 assert new_traceback_tail == self.original_traceback
95
95
96 def test_maps_unknown_exceptions_to_unhandled(self):
96 def test_maps_unknown_exceptions_to_unhandled(self):
97 @hg_remote.reraise_safe_exceptions
97 @hg_remote.reraise_safe_exceptions
98 def stub_method():
98 def stub_method():
99 raise ValueError('stub')
99 raise ValueError('stub')
100
100
101 with pytest.raises(Exception) as exc_info:
101 with pytest.raises(Exception) as exc_info:
102 stub_method()
102 stub_method()
103 assert exc_info.value._vcs_kind == 'unhandled'
103 assert exc_info.value._vcs_kind == 'unhandled'
104
104
105 def test_does_not_map_known_exceptions(self):
105 def test_does_not_map_known_exceptions(self):
106 @hg_remote.reraise_safe_exceptions
106 @hg_remote.reraise_safe_exceptions
107 def stub_method():
107 def stub_method():
108 raise exceptions.LookupException()('stub')
108 raise exceptions.LookupException()('stub')
109
109
110 with pytest.raises(Exception) as exc_info:
110 with pytest.raises(Exception) as exc_info:
111 stub_method()
111 stub_method()
112 assert exc_info.value._vcs_kind == 'lookup'
112 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,119 +1,119 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = b'largefiles=serve'
24 LARGEFILES_CAPABILITY = b'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto._capabilities.__name__ == '_dynamic_capabilities'
31 assert lfproto._capabilities.__name__ == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 orig_capabilities):
36 orig_capabilities):
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
38 hgcompat.largefiles.proto, stub_extensions)
38 hgcompat.largefiles.proto, stub_extensions)
39
39
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
41
41
42 stub_extensions.assert_called_once_with(stub_ui)
42 stub_extensions.assert_called_once_with(stub_ui)
43 assert LARGEFILES_CAPABILITY not in caps
43 assert LARGEFILES_CAPABILITY not in caps
44
44
45
45
46 def test_dynamic_capabilities_ignores_updated_capabilities(
46 def test_dynamic_capabilities_ignores_updated_capabilities(
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 orig_capabilities):
48 orig_capabilities):
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
51 hgcompat.largefiles.proto, stub_extensions)
51 hgcompat.largefiles.proto, stub_extensions)
52
52
53 # This happens when the extension is loaded for the first time, important
53 # This happens when the extension is loaded for the first time, important
54 # to ensure that an updated function is correctly picked up.
54 # to ensure that an updated function is correctly picked up.
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
56 side_effect=Exception('Must not be called'))
56 side_effect=Exception('Must not be called'))
57
57
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
59
59
60
60
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 orig_capabilities):
63 orig_capabilities):
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
65
65
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
67 hgcompat.largefiles.proto, stub_extensions)
67 hgcompat.largefiles.proto, stub_extensions)
68
68
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
70
70
71 stub_extensions.assert_called_once_with(stub_ui)
71 stub_extensions.assert_called_once_with(stub_ui)
72 assert LARGEFILES_CAPABILITY in caps
72 assert LARGEFILES_CAPABILITY in caps
73
73
74
74
75 @pytest.fixture
75 @pytest.fixture
76 def patched_capabilities(request):
76 def patched_capabilities(request):
77 """
77 """
78 Patch in `capabilitiesorig` and restore both capability functions.
78 Patch in `capabilitiesorig` and restore both capability functions.
79 """
79 """
80 lfproto = hgcompat.largefiles.proto
80 lfproto = hgcompat.largefiles.proto
81 orig_capabilities = lfproto._capabilities
81 orig_capabilities = lfproto._capabilities
82
82
83 @request.addfinalizer
83 @request.addfinalizer
84 def restore():
84 def restore():
85 lfproto._capabilities = orig_capabilities
85 lfproto._capabilities = orig_capabilities
86
86
87
87
88 @pytest.fixture
88 @pytest.fixture
89 def stub_repo(stub_ui):
89 def stub_repo(stub_ui):
90 repo = mock.Mock()
90 repo = mock.Mock()
91 repo.ui = stub_ui
91 repo.ui = stub_ui
92 return repo
92 return repo
93
93
94
94
95 @pytest.fixture
95 @pytest.fixture
96 def stub_proto(stub_ui):
96 def stub_proto(stub_ui):
97 proto = mock.Mock()
97 proto = mock.Mock()
98 proto.ui = stub_ui
98 proto.ui = stub_ui
99 return proto
99 return proto
100
100
101
101
102 @pytest.fixture
102 @pytest.fixture
103 def orig_capabilities():
103 def orig_capabilities():
104 from mercurial.wireprotov1server import wireprotocaps
104 from mercurial.wireprotov1server import wireprotocaps
105
105
106 def _capabilities(repo, proto):
106 def _capabilities(repo, proto):
107 return wireprotocaps
107 return wireprotocaps
108 return _capabilities
108 return _capabilities
109
109
110
110
111 @pytest.fixture
111 @pytest.fixture
112 def stub_ui():
112 def stub_ui():
113 return hgcompat.ui.ui()
113 return hgcompat.ui.ui()
114
114
115
115
116 @pytest.fixture
116 @pytest.fixture
117 def stub_extensions():
117 def stub_extensions():
118 extensions = mock.Mock(return_value=tuple())
118 extensions = mock.Mock(return_value=tuple())
119 return extensions
119 return extensions
@@ -1,141 +1,141 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import pytest
18 import pytest
19
19
20 import mercurial.ui
20 import mercurial.ui
21 import mock
21 import mock
22
22
23 from vcsserver.lib.ext_json import json
23 from vcsserver.lib.ext_json import json
24 from vcsserver import hooks
24 from vcsserver import hooks
25
25
26
26
27 def get_hg_ui(extras=None):
27 def get_hg_ui(extras=None):
28 """Create a Config object with a valid RC_SCM_DATA entry."""
28 """Create a Config object with a valid RC_SCM_DATA entry."""
29 extras = extras or {}
29 extras = extras or {}
30 required_extras = {
30 required_extras = {
31 'username': '',
31 'username': '',
32 'repository': '',
32 'repository': '',
33 'locked_by': '',
33 'locked_by': '',
34 'scm': '',
34 'scm': '',
35 'make_lock': '',
35 'make_lock': '',
36 'action': '',
36 'action': '',
37 'ip': '',
37 'ip': '',
38 }
38 }
39 required_extras.update(extras)
39 required_extras.update(extras)
40 hg_ui = mercurial.ui.ui()
40 hg_ui = mercurial.ui.ui()
41 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
41 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
42
42
43 return hg_ui
43 return hg_ui
44
44
45
45
46 def test_git_pre_receive_is_disabled():
46 def test_git_pre_receive_is_disabled():
47 extras = {'hooks': ['pull']}
47 extras = {'hooks': ['pull']}
48 response = hooks.git_pre_receive(None, None,
48 response = hooks.git_pre_receive(None, None,
49 {'RC_SCM_DATA': json.dumps(extras)})
49 {'RC_SCM_DATA': json.dumps(extras)})
50
50
51 assert response == 0
51 assert response == 0
52
52
53
53
54 def test_git_post_receive_is_disabled():
54 def test_git_post_receive_is_disabled():
55 extras = {'hooks': ['pull']}
55 extras = {'hooks': ['pull']}
56 response = hooks.git_post_receive(None, '',
56 response = hooks.git_post_receive(None, '',
57 {'RC_SCM_DATA': json.dumps(extras)})
57 {'RC_SCM_DATA': json.dumps(extras)})
58
58
59 assert response == 0
59 assert response == 0
60
60
61
61
62 def test_git_post_receive_calls_repo_size():
62 def test_git_post_receive_calls_repo_size():
63 extras = {'hooks': ['push', 'repo_size']}
63 extras = {'hooks': ['push', 'repo_size']}
64
64
65 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
65 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
66 hooks.git_post_receive(
66 hooks.git_post_receive(
67 None, '', {'RC_SCM_DATA': json.dumps(extras)})
67 None, '', {'RC_SCM_DATA': json.dumps(extras)})
68 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
68 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
69 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
69 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
70 expected_calls = [
70 expected_calls = [
71 mock.call('repo_size', extras, mock.ANY),
71 mock.call('repo_size', extras, mock.ANY),
72 mock.call('post_push', extras, mock.ANY),
72 mock.call('post_push', extras, mock.ANY),
73 ]
73 ]
74 assert call_hook_mock.call_args_list == expected_calls
74 assert call_hook_mock.call_args_list == expected_calls
75
75
76
76
77 def test_git_post_receive_does_not_call_disabled_repo_size():
77 def test_git_post_receive_does_not_call_disabled_repo_size():
78 extras = {'hooks': ['push']}
78 extras = {'hooks': ['push']}
79
79
80 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
80 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
81 hooks.git_post_receive(
81 hooks.git_post_receive(
82 None, '', {'RC_SCM_DATA': json.dumps(extras)})
82 None, '', {'RC_SCM_DATA': json.dumps(extras)})
83 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
83 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
84 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
84 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
85 expected_calls = [
85 expected_calls = [
86 mock.call('post_push', extras, mock.ANY)
86 mock.call('post_push', extras, mock.ANY)
87 ]
87 ]
88 assert call_hook_mock.call_args_list == expected_calls
88 assert call_hook_mock.call_args_list == expected_calls
89
89
90
90
91 def test_repo_size_exception_does_not_affect_git_post_receive():
91 def test_repo_size_exception_does_not_affect_git_post_receive():
92 extras = {'hooks': ['push', 'repo_size']}
92 extras = {'hooks': ['push', 'repo_size']}
93 status = 0
93 status = 0
94
94
95 def side_effect(name, *args, **kwargs):
95 def side_effect(name, *args, **kwargs):
96 if name == 'repo_size':
96 if name == 'repo_size':
97 raise Exception('Fake exception')
97 raise Exception('Fake exception')
98 else:
98 else:
99 return status
99 return status
100
100
101 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
101 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
102 call_hook_mock.side_effect = side_effect
102 call_hook_mock.side_effect = side_effect
103 result = hooks.git_post_receive(
103 result = hooks.git_post_receive(
104 None, '', {'RC_SCM_DATA': json.dumps(extras)})
104 None, '', {'RC_SCM_DATA': json.dumps(extras)})
105 assert result == status
105 assert result == status
106
106
107
107
108 def test_git_pre_pull_is_disabled():
108 def test_git_pre_pull_is_disabled():
109 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
109 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
110
110
111
111
112 def test_git_post_pull_is_disabled():
112 def test_git_post_pull_is_disabled():
113 assert (
113 assert (
114 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
114 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
115
115
116
116
117 class TestGetHooksClient:
117 class TestGetHooksClient:
118
118
119 def test_return_celery_client_when_queue_and_backend_provided(self):
119 def test_return_celery_client_when_queue_and_backend_provided(self):
120 extras = {
120 extras = {
121 'hooks_protocol': 'celery',
121 'hooks_protocol': 'celery',
122 'hooks_config': {'broker_url': 'redis://task_queue:0', 'result_backend': 'redis://task_queue:0'}
122 'hooks_config': {'broker_url': 'redis://task_queue:0', 'result_backend': 'redis://task_queue:0'}
123 }
123 }
124 result = hooks._get_hooks_client(extras)
124 result = hooks._get_hooks_client(extras)
125 assert isinstance(result, hooks.HooksCeleryClient)
125 assert isinstance(result, hooks.HooksCeleryClient)
126
126
127
127
128 class TestHooksCeleryClient:
128 class TestHooksCeleryClient:
129
129
130 def test_hooks_http_client_init(self):
130 def test_hooks_http_client_init(self):
131 queue = 'redis://redis:6379/0'
131 queue = 'redis://redis:6379/0'
132 backend = 'redis://redis:6379/0'
132 backend = 'redis://redis:6379/0'
133 client = hooks.HooksCeleryClient(queue, backend)
133 client = hooks.HooksCeleryClient(queue, backend)
134 assert client.celery_app.conf.broker_url == queue
134 assert client.celery_app.conf.broker_url == queue
135
135
136 def test_hooks_http_client_init_with_extra_opts(self):
136 def test_hooks_http_client_init_with_extra_opts(self):
137 queue = 'redis://redis:6379/0'
137 queue = 'redis://redis:6379/0'
138 backend = 'redis://redis:6379/0'
138 backend = 'redis://redis:6379/0'
139 client = hooks.HooksCeleryClient(queue, backend, _celery_opts={'task_always_eager': True})
139 client = hooks.HooksCeleryClient(queue, backend, _celery_opts={'task_always_eager': True})
140 assert client.celery_app.conf.broker_url == queue
140 assert client.celery_app.conf.broker_url == queue
141 assert client.celery_app.conf.task_always_eager == True
141 assert client.celery_app.conf.task_always_eager == True
@@ -1,289 +1,289 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import stat
20 import stat
21 import pytest
21 import pytest
22 import vcsserver
22 import vcsserver
23 import tempfile
23 import tempfile
24 from vcsserver import hook_utils
24 from vcsserver import hook_utils
25 from vcsserver.hook_utils import set_permissions_if_needed, HOOKS_DIR_MODE, HOOKS_FILE_MODE
25 from vcsserver.hook_utils import set_permissions_if_needed, HOOKS_DIR_MODE, HOOKS_FILE_MODE
26 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.tests.fixture import no_newline_id_generator
27 from vcsserver.lib.str_utils import safe_bytes
27 from vcsserver.lib.str_utils import safe_bytes
28 from vcsserver.utils import AttributeDict
28 from vcsserver.utils import AttributeDict
29
29
30
30
31 class TestCheckRhodecodeHook:
31 class TestCheckRhodecodeHook:
32
32
33 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
33 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
34 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
34 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
35 with open(hook, 'wb') as f:
35 with open(hook, 'wb') as f:
36 f.write(b'dummy test')
36 f.write(b'dummy test')
37 result = hook_utils.check_rhodecode_hook(hook)
37 result = hook_utils.check_rhodecode_hook(hook)
38 assert result is False
38 assert result is False
39
39
40 def test_returns_true_when_no_hook_file_found(self, tmpdir):
40 def test_returns_true_when_no_hook_file_found(self, tmpdir):
41 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
41 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
42 result = hook_utils.check_rhodecode_hook(hook)
42 result = hook_utils.check_rhodecode_hook(hook)
43 assert result
43 assert result
44
44
45 @pytest.mark.parametrize("file_content, expected_result", [
45 @pytest.mark.parametrize("file_content, expected_result", [
46 ("RC_HOOK_VER = '3.3.3'\n", True),
46 ("RC_HOOK_VER = '3.3.3'\n", True),
47 ("RC_HOOK = '3.3.3'\n", False),
47 ("RC_HOOK = '3.3.3'\n", False),
48 ], ids=no_newline_id_generator)
48 ], ids=no_newline_id_generator)
49 def test_signatures(self, file_content, expected_result, tmpdir):
49 def test_signatures(self, file_content, expected_result, tmpdir):
50 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
50 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
51 with open(hook, 'wb') as f:
51 with open(hook, 'wb') as f:
52 f.write(safe_bytes(file_content))
52 f.write(safe_bytes(file_content))
53
53
54 result = hook_utils.check_rhodecode_hook(hook)
54 result = hook_utils.check_rhodecode_hook(hook)
55
55
56 assert result is expected_result
56 assert result is expected_result
57
57
58
58
59 class BaseInstallHooks:
59 class BaseInstallHooks:
60 HOOK_FILES = ()
60 HOOK_FILES = ()
61
61
62 def _check_hook_file_dir_mode(self, file_path):
62 def _check_hook_file_dir_mode(self, file_path):
63 dir_path = os.path.dirname(file_path)
63 dir_path = os.path.dirname(file_path)
64 assert os.path.exists(dir_path), f'dir {file_path} missing'
64 assert os.path.exists(dir_path), f'dir {file_path} missing'
65 stat_info = os.stat(dir_path)
65 stat_info = os.stat(dir_path)
66
66
67 file_mode = stat.S_IMODE(stat_info.st_mode)
67 file_mode = stat.S_IMODE(stat_info.st_mode)
68 expected_mode = int(HOOKS_DIR_MODE)
68 expected_mode = int(HOOKS_DIR_MODE)
69 assert expected_mode == file_mode, f'expected mode: {oct(expected_mode)} got: {oct(file_mode)} for {dir_path}'
69 assert expected_mode == file_mode, f'expected mode: {oct(expected_mode)} got: {oct(file_mode)} for {dir_path}'
70
70
71 def _check_hook_file_mode(self, file_path):
71 def _check_hook_file_mode(self, file_path):
72 assert os.path.exists(file_path), f'path {file_path} missing'
72 assert os.path.exists(file_path), f'path {file_path} missing'
73 stat_info = os.stat(file_path)
73 stat_info = os.stat(file_path)
74
74
75 file_mode = stat.S_IMODE(stat_info.st_mode)
75 file_mode = stat.S_IMODE(stat_info.st_mode)
76 expected_mode = int(HOOKS_FILE_MODE)
76 expected_mode = int(HOOKS_FILE_MODE)
77 assert expected_mode == file_mode, f'expected mode: {oct(expected_mode)} got: {oct(file_mode)} for {file_path}'
77 assert expected_mode == file_mode, f'expected mode: {oct(expected_mode)} got: {oct(file_mode)} for {file_path}'
78
78
79 def _check_hook_file_content(self, file_path, executable):
79 def _check_hook_file_content(self, file_path, executable):
80 executable = executable or sys.executable
80 executable = executable or sys.executable
81 with open(file_path, 'rt') as hook_file:
81 with open(file_path, 'rt') as hook_file:
82 content = hook_file.read()
82 content = hook_file.read()
83
83
84 expected_env = '#!{}'.format(executable)
84 expected_env = '#!{}'.format(executable)
85 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(vcsserver.get_version())
85 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(vcsserver.get_version())
86 assert content.strip().startswith(expected_env)
86 assert content.strip().startswith(expected_env)
87 assert expected_rc_version in content
87 assert expected_rc_version in content
88
88
89 def _create_fake_hook(self, file_path, content):
89 def _create_fake_hook(self, file_path, content):
90 with open(file_path, 'w') as hook_file:
90 with open(file_path, 'w') as hook_file:
91 hook_file.write(content)
91 hook_file.write(content)
92
92
93 def create_dummy_repo(self, repo_type):
93 def create_dummy_repo(self, repo_type):
94 tmpdir = tempfile.mkdtemp()
94 tmpdir = tempfile.mkdtemp()
95 repo = AttributeDict()
95 repo = AttributeDict()
96 if repo_type == 'git':
96 if repo_type == 'git':
97 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
97 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
98 os.makedirs(repo.path)
98 os.makedirs(repo.path)
99 os.makedirs(os.path.join(repo.path, 'hooks'))
99 os.makedirs(os.path.join(repo.path, 'hooks'))
100 repo.bare = True
100 repo.bare = True
101
101
102 elif repo_type == 'svn':
102 elif repo_type == 'svn':
103 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
103 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
104 os.makedirs(repo.path)
104 os.makedirs(repo.path)
105 os.makedirs(os.path.join(repo.path, 'hooks'))
105 os.makedirs(os.path.join(repo.path, 'hooks'))
106
106
107 return repo
107 return repo
108
108
109 def check_hooks(self, repo_path, repo_bare=True):
109 def check_hooks(self, repo_path, repo_bare=True):
110 for file_name in self.HOOK_FILES:
110 for file_name in self.HOOK_FILES:
111 if repo_bare:
111 if repo_bare:
112 file_path = os.path.join(repo_path, 'hooks', file_name)
112 file_path = os.path.join(repo_path, 'hooks', file_name)
113 else:
113 else:
114 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
114 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
115
115
116 self._check_hook_file_dir_mode(file_path)
116 self._check_hook_file_dir_mode(file_path)
117 self._check_hook_file_mode(file_path)
117 self._check_hook_file_mode(file_path)
118 self._check_hook_file_content(file_path, sys.executable)
118 self._check_hook_file_content(file_path, sys.executable)
119
119
120
120
121 class TestInstallGitHooks(BaseInstallHooks):
121 class TestInstallGitHooks(BaseInstallHooks):
122 HOOK_FILES = ('pre-receive', 'post-receive')
122 HOOK_FILES = ('pre-receive', 'post-receive')
123
123
124 def test_hooks_are_installed(self):
124 def test_hooks_are_installed(self):
125 repo = self.create_dummy_repo('git')
125 repo = self.create_dummy_repo('git')
126 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 result = hook_utils.install_git_hooks(repo.path, repo.bare)
127 assert result
127 assert result
128 self.check_hooks(repo.path, repo.bare)
128 self.check_hooks(repo.path, repo.bare)
129
129
130 def test_hooks_are_replaced(self):
130 def test_hooks_are_replaced(self):
131 repo = self.create_dummy_repo('git')
131 repo = self.create_dummy_repo('git')
132 hooks_path = os.path.join(repo.path, 'hooks')
132 hooks_path = os.path.join(repo.path, 'hooks')
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 self._create_fake_hook(
134 self._create_fake_hook(
135 file_path, content="RC_HOOK_VER = 'abcde'\n")
135 file_path, content="RC_HOOK_VER = 'abcde'\n")
136
136
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 assert result
138 assert result
139 self.check_hooks(repo.path, repo.bare)
139 self.check_hooks(repo.path, repo.bare)
140
140
141 def test_non_rc_hooks_are_not_replaced(self):
141 def test_non_rc_hooks_are_not_replaced(self):
142 repo = self.create_dummy_repo('git')
142 repo = self.create_dummy_repo('git')
143 hooks_path = os.path.join(repo.path, 'hooks')
143 hooks_path = os.path.join(repo.path, 'hooks')
144 non_rc_content = 'echo "non rc hook"\n'
144 non_rc_content = 'echo "non rc hook"\n'
145 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
145 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
146 self._create_fake_hook(
146 self._create_fake_hook(
147 file_path, content=non_rc_content)
147 file_path, content=non_rc_content)
148
148
149 result = hook_utils.install_git_hooks(repo.path, repo.bare)
149 result = hook_utils.install_git_hooks(repo.path, repo.bare)
150 assert result
150 assert result
151
151
152 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
152 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
153 with open(file_path, 'rt') as hook_file:
153 with open(file_path, 'rt') as hook_file:
154 content = hook_file.read()
154 content = hook_file.read()
155 assert content == non_rc_content
155 assert content == non_rc_content
156
156
157 def test_non_rc_hooks_are_replaced_with_force_flag(self):
157 def test_non_rc_hooks_are_replaced_with_force_flag(self):
158 repo = self.create_dummy_repo('git')
158 repo = self.create_dummy_repo('git')
159 hooks_path = os.path.join(repo.path, 'hooks')
159 hooks_path = os.path.join(repo.path, 'hooks')
160 non_rc_content = 'echo "non rc hook"\n'
160 non_rc_content = 'echo "non rc hook"\n'
161 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
161 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
162 self._create_fake_hook(
162 self._create_fake_hook(
163 file_path, content=non_rc_content)
163 file_path, content=non_rc_content)
164
164
165 result = hook_utils.install_git_hooks(
165 result = hook_utils.install_git_hooks(
166 repo.path, repo.bare, force_create=True)
166 repo.path, repo.bare, force_create=True)
167 assert result
167 assert result
168 self.check_hooks(repo.path, repo.bare)
168 self.check_hooks(repo.path, repo.bare)
169
169
170
170
171 class TestInstallSvnHooks(BaseInstallHooks):
171 class TestInstallSvnHooks(BaseInstallHooks):
172 HOOK_FILES = ('pre-commit', 'post-commit')
172 HOOK_FILES = ('pre-commit', 'post-commit')
173
173
174 def test_hooks_are_installed(self):
174 def test_hooks_are_installed(self):
175 repo = self.create_dummy_repo('svn')
175 repo = self.create_dummy_repo('svn')
176 result = hook_utils.install_svn_hooks(repo.path)
176 result = hook_utils.install_svn_hooks(repo.path)
177 assert result
177 assert result
178 self.check_hooks(repo.path)
178 self.check_hooks(repo.path)
179
179
180 def test_hooks_are_replaced(self):
180 def test_hooks_are_replaced(self):
181 repo = self.create_dummy_repo('svn')
181 repo = self.create_dummy_repo('svn')
182 hooks_path = os.path.join(repo.path, 'hooks')
182 hooks_path = os.path.join(repo.path, 'hooks')
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 self._create_fake_hook(
184 self._create_fake_hook(
185 file_path, content="RC_HOOK_VER = 'abcde'\n")
185 file_path, content="RC_HOOK_VER = 'abcde'\n")
186
186
187 result = hook_utils.install_svn_hooks(repo.path)
187 result = hook_utils.install_svn_hooks(repo.path)
188 assert result
188 assert result
189 self.check_hooks(repo.path)
189 self.check_hooks(repo.path)
190
190
191 def test_non_rc_hooks_are_not_replaced(self):
191 def test_non_rc_hooks_are_not_replaced(self):
192 repo = self.create_dummy_repo('svn')
192 repo = self.create_dummy_repo('svn')
193 hooks_path = os.path.join(repo.path, 'hooks')
193 hooks_path = os.path.join(repo.path, 'hooks')
194 non_rc_content = 'echo "non rc hook"\n'
194 non_rc_content = 'echo "non rc hook"\n'
195 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
195 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
196 self._create_fake_hook(
196 self._create_fake_hook(
197 file_path, content=non_rc_content)
197 file_path, content=non_rc_content)
198
198
199 result = hook_utils.install_svn_hooks(repo.path)
199 result = hook_utils.install_svn_hooks(repo.path)
200 assert result
200 assert result
201
201
202 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
202 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
203 with open(file_path, 'rt') as hook_file:
203 with open(file_path, 'rt') as hook_file:
204 content = hook_file.read()
204 content = hook_file.read()
205 assert content == non_rc_content
205 assert content == non_rc_content
206
206
207 def test_non_rc_hooks_are_replaced_with_force_flag(self):
207 def test_non_rc_hooks_are_replaced_with_force_flag(self):
208 repo = self.create_dummy_repo('svn')
208 repo = self.create_dummy_repo('svn')
209 hooks_path = os.path.join(repo.path, 'hooks')
209 hooks_path = os.path.join(repo.path, 'hooks')
210 non_rc_content = 'echo "non rc hook"\n'
210 non_rc_content = 'echo "non rc hook"\n'
211 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
211 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
212 self._create_fake_hook(
212 self._create_fake_hook(
213 file_path, content=non_rc_content)
213 file_path, content=non_rc_content)
214
214
215 result = hook_utils.install_svn_hooks(
215 result = hook_utils.install_svn_hooks(
216 repo.path, force_create=True)
216 repo.path, force_create=True)
217 assert result
217 assert result
218 self.check_hooks(repo.path, )
218 self.check_hooks(repo.path, )
219
219
220
220
221 def create_test_file(filename):
221 def create_test_file(filename):
222 """Utility function to create a test file."""
222 """Utility function to create a test file."""
223 with open(filename, 'w') as f:
223 with open(filename, 'w') as f:
224 f.write("Test file")
224 f.write("Test file")
225
225
226
226
227 def remove_test_file(filename):
227 def remove_test_file(filename):
228 """Utility function to remove a test file."""
228 """Utility function to remove a test file."""
229 if os.path.exists(filename):
229 if os.path.exists(filename):
230 os.remove(filename)
230 os.remove(filename)
231
231
232
232
233 @pytest.fixture
233 @pytest.fixture
234 def test_file():
234 def test_file():
235 filename = 'test_file.txt'
235 filename = 'test_file.txt'
236 create_test_file(filename)
236 create_test_file(filename)
237 yield filename
237 yield filename
238 remove_test_file(filename)
238 remove_test_file(filename)
239
239
240
240
241 def test_increase_permissions(test_file):
241 def test_increase_permissions(test_file):
242 # Set initial lower permissions
242 # Set initial lower permissions
243 initial_perms = 0o644
243 initial_perms = 0o644
244 os.chmod(test_file, initial_perms)
244 os.chmod(test_file, initial_perms)
245
245
246 # Set higher permissions
246 # Set higher permissions
247 new_perms = 0o666
247 new_perms = 0o666
248 set_permissions_if_needed(test_file, new_perms)
248 set_permissions_if_needed(test_file, new_perms)
249
249
250 # Check if permissions were updated
250 # Check if permissions were updated
251 assert (os.stat(test_file).st_mode & 0o777) == new_perms
251 assert (os.stat(test_file).st_mode & 0o777) == new_perms
252
252
253
253
254 def test_no_permission_change_needed(test_file):
254 def test_no_permission_change_needed(test_file):
255 # Set initial permissions
255 # Set initial permissions
256 initial_perms = 0o666
256 initial_perms = 0o666
257 os.chmod(test_file, initial_perms)
257 os.chmod(test_file, initial_perms)
258
258
259 # Attempt to set the same permissions
259 # Attempt to set the same permissions
260 set_permissions_if_needed(test_file, initial_perms)
260 set_permissions_if_needed(test_file, initial_perms)
261
261
262 # Check if permissions were unchanged
262 # Check if permissions were unchanged
263 assert (os.stat(test_file).st_mode & 0o777) == initial_perms
263 assert (os.stat(test_file).st_mode & 0o777) == initial_perms
264
264
265
265
266 def test_no_permission_reduction(test_file):
266 def test_no_permission_reduction(test_file):
267 # Set initial higher permissions
267 # Set initial higher permissions
268 initial_perms = 0o666
268 initial_perms = 0o666
269 os.chmod(test_file, initial_perms)
269 os.chmod(test_file, initial_perms)
270
270
271 # Attempt to set lower permissions
271 # Attempt to set lower permissions
272 lower_perms = 0o644
272 lower_perms = 0o644
273 set_permissions_if_needed(test_file, lower_perms)
273 set_permissions_if_needed(test_file, lower_perms)
274
274
275 # Check if permissions were not reduced
275 # Check if permissions were not reduced
276 assert (os.stat(test_file).st_mode & 0o777) == initial_perms
276 assert (os.stat(test_file).st_mode & 0o777) == initial_perms
277
277
278
278
279 def test_no_permission_reduction_when_on_777(test_file):
279 def test_no_permission_reduction_when_on_777(test_file):
280 # Set initial higher permissions
280 # Set initial higher permissions
281 initial_perms = 0o777
281 initial_perms = 0o777
282 os.chmod(test_file, initial_perms)
282 os.chmod(test_file, initial_perms)
283
283
284 # Attempt to set lower permissions
284 # Attempt to set lower permissions
285 lower_perms = 0o755
285 lower_perms = 0o755
286 set_permissions_if_needed(test_file, lower_perms)
286 set_permissions_if_needed(test_file, lower_perms)
287
287
288 # Check if permissions were not reduced
288 # Check if permissions were not reduced
289 assert (os.stat(test_file).st_mode & 0o777) == initial_perms
289 assert (os.stat(test_file).st_mode & 0o777) == initial_perms
@@ -1,56 +1,56 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import http_main
21 from vcsserver import http_main
22 from vcsserver.base import obfuscate_qs
22 from vcsserver.base import obfuscate_qs
23
23
24
24
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 http_main.main({'__file__': ''})
28 http_main.main({'__file__': ''})
29 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
30
30
31
31
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 @mock.patch(
34 @mock.patch(
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 http_main.main({'__file__': ''})
38 http_main.main({'__file__': ''})
39
39
40
40
41 @pytest.mark.parametrize('given, expected', [
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret', 'a;b;c;query&foo=bar&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret', 'a;b;c;query&foo=bar&auth_token=*****'),
46 ('', ''),
46 ('', ''),
47 (None, None),
47 (None, None),
48 ('foo=bar', 'foo=bar'),
48 ('foo=bar', 'foo=bar'),
49 ('auth_token=secret', 'auth_token=*****'),
49 ('auth_token=secret', 'auth_token=*****'),
50 ('auth_token=secret&api_key=secret2',
50 ('auth_token=secret&api_key=secret2',
51 'auth_token=*****&api_key=*****'),
51 'auth_token=*****&api_key=*****'),
52 ('auth_token=secret&api_key=secret2&param=value',
52 ('auth_token=secret&api_key=secret2&param=value',
53 'auth_token=*****&api_key=*****&param=value'),
53 'auth_token=*****&api_key=*****&param=value'),
54 ])
54 ])
55 def test_obfuscate_qs(given, expected):
55 def test_obfuscate_qs(given, expected):
56 assert expected == obfuscate_qs(given)
56 assert expected == obfuscate_qs(given)
@@ -1,295 +1,295 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import more_itertools
19 import more_itertools
20
20
21 import dulwich.protocol
21 import dulwich.protocol
22 import mock
22 import mock
23 import pytest
23 import pytest
24 import webob
24 import webob
25 import webtest
25 import webtest
26
26
27 from vcsserver import hooks, pygrack
27 from vcsserver import hooks, pygrack
28
28
29 from vcsserver.lib.str_utils import ascii_bytes
29 from vcsserver.lib.str_utils import ascii_bytes
30
30
31
31
32 @pytest.fixture()
32 @pytest.fixture()
33 def pygrack_instance(tmpdir):
33 def pygrack_instance(tmpdir):
34 """
34 """
35 Creates a pygrack app instance.
35 Creates a pygrack app instance.
36
36
37 Right now, it does not much helpful regarding the passed directory.
37 Right now, it does not much helpful regarding the passed directory.
38 It just contains the required folders to pass the signature test.
38 It just contains the required folders to pass the signature test.
39 """
39 """
40 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
41 tmpdir.mkdir(dir_name)
41 tmpdir.mkdir(dir_name)
42
42
43 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
44
44
45
45
46 @pytest.fixture()
46 @pytest.fixture()
47 def pygrack_app(pygrack_instance):
47 def pygrack_app(pygrack_instance):
48 """
48 """
49 Creates a pygrack app wrapped in webtest.TestApp.
49 Creates a pygrack app wrapped in webtest.TestApp.
50 """
50 """
51 return webtest.TestApp(pygrack_instance)
51 return webtest.TestApp(pygrack_instance)
52
52
53
53
54 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 def test_invalid_service_info_refs_returns_403(pygrack_app):
55 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 response = pygrack_app.get('/info/refs?service=git-upload-packs',
56 expect_errors=True)
56 expect_errors=True)
57
57
58 assert response.status_int == 403
58 assert response.status_int == 403
59
59
60
60
61 def test_invalid_endpoint_returns_403(pygrack_app):
61 def test_invalid_endpoint_returns_403(pygrack_app):
62 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
63
63
64 assert response.status_int == 403
64 assert response.status_int == 403
65
65
66
66
67 @pytest.mark.parametrize('sideband', [
67 @pytest.mark.parametrize('sideband', [
68 'side-band-64k',
68 'side-band-64k',
69 'side-band',
69 'side-band',
70 'side-band no-progress',
70 'side-band no-progress',
71 ])
71 ])
72 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
73 request = ''.join([
73 request = ''.join([
74 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
75 f'multi_ack {sideband} ofs-delta\n',
75 f'multi_ack {sideband} ofs-delta\n',
76 '0000',
76 '0000',
77 '0009done\n',
77 '0009done\n',
78 ])
78 ])
79 with mock.patch('vcsserver.hooks.git_pre_pull', return_value=hooks.HookResponse(1, 'foo')):
79 with mock.patch('vcsserver.hooks.git_pre_pull', return_value=hooks.HookResponse(1, 'foo')):
80 response = pygrack_app.post(
80 response = pygrack_app.post(
81 '/git-upload-pack', params=request,
81 '/git-upload-pack', params=request,
82 content_type='application/x-git-upload-pack')
82 content_type='application/x-git-upload-pack')
83
83
84 data = io.BytesIO(response.body)
84 data = io.BytesIO(response.body)
85 proto = dulwich.protocol.Protocol(data.read, None)
85 proto = dulwich.protocol.Protocol(data.read, None)
86 packets = list(proto.read_pkt_seq())
86 packets = list(proto.read_pkt_seq())
87
87
88 expected_packets = [
88 expected_packets = [
89 b'NAK\n', b'\x02foo', b'\x02Pre pull hook failed: aborting\n',
89 b'NAK\n', b'\x02foo', b'\x02Pre pull hook failed: aborting\n',
90 b'\x01' + pygrack.GitRepository.EMPTY_PACK,
90 b'\x01' + pygrack.GitRepository.EMPTY_PACK,
91 ]
91 ]
92 assert packets == expected_packets
92 assert packets == expected_packets
93
93
94
94
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 request = ''.join([
96 request = ''.join([
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 'multi_ack ofs-delta\n'
98 'multi_ack ofs-delta\n'
99 '0000',
99 '0000',
100 '0009done\n',
100 '0009done\n',
101 ])
101 ])
102 with mock.patch('vcsserver.hooks.git_pre_pull',
102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 return_value=hooks.HookResponse(1, 'foo')):
103 return_value=hooks.HookResponse(1, 'foo')):
104 response = pygrack_app.post(
104 response = pygrack_app.post(
105 '/git-upload-pack', params=request,
105 '/git-upload-pack', params=request,
106 content_type='application/x-git-upload-pack')
106 content_type='application/x-git-upload-pack')
107
107
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109
109
110
110
111 def test_pull_has_hook_messages(pygrack_app):
111 def test_pull_has_hook_messages(pygrack_app):
112 request = ''.join([
112 request = ''.join([
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 'multi_ack side-band-64k ofs-delta\n'
114 'multi_ack side-band-64k ofs-delta\n'
115 '0000',
115 '0000',
116 '0009done\n',
116 '0009done\n',
117 ])
117 ])
118
118
119 pre_pull = 'pre_pull_output'
119 pre_pull = 'pre_pull_output'
120 post_pull = 'post_pull_output'
120 post_pull = 'post_pull_output'
121
121
122 with mock.patch('vcsserver.hooks.git_pre_pull',
122 with mock.patch('vcsserver.hooks.git_pre_pull',
123 return_value=hooks.HookResponse(0, pre_pull)):
123 return_value=hooks.HookResponse(0, pre_pull)):
124 with mock.patch('vcsserver.hooks.git_post_pull',
124 with mock.patch('vcsserver.hooks.git_post_pull',
125 return_value=hooks.HookResponse(1, post_pull)):
125 return_value=hooks.HookResponse(1, post_pull)):
126 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
126 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
127 return_value=more_itertools.always_iterable([b'0008NAK\n0009subp\n0000'])):
127 return_value=more_itertools.always_iterable([b'0008NAK\n0009subp\n0000'])):
128 response = pygrack_app.post(
128 response = pygrack_app.post(
129 '/git-upload-pack', params=request,
129 '/git-upload-pack', params=request,
130 content_type='application/x-git-upload-pack')
130 content_type='application/x-git-upload-pack')
131
131
132 data = io.BytesIO(response.body)
132 data = io.BytesIO(response.body)
133 proto = dulwich.protocol.Protocol(data.read, None)
133 proto = dulwich.protocol.Protocol(data.read, None)
134 packets = list(proto.read_pkt_seq())
134 packets = list(proto.read_pkt_seq())
135
135
136 assert packets == [b'NAK\n',
136 assert packets == [b'NAK\n',
137 # pre-pull only outputs if IT FAILS as in != 0 ret code
137 # pre-pull only outputs if IT FAILS as in != 0 ret code
138 #b'\x02pre_pull_output',
138 #b'\x02pre_pull_output',
139 b'subp\n',
139 b'subp\n',
140 b'\x02post_pull_output']
140 b'\x02post_pull_output']
141
141
142
142
143 def test_get_want_capabilities(pygrack_instance):
143 def test_get_want_capabilities(pygrack_instance):
144 data = io.BytesIO(
144 data = io.BytesIO(
145 b'0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
145 b'0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
146 b'multi_ack side-band-64k ofs-delta\n00000009done\n')
146 b'multi_ack side-band-64k ofs-delta\n00000009done\n')
147
147
148 request = webob.Request({
148 request = webob.Request({
149 'wsgi.input': data,
149 'wsgi.input': data,
150 'REQUEST_METHOD': 'POST',
150 'REQUEST_METHOD': 'POST',
151 'webob.is_body_seekable': True
151 'webob.is_body_seekable': True
152 })
152 })
153
153
154 capabilities = pygrack_instance._get_want_capabilities(request)
154 capabilities = pygrack_instance._get_want_capabilities(request)
155
155
156 assert capabilities == frozenset(
156 assert capabilities == frozenset(
157 (b'ofs-delta', b'multi_ack', b'side-band-64k'))
157 (b'ofs-delta', b'multi_ack', b'side-band-64k'))
158 assert data.tell() == 0
158 assert data.tell() == 0
159
159
160
160
161 @pytest.mark.parametrize('data,capabilities,expected', [
161 @pytest.mark.parametrize('data,capabilities,expected', [
162 ('foo', [], []),
162 ('foo', [], []),
163 ('', [pygrack.CAPABILITY_SIDE_BAND_64K], []),
163 ('', [pygrack.CAPABILITY_SIDE_BAND_64K], []),
164 ('', [pygrack.CAPABILITY_SIDE_BAND], []),
164 ('', [pygrack.CAPABILITY_SIDE_BAND], []),
165 ('foo', [pygrack.CAPABILITY_SIDE_BAND_64K], [b'0008\x02foo']),
165 ('foo', [pygrack.CAPABILITY_SIDE_BAND_64K], [b'0008\x02foo']),
166 ('foo', [pygrack.CAPABILITY_SIDE_BAND], [b'0008\x02foo']),
166 ('foo', [pygrack.CAPABILITY_SIDE_BAND], [b'0008\x02foo']),
167 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'03ed\x02' + b'f' * 1000]),
167 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'03ed\x02' + b'f' * 1000]),
168 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995, b'000a\x02fffff']),
168 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995, b'000a\x02fffff']),
169 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'fff0\x02' + b'f' * 65515, b'000a\x02fffff']),
169 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'fff0\x02' + b'f' * 65515, b'000a\x02fffff']),
170 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995] * 65 + [b'0352\x02' + b'f' * 845]),
170 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995] * 65 + [b'0352\x02' + b'f' * 845]),
171 ], ids=[
171 ], ids=[
172 'foo-empty',
172 'foo-empty',
173 'empty-64k', 'empty',
173 'empty-64k', 'empty',
174 'foo-64k', 'foo',
174 'foo-64k', 'foo',
175 'f-1000-64k', 'f-1000',
175 'f-1000-64k', 'f-1000',
176 'f-65520-64k', 'f-65520'])
176 'f-65520-64k', 'f-65520'])
177 def test_get_messages(pygrack_instance, data, capabilities, expected):
177 def test_get_messages(pygrack_instance, data, capabilities, expected):
178 messages = pygrack_instance._get_messages(data, capabilities)
178 messages = pygrack_instance._get_messages(data, capabilities)
179
179
180 assert messages == expected
180 assert messages == expected
181
181
182
182
183 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
183 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
184 # Unexpected response
184 # Unexpected response
185 ([b'unexpected_response[no_initial_header]'], [pygrack.CAPABILITY_SIDE_BAND_64K], 'foo', 'bar'),
185 ([b'unexpected_response[no_initial_header]'], [pygrack.CAPABILITY_SIDE_BAND_64K], 'foo', 'bar'),
186 # No sideband
186 # No sideband
187 ([b'no-sideband'], [], 'foo', 'bar'),
187 ([b'no-sideband'], [], 'foo', 'bar'),
188 # No messages
188 # No messages
189 ([b'no-messages'], [pygrack.CAPABILITY_SIDE_BAND_64K], '', ''),
189 ([b'no-messages'], [pygrack.CAPABILITY_SIDE_BAND_64K], '', ''),
190 ])
190 ])
191 def test_inject_messages_to_response_nothing_to_do(
191 def test_inject_messages_to_response_nothing_to_do(
192 pygrack_instance, response, capabilities, pre_pull_messages, post_pull_messages):
192 pygrack_instance, response, capabilities, pre_pull_messages, post_pull_messages):
193
193
194 new_response = pygrack_instance._build_post_pull_response(
194 new_response = pygrack_instance._build_post_pull_response(
195 more_itertools.always_iterable(response), capabilities, pre_pull_messages, post_pull_messages)
195 more_itertools.always_iterable(response), capabilities, pre_pull_messages, post_pull_messages)
196
196
197 assert list(new_response) == response
197 assert list(new_response) == response
198
198
199
199
200 @pytest.mark.parametrize('capabilities', [
200 @pytest.mark.parametrize('capabilities', [
201 [pygrack.CAPABILITY_SIDE_BAND],
201 [pygrack.CAPABILITY_SIDE_BAND],
202 [pygrack.CAPABILITY_SIDE_BAND_64K],
202 [pygrack.CAPABILITY_SIDE_BAND_64K],
203 ])
203 ])
204 def test_inject_messages_to_response_single_element(pygrack_instance, capabilities):
204 def test_inject_messages_to_response_single_element(pygrack_instance, capabilities):
205 response = [b'0008NAK\n0009subp\n0000']
205 response = [b'0008NAK\n0009subp\n0000']
206 new_response = pygrack_instance._build_post_pull_response(
206 new_response = pygrack_instance._build_post_pull_response(
207 more_itertools.always_iterable(response), capabilities, 'foo', 'bar')
207 more_itertools.always_iterable(response), capabilities, 'foo', 'bar')
208
208
209 expected_response = b''.join([
209 expected_response = b''.join([
210 b'0008NAK\n',
210 b'0008NAK\n',
211 b'0008\x02foo',
211 b'0008\x02foo',
212 b'0009subp\n',
212 b'0009subp\n',
213 b'0008\x02bar',
213 b'0008\x02bar',
214 b'0000'])
214 b'0000'])
215
215
216 assert b''.join(new_response) == expected_response
216 assert b''.join(new_response) == expected_response
217
217
218
218
219 @pytest.mark.parametrize('capabilities', [
219 @pytest.mark.parametrize('capabilities', [
220 [pygrack.CAPABILITY_SIDE_BAND],
220 [pygrack.CAPABILITY_SIDE_BAND],
221 [pygrack.CAPABILITY_SIDE_BAND_64K],
221 [pygrack.CAPABILITY_SIDE_BAND_64K],
222 ])
222 ])
223 def test_inject_messages_to_response_multi_element(pygrack_instance, capabilities):
223 def test_inject_messages_to_response_multi_element(pygrack_instance, capabilities):
224 response = more_itertools.always_iterable([
224 response = more_itertools.always_iterable([
225 b'0008NAK\n000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n0000'
225 b'0008NAK\n000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n0000'
226 ])
226 ])
227 new_response = pygrack_instance._build_post_pull_response(response, capabilities, 'foo', 'bar')
227 new_response = pygrack_instance._build_post_pull_response(response, capabilities, 'foo', 'bar')
228
228
229 expected_response = b''.join([
229 expected_response = b''.join([
230 b'0008NAK\n',
230 b'0008NAK\n',
231 b'0008\x02foo',
231 b'0008\x02foo',
232 b'000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n',
232 b'000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n',
233 b'0008\x02bar',
233 b'0008\x02bar',
234 b'0000'
234 b'0000'
235 ])
235 ])
236
236
237 assert b''.join(new_response) == expected_response
237 assert b''.join(new_response) == expected_response
238
238
239
239
240 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
240 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
241 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
241 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
242
242
243 assert response == [pygrack.GitRepository.EMPTY_PACK]
243 assert response == [pygrack.GitRepository.EMPTY_PACK]
244
244
245
245
246 @pytest.mark.parametrize('capabilities', [
246 @pytest.mark.parametrize('capabilities', [
247 [pygrack.CAPABILITY_SIDE_BAND],
247 [pygrack.CAPABILITY_SIDE_BAND],
248 [pygrack.CAPABILITY_SIDE_BAND_64K],
248 [pygrack.CAPABILITY_SIDE_BAND_64K],
249 [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'],
249 [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'],
250 ])
250 ])
251 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
251 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
252 response = pygrack_instance._build_failed_pre_pull_response(capabilities, 'foo')
252 response = pygrack_instance._build_failed_pre_pull_response(capabilities, 'foo')
253
253
254 expected_response = [
254 expected_response = [
255 b'0008NAK\n', b'0008\x02foo', b'0024\x02Pre pull hook failed: aborting\n',
255 b'0008NAK\n', b'0008\x02foo', b'0024\x02Pre pull hook failed: aborting\n',
256 b'%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5, pygrack.GitRepository.EMPTY_PACK),
256 b'%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5, pygrack.GitRepository.EMPTY_PACK),
257 pygrack.GitRepository.FLUSH_PACKET,
257 pygrack.GitRepository.FLUSH_PACKET,
258 ]
258 ]
259
259
260 assert response == expected_response
260 assert response == expected_response
261
261
262
262
263 def test_inject_messages_to_response_generator(pygrack_instance):
263 def test_inject_messages_to_response_generator(pygrack_instance):
264
264
265 def response_generator():
265 def response_generator():
266 response = [
266 response = [
267 # protocol start
267 # protocol start
268 b'0008NAK\n',
268 b'0008NAK\n',
269 ]
269 ]
270 response += [ascii_bytes(f'000asubp{x}\n') for x in range(1000)]
270 response += [ascii_bytes(f'000asubp{x}\n') for x in range(1000)]
271 response += [
271 response += [
272 # protocol end
272 # protocol end
273 pygrack.GitRepository.FLUSH_PACKET
273 pygrack.GitRepository.FLUSH_PACKET
274 ]
274 ]
275 for elem in response:
275 for elem in response:
276 yield elem
276 yield elem
277
277
278 new_response = pygrack_instance._build_post_pull_response(
278 new_response = pygrack_instance._build_post_pull_response(
279 response_generator(), [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'], 'PRE_PULL_MSG\n', 'POST_PULL_MSG\n')
279 response_generator(), [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'], 'PRE_PULL_MSG\n', 'POST_PULL_MSG\n')
280
280
281 assert iter(new_response)
281 assert iter(new_response)
282
282
283 expected_response = b''.join([
283 expected_response = b''.join([
284 # start
284 # start
285 b'0008NAK\n0012\x02PRE_PULL_MSG\n',
285 b'0008NAK\n0012\x02PRE_PULL_MSG\n',
286 ] + [
286 ] + [
287 # ... rest
287 # ... rest
288 ascii_bytes(f'000asubp{x}\n') for x in range(1000)
288 ascii_bytes(f'000asubp{x}\n') for x in range(1000)
289 ] + [
289 ] + [
290 # final message,
290 # final message,
291 b'0013\x02POST_PULL_MSG\n0000',
291 b'0013\x02POST_PULL_MSG\n0000',
292
292
293 ])
293 ])
294
294
295 assert b''.join(new_response) == expected_response
295 assert b''.join(new_response) == expected_response
@@ -1,87 +1,87 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mercurial.hg
20 import mercurial.hg
21 import mercurial.ui
21 import mercurial.ui
22 import mercurial.error
22 import mercurial.error
23 import mock
23 import mock
24 import pytest
24 import pytest
25 import webtest
25 import webtest
26
26
27 from vcsserver import scm_app
27 from vcsserver import scm_app
28 from vcsserver.lib.str_utils import ascii_bytes
28 from vcsserver.lib.str_utils import ascii_bytes
29
29
30
30
31 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 def test_hg_does_not_accept_invalid_cmd(tmpdir):
32 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
32 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
33 app = webtest.TestApp(scm_app.HgWeb(repo))
33 app = webtest.TestApp(scm_app.HgWeb(repo))
34
34
35 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
36
36
37 assert response.status_int == 400
37 assert response.status_int == 400
38
38
39
39
40 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 def test_create_hg_wsgi_app_requirement_error(tmpdir):
41 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
41 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
42 config = (
42 config = (
43 ('paths', 'default', ''),
43 ('paths', 'default', ''),
44 )
44 )
45 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
46 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 hgweb_mock.side_effect = mercurial.error.RequirementError()
47 with pytest.raises(Exception):
47 with pytest.raises(Exception):
48 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
49
49
50
50
51 def test_git_returns_not_found(tmpdir):
51 def test_git_returns_not_found(tmpdir):
52 app = webtest.TestApp(
52 app = webtest.TestApp(
53 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
54
54
55 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 response = app.get('/repo_name/inforefs?service=git-upload-pack',
56 expect_errors=True)
56 expect_errors=True)
57
57
58 assert response.status_int == 404
58 assert response.status_int == 404
59
59
60
60
61 def test_git(tmpdir):
61 def test_git(tmpdir):
62 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
63 tmpdir.mkdir(dir_name)
63 tmpdir.mkdir(dir_name)
64
64
65 app = webtest.TestApp(
65 app = webtest.TestApp(
66 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
67
67
68 # We set service to git-upload-packs to trigger a 403
68 # We set service to git-upload-packs to trigger a 403
69 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 response = app.get('/repo_name/inforefs?service=git-upload-packs',
70 expect_errors=True)
70 expect_errors=True)
71
71
72 assert response.status_int == 403
72 assert response.status_int == 403
73
73
74
74
75 def test_git_fallbacks_to_git_folder(tmpdir):
75 def test_git_fallbacks_to_git_folder(tmpdir):
76 tmpdir.mkdir('.git')
76 tmpdir.mkdir('.git')
77 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
78 tmpdir.mkdir(os.path.join('.git', dir_name))
78 tmpdir.mkdir(os.path.join('.git', dir_name))
79
79
80 app = webtest.TestApp(
80 app = webtest.TestApp(
81 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
82
82
83 # We set service to git-upload-packs to trigger a 403
83 # We set service to git-upload-packs to trigger a 403
84 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 response = app.get('/repo_name/inforefs?service=git-upload-packs',
85 expect_errors=True)
85 expect_errors=True)
86
86
87 assert response.status_int == 403
87 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 from vcsserver.server import VcsServer
23 from vcsserver.server import VcsServer
24
24
25
25
26 def test_provides_the_pid(server):
26 def test_provides_the_pid(server):
27 pid = server.get_pid()
27 pid = server.get_pid()
28 assert pid == os.getpid()
28 assert pid == os.getpid()
29
29
30
30
31 def test_allows_to_trigger_the_garbage_collector(server):
31 def test_allows_to_trigger_the_garbage_collector(server):
32 with mock.patch('gc.collect') as collect:
32 with mock.patch('gc.collect') as collect:
33 server.run_gc()
33 server.run_gc()
34 assert collect.called
34 assert collect.called
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def server():
38 def server():
39 return VcsServer()
39 return VcsServer()
@@ -1,155 +1,155 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21
21
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25 from vcsserver.lib.str_utils import ascii_bytes
25 from vcsserver.lib.str_utils import ascii_bytes
26
26
27
27
28 class FileLikeObj: # pragma: no cover
28 class FileLikeObj: # pragma: no cover
29
29
30 def __init__(self, data: bytes, size):
30 def __init__(self, data: bytes, size):
31 chunks = size // len(data)
31 chunks = size // len(data)
32
32
33 self.stream = self._get_stream(data, chunks)
33 self.stream = self._get_stream(data, chunks)
34
34
35 def _get_stream(self, data, chunks):
35 def _get_stream(self, data, chunks):
36 for x in range(chunks):
36 for x in range(chunks):
37 yield data
37 yield data
38
38
39 def read(self, n):
39 def read(self, n):
40
40
41 buffer_stream = b''
41 buffer_stream = b''
42 for chunk in self.stream:
42 for chunk in self.stream:
43 buffer_stream += chunk
43 buffer_stream += chunk
44 if len(buffer_stream) >= n:
44 if len(buffer_stream) >= n:
45 break
45 break
46
46
47 # self.stream = self.bytes[n:]
47 # self.stream = self.bytes[n:]
48 return buffer_stream
48 return buffer_stream
49
49
50
50
51 @pytest.fixture(scope='module')
51 @pytest.fixture(scope='module')
52 def environ():
52 def environ():
53 """Delete coverage variables, as they make the tests fail."""
53 """Delete coverage variables, as they make the tests fail."""
54 env = dict(os.environ)
54 env = dict(os.environ)
55 for key in list(env.keys()):
55 for key in list(env.keys()):
56 if key.startswith('COV_CORE_'):
56 if key.startswith('COV_CORE_'):
57 del env[key]
57 del env[key]
58
58
59 return env
59 return env
60
60
61
61
62 def _get_python_args(script):
62 def _get_python_args(script):
63 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
63 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
64
64
65
65
66 def test_raise_exception_on_non_zero_return_code(environ):
66 def test_raise_exception_on_non_zero_return_code(environ):
67 call_args = _get_python_args('raise ValueError("fail")')
67 call_args = _get_python_args('raise ValueError("fail")')
68 with pytest.raises(OSError):
68 with pytest.raises(OSError):
69 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
69 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
70
70
71
71
72 def test_does_not_fail_on_non_zero_return_code(environ):
72 def test_does_not_fail_on_non_zero_return_code(environ):
73 call_args = _get_python_args('sys.stdout.write("hello"); sys.exit(1)')
73 call_args = _get_python_args('sys.stdout.write("hello"); sys.exit(1)')
74 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
74 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
75 output = b''.join(proc)
75 output = b''.join(proc)
76
76
77 assert output == b'hello'
77 assert output == b'hello'
78
78
79
79
80 def test_raise_exception_on_stderr(environ):
80 def test_raise_exception_on_stderr(environ):
81 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); time.sleep(1);')
81 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); time.sleep(1);')
82
82
83 with pytest.raises(OSError) as excinfo:
83 with pytest.raises(OSError) as excinfo:
84 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
84 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
85
85
86 assert 'exited due to an error:\nWRITE_TO_STDERR' in str(excinfo.value)
86 assert 'exited due to an error:\nWRITE_TO_STDERR' in str(excinfo.value)
87
87
88
88
89 def test_does_not_fail_on_stderr(environ):
89 def test_does_not_fail_on_stderr(environ):
90 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); sys.stderr.flush; time.sleep(2);')
90 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); sys.stderr.flush; time.sleep(2);')
91 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_stderr=False, env=environ)
91 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_stderr=False, env=environ)
92 output = b''.join(proc)
92 output = b''.join(proc)
93
93
94 assert output == b''
94 assert output == b''
95
95
96
96
97 @pytest.mark.parametrize('size', [
97 @pytest.mark.parametrize('size', [
98 1,
98 1,
99 10 ** 5
99 10 ** 5
100 ])
100 ])
101 def test_output_with_no_input(size, environ):
101 def test_output_with_no_input(size, environ):
102 call_args = _get_python_args(f'sys.stdout.write("X" * {size});')
102 call_args = _get_python_args(f'sys.stdout.write("X" * {size});')
103 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ)
103 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ)
104 output = b''.join(proc)
104 output = b''.join(proc)
105
105
106 assert output == ascii_bytes("X" * size)
106 assert output == ascii_bytes("X" * size)
107
107
108
108
109 @pytest.mark.parametrize('size', [
109 @pytest.mark.parametrize('size', [
110 1,
110 1,
111 10 ** 5
111 10 ** 5
112 ])
112 ])
113 def test_output_with_no_input_does_not_fail(size, environ):
113 def test_output_with_no_input_does_not_fail(size, environ):
114
114
115 call_args = _get_python_args(f'sys.stdout.write("X" * {size}); sys.exit(1)')
115 call_args = _get_python_args(f'sys.stdout.write("X" * {size}); sys.exit(1)')
116 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
116 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
117 output = b''.join(proc)
117 output = b''.join(proc)
118
118
119 assert output == ascii_bytes("X" * size)
119 assert output == ascii_bytes("X" * size)
120
120
121
121
122 @pytest.mark.parametrize('size', [
122 @pytest.mark.parametrize('size', [
123 1,
123 1,
124 10 ** 5
124 10 ** 5
125 ])
125 ])
126 def test_output_with_input(size, environ):
126 def test_output_with_input(size, environ):
127 data_len = size
127 data_len = size
128 inputstream = FileLikeObj(b'X', size)
128 inputstream = FileLikeObj(b'X', size)
129
129
130 # This acts like the cat command.
130 # This acts like the cat command.
131 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
131 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 # note: in this tests we explicitly don't assign chunker to a variable and let it stream directly
132 # note: in this tests we explicitly don't assign chunker to a variable and let it stream directly
133 output = b''.join(
133 output = b''.join(
134 subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
134 subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
135 )
135 )
136
136
137 assert len(output) == data_len
137 assert len(output) == data_len
138
138
139
139
140 @pytest.mark.parametrize('size', [
140 @pytest.mark.parametrize('size', [
141 1,
141 1,
142 10 ** 5
142 10 ** 5
143 ])
143 ])
144 def test_output_with_input_skipping_iterator(size, environ):
144 def test_output_with_input_skipping_iterator(size, environ):
145 data_len = size
145 data_len = size
146 inputstream = FileLikeObj(b'X', size)
146 inputstream = FileLikeObj(b'X', size)
147
147
148 # This acts like the cat command.
148 # This acts like the cat command.
149 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
149 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
150
150
151 # Note: assigning the chunker makes sure that it is not deleted too early
151 # Note: assigning the chunker makes sure that it is not deleted too early
152 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
152 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
153 output = b''.join(proc.stdout)
153 output = b''.join(proc.stdout)
154
154
155 assert len(output) == data_len
155 assert len(output) == data_len
@@ -1,103 +1,103 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import mock
19 import mock
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23 from vcsserver.lib.str_utils import ascii_bytes
23 from vcsserver.lib.str_utils import ascii_bytes
24
24
25
25
26 class MockPopen:
26 class MockPopen:
27 def __init__(self, stderr):
27 def __init__(self, stderr):
28 self.stdout = io.BytesIO(b'')
28 self.stdout = io.BytesIO(b'')
29 self.stderr = io.BytesIO(stderr)
29 self.stderr = io.BytesIO(stderr)
30 self.returncode = 1
30 self.returncode = 1
31
31
32 def wait(self):
32 def wait(self):
33 pass
33 pass
34
34
35
35
36 INVALID_CERTIFICATE_STDERR = '\n'.join([
36 INVALID_CERTIFICATE_STDERR = '\n'.join([
37 'svnrdump: E230001: Unable to connect to a repository at URL url',
37 'svnrdump: E230001: Unable to connect to a repository at URL url',
38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
39 ])
39 ])
40
40
41
41
42 @pytest.mark.parametrize('stderr,expected_reason', [
42 @pytest.mark.parametrize('stderr,expected_reason', [
43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
46 @pytest.mark.xfail(sys.platform == "cygwin",
46 @pytest.mark.xfail(sys.platform == "cygwin",
47 reason="SVN not packaged for Cygwin")
47 reason="SVN not packaged for Cygwin")
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
49 from vcsserver.remote import svn_remote
49 from vcsserver.remote import svn_remote
50 factory = mock.Mock()
50 factory = mock.Mock()
51 factory.repo = mock.Mock(return_value=mock.Mock())
51 factory.repo = mock.Mock(return_value=mock.Mock())
52
52
53 remote = svn_remote.SvnRemote(factory)
53 remote = svn_remote.SvnRemote(factory)
54 remote.is_path_valid_repository = lambda wire, path: True
54 remote.is_path_valid_repository = lambda wire, path: True
55
55
56 with mock.patch('subprocess.Popen',
56 with mock.patch('subprocess.Popen',
57 return_value=MockPopen(ascii_bytes(stderr))):
57 return_value=MockPopen(ascii_bytes(stderr))):
58 with pytest.raises(Exception) as excinfo:
58 with pytest.raises(Exception) as excinfo:
59 remote.import_remote_repository({'path': 'path'}, 'url')
59 remote.import_remote_repository({'path': 'path'}, 'url')
60
60
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
62
62
63 assert excinfo.value.args[0] == expected_error_args
63 assert excinfo.value.args[0] == expected_error_args
64
64
65
65
66 def test_svn_libraries_can_be_imported():
66 def test_svn_libraries_can_be_imported():
67 import svn.client # noqa
67 import svn.client # noqa
68 assert svn.client is not None
68 assert svn.client is not None
69
69
70
70
71 @pytest.mark.parametrize('example_url, parts', [
71 @pytest.mark.parametrize('example_url, parts', [
72 ('http://server.com', ('', '', 'http://server.com')),
72 ('http://server.com', ('', '', 'http://server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 ('<script>', ('', '', '<script>')),
75 ('<script>', ('', '', '<script>')),
76 ('http://', ('', '', 'http://')),
76 ('http://', ('', '', 'http://')),
77 ])
77 ])
78 def test_username_password_extraction_from_url(example_url, parts):
78 def test_username_password_extraction_from_url(example_url, parts):
79 from vcsserver.remote import svn_remote
79 from vcsserver.remote import svn_remote
80
80
81 factory = mock.Mock()
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
83
84 remote = svn_remote.SvnRemote(factory)
84 remote = svn_remote.SvnRemote(factory)
85 remote.is_path_valid_repository = lambda wire, path: True
85 remote.is_path_valid_repository = lambda wire, path: True
86
86
87 assert remote.get_url_and_credentials(example_url) == parts
87 assert remote.get_url_and_credentials(example_url) == parts
88
88
89
89
90 @pytest.mark.parametrize('call_url', [
90 @pytest.mark.parametrize('call_url', [
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
94 ])
94 ])
95 def test_check_url(call_url):
95 def test_check_url(call_url):
96 from vcsserver.remote import svn_remote
96 from vcsserver.remote import svn_remote
97 factory = mock.Mock()
97 factory = mock.Mock()
98 factory.repo = mock.Mock(return_value=mock.Mock())
98 factory.repo = mock.Mock(return_value=mock.Mock())
99
99
100 remote = svn_remote.SvnRemote(factory)
100 remote = svn_remote.SvnRemote(factory)
101 remote.is_path_valid_repository = lambda wire, path: True
101 remote.is_path_valid_repository = lambda wire, path: True
102 assert remote.check_url(call_url, {'dummy': 'config'})
102 assert remote.check_url(call_url, {'dummy': 'config'})
103
103
@@ -1,69 +1,69 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import pytest
18 import pytest
19 from vcsserver.lib.str_utils import ascii_bytes, ascii_str, convert_to_str
19 from vcsserver.lib.str_utils import ascii_bytes, ascii_str, convert_to_str
20
20
21
21
22 @pytest.mark.parametrize('given, expected', [
22 @pytest.mark.parametrize('given, expected', [
23 ('a', b'a'),
23 ('a', b'a'),
24 ('a', b'a'),
24 ('a', b'a'),
25 ])
25 ])
26 def test_ascii_bytes(given, expected):
26 def test_ascii_bytes(given, expected):
27 assert ascii_bytes(given) == expected
27 assert ascii_bytes(given) == expected
28
28
29
29
30 @pytest.mark.parametrize('given', [
30 @pytest.mark.parametrize('given', [
31 'Ã¥',
31 'Ã¥',
32 'Ã¥'.encode('utf8')
32 'Ã¥'.encode('utf8')
33 ])
33 ])
34 def test_ascii_bytes_raises(given):
34 def test_ascii_bytes_raises(given):
35 with pytest.raises(ValueError):
35 with pytest.raises(ValueError):
36 ascii_bytes(given)
36 ascii_bytes(given)
37
37
38
38
39 @pytest.mark.parametrize('given, expected', [
39 @pytest.mark.parametrize('given, expected', [
40 (b'a', 'a'),
40 (b'a', 'a'),
41 ])
41 ])
42 def test_ascii_str(given, expected):
42 def test_ascii_str(given, expected):
43 assert ascii_str(given) == expected
43 assert ascii_str(given) == expected
44
44
45
45
46 @pytest.mark.parametrize('given', [
46 @pytest.mark.parametrize('given', [
47 'a',
47 'a',
48 'Ã¥'.encode('utf8'),
48 'Ã¥'.encode('utf8'),
49 'Ã¥'
49 'Ã¥'
50 ])
50 ])
51 def test_ascii_str_raises(given):
51 def test_ascii_str_raises(given):
52 with pytest.raises(ValueError):
52 with pytest.raises(ValueError):
53 ascii_str(given)
53 ascii_str(given)
54
54
55
55
56 @pytest.mark.parametrize('given, expected', [
56 @pytest.mark.parametrize('given, expected', [
57 ('a', 'a'),
57 ('a', 'a'),
58 (b'a', 'a'),
58 (b'a', 'a'),
59 # tuple
59 # tuple
60 (('a', b'b', b'c'), ('a', 'b', 'c')),
60 (('a', b'b', b'c'), ('a', 'b', 'c')),
61 # nested tuple
61 # nested tuple
62 (('a', b'b', (b'd', b'e')), ('a', 'b', ('d', 'e'))),
62 (('a', b'b', (b'd', b'e')), ('a', 'b', ('d', 'e'))),
63 # list
63 # list
64 (['a', b'b', b'c'], ['a', 'b', 'c']),
64 (['a', b'b', b'c'], ['a', 'b', 'c']),
65 # mixed
65 # mixed
66 (['a', b'b', b'c', (b'b1', b'b2')], ['a', 'b', 'c', ('b1', 'b2')])
66 (['a', b'b', b'c', (b'b1', b'b2')], ['a', 'b', 'c', ('b1', 'b2')])
67 ])
67 ])
68 def test_convert_to_str(given, expected):
68 def test_convert_to_str(given, expected):
69 assert convert_to_str(given) == expected
69 assert convert_to_str(given) == expected
@@ -1,98 +1,98 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import wsgiref.simple_server
18 import wsgiref.simple_server
19 import wsgiref.validate
19 import wsgiref.validate
20
20
21 from vcsserver import wsgi_app_caller
21 from vcsserver import wsgi_app_caller
22 from vcsserver.lib.str_utils import ascii_bytes, safe_str
22 from vcsserver.lib.str_utils import ascii_bytes, safe_str
23
23
24
24
25 @wsgiref.validate.validator
25 @wsgiref.validate.validator
26 def demo_app(environ, start_response):
26 def demo_app(environ, start_response):
27 """WSGI app used for testing."""
27 """WSGI app used for testing."""
28
28
29 input_data = safe_str(environ['wsgi.input'].read(1024))
29 input_data = safe_str(environ['wsgi.input'].read(1024))
30
30
31 data = [
31 data = [
32 'Hello World!\n',
32 'Hello World!\n',
33 f'input_data={input_data}\n',
33 f'input_data={input_data}\n',
34 ]
34 ]
35 for key, value in sorted(environ.items()):
35 for key, value in sorted(environ.items()):
36 data.append(f'{key}={value}\n')
36 data.append(f'{key}={value}\n')
37
37
38 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write = start_response("200 OK", [('Content-Type', 'text/plain')])
39 write(b'Old school write method\n')
39 write(b'Old school write method\n')
40 write(b'***********************\n')
40 write(b'***********************\n')
41 return list(map(ascii_bytes, data))
41 return list(map(ascii_bytes, data))
42
42
43
43
44 BASE_ENVIRON = {
44 BASE_ENVIRON = {
45 'REQUEST_METHOD': 'GET',
45 'REQUEST_METHOD': 'GET',
46 'SERVER_NAME': 'localhost',
46 'SERVER_NAME': 'localhost',
47 'SERVER_PORT': '80',
47 'SERVER_PORT': '80',
48 'SCRIPT_NAME': '',
48 'SCRIPT_NAME': '',
49 'PATH_INFO': '/',
49 'PATH_INFO': '/',
50 'QUERY_STRING': '',
50 'QUERY_STRING': '',
51 'foo.var': 'bla',
51 'foo.var': 'bla',
52 }
52 }
53
53
54
54
55 def test_complete_environ():
55 def test_complete_environ():
56 environ = dict(BASE_ENVIRON)
56 environ = dict(BASE_ENVIRON)
57 data = b"data"
57 data = b"data"
58 wsgi_app_caller._complete_environ(environ, data)
58 wsgi_app_caller._complete_environ(environ, data)
59 wsgiref.validate.check_environ(environ)
59 wsgiref.validate.check_environ(environ)
60
60
61 assert data == environ['wsgi.input'].read(1024)
61 assert data == environ['wsgi.input'].read(1024)
62
62
63
63
64 def test_start_response():
64 def test_start_response():
65 start_response = wsgi_app_caller._StartResponse()
65 start_response = wsgi_app_caller._StartResponse()
66 status = '200 OK'
66 status = '200 OK'
67 headers = [('Content-Type', 'text/plain')]
67 headers = [('Content-Type', 'text/plain')]
68 start_response(status, headers)
68 start_response(status, headers)
69
69
70 assert status == start_response.status
70 assert status == start_response.status
71 assert headers == start_response.headers
71 assert headers == start_response.headers
72
72
73
73
74 def test_start_response_with_error():
74 def test_start_response_with_error():
75 start_response = wsgi_app_caller._StartResponse()
75 start_response = wsgi_app_caller._StartResponse()
76 status = '500 Internal Server Error'
76 status = '500 Internal Server Error'
77 headers = [('Content-Type', 'text/plain')]
77 headers = [('Content-Type', 'text/plain')]
78 start_response(status, headers, (None, None, None))
78 start_response(status, headers, (None, None, None))
79
79
80 assert status == start_response.status
80 assert status == start_response.status
81 assert headers == start_response.headers
81 assert headers == start_response.headers
82
82
83
83
84 def test_wsgi_app_caller():
84 def test_wsgi_app_caller():
85 environ = dict(BASE_ENVIRON)
85 environ = dict(BASE_ENVIRON)
86 input_data = 'some text'
86 input_data = 'some text'
87
87
88 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
88 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
89 responses, status, headers = caller.handle(environ, input_data)
89 responses, status, headers = caller.handle(environ, input_data)
90 response = b''.join(responses)
90 response = b''.join(responses)
91
91
92 assert status == '200 OK'
92 assert status == '200 OK'
93 assert headers == [('Content-Type', 'text/plain')]
93 assert headers == [('Content-Type', 'text/plain')]
94 assert response.startswith(b'Old school write method\n***********************\n')
94 assert response.startswith(b'Old school write method\n***********************\n')
95 assert b'Hello World!\n' in response
95 assert b'Hello World!\n' in response
96 assert b'foo.var=bla\n' in response
96 assert b'foo.var=bla\n' in response
97
97
98 assert ascii_bytes(f'input_data={input_data}\n') in response
98 assert ascii_bytes(f'input_data={input_data}\n') in response
@@ -1,17 +1,17 b''
1 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -1,123 +1,123 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import base64
17 import base64
18 import logging
18 import logging
19 import time
19 import time
20
20
21 import msgpack
21 import msgpack
22
22
23 import vcsserver
23 import vcsserver
24 from vcsserver.lib.str_utils import safe_str
24 from vcsserver.lib.str_utils import safe_str
25
25
26 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
27
27
28
28
29 def get_access_path(environ):
29 def get_access_path(environ):
30 path = environ.get('PATH_INFO')
30 path = environ.get('PATH_INFO')
31 return path
31 return path
32
32
33
33
34 def get_user_agent(environ):
34 def get_user_agent(environ):
35 return environ.get('HTTP_USER_AGENT')
35 return environ.get('HTTP_USER_AGENT')
36
36
37
37
38 def get_call_context(request) -> dict:
38 def get_call_context(request) -> dict:
39 cc = {}
39 cc = {}
40 registry = request.registry
40 registry = request.registry
41 if hasattr(registry, 'vcs_call_context'):
41 if hasattr(registry, 'vcs_call_context'):
42 cc.update({
42 cc.update({
43 'X-RC-Method': registry.vcs_call_context.get('method'),
43 'X-RC-Method': registry.vcs_call_context.get('method'),
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
45 })
45 })
46
46
47 return cc
47 return cc
48
48
49
49
50 def get_headers_call_context(environ, strict=True):
50 def get_headers_call_context(environ, strict=True):
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
53 return msgpack.unpackb(packed_cc)
53 return msgpack.unpackb(packed_cc)
54 elif strict:
54 elif strict:
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
56
56
57
57
58 class RequestWrapperTween:
58 class RequestWrapperTween:
59 def __init__(self, handler, registry):
59 def __init__(self, handler, registry):
60 self.handler = handler
60 self.handler = handler
61 self.registry = registry
61 self.registry = registry
62
62
63 # one-time configuration code goes here
63 # one-time configuration code goes here
64
64
65 def __call__(self, request):
65 def __call__(self, request):
66 start = time.time()
66 start = time.time()
67 log.debug('Starting request processing')
67 log.debug('Starting request processing')
68 response = None
68 response = None
69
69
70 try:
70 try:
71 response = self.handler(request)
71 response = self.handler(request)
72 finally:
72 finally:
73 ua = get_user_agent(request.environ)
73 ua = get_user_agent(request.environ)
74 call_context = get_call_context(request)
74 call_context = get_call_context(request)
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
77
77
78 count = request.request_count()
78 count = request.request_count()
79 _ver_ = vcsserver.get_version()
79 _ver_ = vcsserver.get_version()
80 _path = safe_str(get_access_path(request.environ))
80 _path = safe_str(get_access_path(request.environ))
81
81
82 ip = '127.0.0.1'
82 ip = '127.0.0.1'
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
85
85
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
87
87
88 total = time.time() - start
88 total = time.time() - start
89
89
90 log.info(
90 log.info(
91 'Finished request processing: reqq[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
91 'Finished request processing: reqq[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
92 count, ip, request.environ.get('REQUEST_METHOD'),
92 count, ip, request.environ.get('REQUEST_METHOD'),
93 _view_path, total, ua, _ver_,
93 _view_path, total, ua, _ver_,
94 extra={"time": total, "ver": _ver_, "code": resp_code,
94 extra={"time": total, "ver": _ver_, "code": resp_code,
95 "path": _path, "view_name": match_route, "user_agent": ua,
95 "path": _path, "view_name": match_route, "user_agent": ua,
96 "vcs_method": vcs_method, "repo_name": repo_name}
96 "vcs_method": vcs_method, "repo_name": repo_name}
97 )
97 )
98
98
99 statsd = request.registry.statsd
99 statsd = request.registry.statsd
100 if statsd:
100 if statsd:
101 match_route = request.matched_route.name if request.matched_route else _path
101 match_route = request.matched_route.name if request.matched_route else _path
102 elapsed_time_ms = round(1000.0 * total) # use ms only
102 elapsed_time_ms = round(1000.0 * total) # use ms only
103 statsd.timing(
103 statsd.timing(
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
105 tags=[
105 tags=[
106 f"view_name:{match_route}",
106 f"view_name:{match_route}",
107 f"code:{resp_code}"
107 f"code:{resp_code}"
108 ],
108 ],
109 use_decimals=False
109 use_decimals=False
110 )
110 )
111 statsd.incr(
111 statsd.incr(
112 "vcsserver_req_total", tags=[
112 "vcsserver_req_total", tags=[
113 f"view_name:{match_route}",
113 f"view_name:{match_route}",
114 f"code:{resp_code}"
114 f"code:{resp_code}"
115 ])
115 ])
116
116
117 return response
117 return response
118
118
119
119
120 def includeme(config):
120 def includeme(config):
121 config.add_tween(
121 config.add_tween(
122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
123 )
123 )
@@ -1,53 +1,53 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18 import hashlib
19
19
20 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
21
21
22
22
23 class AttributeDictBase(dict):
23 class AttributeDictBase(dict):
24 def __getstate__(self):
24 def __getstate__(self):
25 odict = self.__dict__ # get attribute dictionary
25 odict = self.__dict__ # get attribute dictionary
26 return odict
26 return odict
27
27
28 def __setstate__(self, dict):
28 def __setstate__(self, dict):
29 self.__dict__ = dict
29 self.__dict__ = dict
30
30
31 __setattr__ = dict.__setitem__
31 __setattr__ = dict.__setitem__
32 __delattr__ = dict.__delitem__
32 __delattr__ = dict.__delitem__
33
33
34
34
35 class StrictAttributeDict(AttributeDictBase):
35 class StrictAttributeDict(AttributeDictBase):
36 """
36 """
37 Strict Version of Attribute dict which raises an Attribute error when
37 Strict Version of Attribute dict which raises an Attribute error when
38 requested attribute is not set
38 requested attribute is not set
39 """
39 """
40 def __getattr__(self, attr):
40 def __getattr__(self, attr):
41 try:
41 try:
42 return self[attr]
42 return self[attr]
43 except KeyError:
43 except KeyError:
44 raise AttributeError(f'{self.__class__} object has no attribute {attr}')
44 raise AttributeError(f'{self.__class__} object has no attribute {attr}')
45
45
46
46
47 class AttributeDict(AttributeDictBase):
47 class AttributeDict(AttributeDictBase):
48 def __getattr__(self, attr):
48 def __getattr__(self, attr):
49 return self.get(attr, None)
49 return self.get(attr, None)
50
50
51
51
52 def sha1(val):
52 def sha1(val):
53 return hashlib.sha1(val).hexdigest()
53 return hashlib.sha1(val).hexdigest()
@@ -1,46 +1,46 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib import rc_cache
18 from vcsserver.lib import rc_cache
19
19
20
20
21 class RemoteBase:
21 class RemoteBase:
22 EMPTY_COMMIT = '0' * 40
22 EMPTY_COMMIT = '0' * 40
23
23
24 def _region(self, wire):
24 def _region(self, wire):
25 cache_repo_id = wire.get('cache_repo_id', '')
25 cache_repo_id = wire.get('cache_repo_id', '')
26 cache_namespace_uid = f'cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{cache_repo_id}'
26 cache_namespace_uid = f'cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{cache_repo_id}'
27 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
27 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
28
28
29 def _cache_on(self, wire):
29 def _cache_on(self, wire):
30 context = wire.get('context', '')
30 context = wire.get('context', '')
31 context_uid = f'{context}'
31 context_uid = f'{context}'
32 repo_id = wire.get('repo_id', '')
32 repo_id = wire.get('repo_id', '')
33 cache = wire.get('cache', True)
33 cache = wire.get('cache', True)
34 cache_on = context and cache
34 cache_on = context and cache
35 return cache_on, context_uid, repo_id
35 return cache_on, context_uid, repo_id
36
36
37 def vcsserver_invalidate_cache(self, wire, delete):
37 def vcsserver_invalidate_cache(self, wire, delete):
38 cache_repo_id = wire.get('cache_repo_id', '')
38 cache_repo_id = wire.get('cache_repo_id', '')
39 cache_namespace_uid = f'cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{cache_repo_id}'
39 cache_namespace_uid = f'cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{cache_repo_id}'
40
40
41 if delete:
41 if delete:
42 rc_cache.clear_cache_namespace(
42 rc_cache.clear_cache_namespace(
43 'repo_object', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
43 'repo_object', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
44
44
45 repo_id = wire.get('repo_id', '')
45 repo_id = wire.get('repo_id', '')
46 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
46 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
@@ -1,116 +1,116 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2024 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Extract the responses of a WSGI app."""
18 """Extract the responses of a WSGI app."""
19
19
20 __all__ = ('WSGIAppCaller',)
20 __all__ = ('WSGIAppCaller',)
21
21
22 import io
22 import io
23 import logging
23 import logging
24 import os
24 import os
25
25
26 from vcsserver.lib.str_utils import ascii_bytes
26 from vcsserver.lib.str_utils import ascii_bytes
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30 DEV_NULL = open(os.devnull)
30 DEV_NULL = open(os.devnull)
31
31
32
32
33 def _complete_environ(environ, input_data: bytes):
33 def _complete_environ(environ, input_data: bytes):
34 """Update the missing wsgi.* variables of a WSGI environment.
34 """Update the missing wsgi.* variables of a WSGI environment.
35
35
36 :param environ: WSGI environment to update
36 :param environ: WSGI environment to update
37 :type environ: dict
37 :type environ: dict
38 :param input_data: data to be read by the app
38 :param input_data: data to be read by the app
39 :type input_data: bytes
39 :type input_data: bytes
40 """
40 """
41 environ.update({
41 environ.update({
42 'wsgi.version': (1, 0),
42 'wsgi.version': (1, 0),
43 'wsgi.url_scheme': 'http',
43 'wsgi.url_scheme': 'http',
44 'wsgi.multithread': True,
44 'wsgi.multithread': True,
45 'wsgi.multiprocess': True,
45 'wsgi.multiprocess': True,
46 'wsgi.run_once': False,
46 'wsgi.run_once': False,
47 'wsgi.input': io.BytesIO(input_data),
47 'wsgi.input': io.BytesIO(input_data),
48 'wsgi.errors': DEV_NULL,
48 'wsgi.errors': DEV_NULL,
49 })
49 })
50
50
51
51
52 # pylint: disable=too-few-public-methods
52 # pylint: disable=too-few-public-methods
53 class _StartResponse:
53 class _StartResponse:
54 """Save the arguments of a start_response call."""
54 """Save the arguments of a start_response call."""
55
55
56 __slots__ = ['status', 'headers', 'content']
56 __slots__ = ['status', 'headers', 'content']
57
57
58 def __init__(self):
58 def __init__(self):
59 self.status = None
59 self.status = None
60 self.headers = None
60 self.headers = None
61 self.content = []
61 self.content = []
62
62
63 def __call__(self, status, headers, exc_info=None):
63 def __call__(self, status, headers, exc_info=None):
64 # TODO(skreft): do something meaningful with the exc_info
64 # TODO(skreft): do something meaningful with the exc_info
65 exc_info = None # avoid dangling circular reference
65 exc_info = None # avoid dangling circular reference
66 self.status = status
66 self.status = status
67 self.headers = headers
67 self.headers = headers
68
68
69 return self.write
69 return self.write
70
70
71 def write(self, content):
71 def write(self, content):
72 """Write method returning when calling this object.
72 """Write method returning when calling this object.
73
73
74 All the data written is then available in content.
74 All the data written is then available in content.
75 """
75 """
76 self.content.append(content)
76 self.content.append(content)
77
77
78
78
79 class WSGIAppCaller:
79 class WSGIAppCaller:
80 """Calls a WSGI app."""
80 """Calls a WSGI app."""
81
81
82 def __init__(self, app):
82 def __init__(self, app):
83 """
83 """
84 :param app: WSGI app to call
84 :param app: WSGI app to call
85 """
85 """
86 self.app = app
86 self.app = app
87
87
88 def handle(self, environ, input_data):
88 def handle(self, environ, input_data):
89 """Process a request with the WSGI app.
89 """Process a request with the WSGI app.
90
90
91 The returned data of the app is fully consumed into a list.
91 The returned data of the app is fully consumed into a list.
92
92
93 :param environ: WSGI environment to update
93 :param environ: WSGI environment to update
94 :type environ: dict
94 :type environ: dict
95 :param input_data: data to be read by the app
95 :param input_data: data to be read by the app
96 :type input_data: str/bytes
96 :type input_data: str/bytes
97
97
98 :returns: a tuple with the contents, status and headers
98 :returns: a tuple with the contents, status and headers
99 :rtype: (list<str>, str, list<(str, str)>)
99 :rtype: (list<str>, str, list<(str, str)>)
100 """
100 """
101 _complete_environ(environ, ascii_bytes(input_data, allow_bytes=True))
101 _complete_environ(environ, ascii_bytes(input_data, allow_bytes=True))
102 start_response = _StartResponse()
102 start_response = _StartResponse()
103 log.debug("Calling wrapped WSGI application")
103 log.debug("Calling wrapped WSGI application")
104 responses = self.app(environ, start_response)
104 responses = self.app(environ, start_response)
105 responses_list = list(responses)
105 responses_list = list(responses)
106 existing_responses = start_response.content
106 existing_responses = start_response.content
107 if existing_responses:
107 if existing_responses:
108 log.debug("Adding returned response to response written via write()")
108 log.debug("Adding returned response to response written via write()")
109 existing_responses.extend(responses_list)
109 existing_responses.extend(responses_list)
110 responses_list = existing_responses
110 responses_list = existing_responses
111 if hasattr(responses, 'close'):
111 if hasattr(responses, 'close'):
112 log.debug("Closing iterator from WSGI application")
112 log.debug("Closing iterator from WSGI application")
113 responses.close()
113 responses.close()
114
114
115 log.debug("Handling of WSGI request done, returning response")
115 log.debug("Handling of WSGI request done, returning response")
116 return responses_list, start_response.status, start_response.headers
116 return responses_list, start_response.status, start_response.headers
General Comments 0
You need to be logged in to leave comments. Login now