##// END OF EJS Templates
merged python3 into default
super-admin -
r1131:20e2294a merge default
parent child Browse files
Show More
@@ -0,0 +1,54 b''
1 [build-system]
2 requires = ["setuptools>=61.0.0", "wheel"]
3 build-backend = "setuptools.build_meta"
4
5 [project]
6 name = "rhodecode-vcsserver"
7 description = "Version Control System Server for RhodeCode"
8 authors = [
9 {name = "RhodeCode GmbH", email = "support@rhodecode.com"},
10 ]
11
12 license = {text = "GPL V3"}
13 requires-python = ">=3.10"
14 dynamic = ["version", "readme", "dependencies", "optional-dependencies"]
15 classifiers = [
16 'Development Status :: 6 - Mature',
17 'Intended Audience :: Developers',
18 'Operating System :: OS Independent',
19 'Topic :: Software Development :: Version Control',
20 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
21 'Programming Language :: Python :: 3.10',
22 ]
23
24 [project.entry-points."paste.app_factory"]
25 main = "vcsserver.http_main:main"
26
27
28 [tool.setuptools]
29 packages = ["vcsserver"]
30
31 [tool.setuptools.dynamic]
32 readme = {file = ["README.rst"], content-type = "text/x-rst"}
33 version = {file = "vcsserver/VERSION"}
34 dependencies = {file = ["requirements.txt"]}
35 optional-dependencies.tests = {file = ["requirements_test.txt"]}
36
37 [tool.ruff]
38 select = [
39 # Pyflakes
40 "F",
41 # Pycodestyle
42 "E",
43 "W",
44 # isort
45 "I001"
46 ]
47 ignore = [
48 "E501", # line too long, handled by black
49 ]
50 # Same as Black.
51 line-length = 120
52
53 [tool.ruff.isort]
54 known-first-party = ["vcsserver"]
@@ -0,0 +1,27 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
# Mercurial hook-point names, in the form '<hg hook type>.<handler name>'.
# Presumably these are registered into a repo's hgrc by the hook subsystem —
# TODO(review): confirm against the hook installer code.
HOOK_REPO_SIZE = 'changegroup.repo_size'

# HG
HOOK_PRE_PULL = 'preoutgoing.pre_pull'
HOOK_PULL = 'outgoing.pull_logger'
HOOK_PRE_PUSH = 'prechangegroup.pre_push'
# pretxnchangegroup runs inside the push transaction (can still reject it)
HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
HOOK_PUSH = 'changegroup.push_logger'
HOOK_PUSH_KEY = 'pushkey.key_push'
@@ -0,0 +1,53 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
import sys
import logging


# ANSI foreground color codes 30..37
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(30, 38))

# Sequences
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[0;%dm"
BOLD_SEQ = "\033[1m"

# logging level name -> ANSI color used by ColorFormatter
COLORS = {
    'CRITICAL': MAGENTA,
    'ERROR': RED,
    'WARNING': CYAN,
    'INFO': GREEN,
    'DEBUG': BLUE,
    'SQL': YELLOW
}


class ColorFormatter(logging.Formatter):
    """logging.Formatter that wraps the formatted record in ANSI color codes
    chosen by the record's levelname (see COLORS)."""

    def format(self, record):
        """
        Format ``record`` and wrap the result in the color sequence mapped to
        its levelname.

        Level names missing from COLORS (e.g. custom levels whose name is
        "Level N") fall back to WHITE instead of raising KeyError.
        """
        def_record = super().format(record)

        levelname = record.levelname
        # BUG FIX: plain COLORS[levelname] raised KeyError for unknown levels
        start = COLOR_SEQ % COLORS.get(levelname, WHITE)
        end = RESET_SEQ

        colored_record = ''.join([start, def_record, end])
        return colored_record
@@ -0,0 +1,87 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19 import os
20 import diskcache
21 from diskcache import RLock
22
23 log = logging.getLogger(__name__)
24
25 cache_meta = None
26
27
class ReentrantLock(RLock):
    # NOTE(review): acquire()/release() are deliberately commented out below,
    # so this "lock" only logs and provides NO mutual exclusion. Confirm
    # whether that is intentional (e.g. locking handled elsewhere) before
    # relying on it for archive-cache generation.

    def __enter__(self):
        # diskcache.RLock stores the lock key on self._key
        reentrant_lock_key = self._key

        log.debug('Acquire ReentrantLock(key=%s) for archive cache generation...', reentrant_lock_key)
        #self.acquire()
        log.debug('Lock for key=%s acquired', reentrant_lock_key)

    def __exit__(self, *exc_info):
        #self.release()
        pass
39
40
def get_archival_config(config):
    """
    Build the archive-cache configuration: take every ``archive_cache.*``
    key from ``config`` and layer it over the built-in defaults.

    :param config: mapping of settings (only ``archive_cache``-prefixed
        keys are copied; everything else is ignored)
    :return: dict with defaults applied, overridden by matching keys
    """
    final_config = {
        'archive_cache.eviction_policy': 'least-frequently-used'
    }

    final_config.update(
        (key, value)
        for key, value in config.items()
        if key.startswith('archive_cache')
    )

    return final_config
52
53
def get_archival_cache_store(config):
    """
    Create (once) and return the process-wide diskcache.FanoutCache used to
    store generated repository archives.

    The instance is memoized in the module-global ``cache_meta``; subsequent
    calls return the already-initialized cache and ignore ``config``.

    :param config: settings mapping; requires ``archive_cache.store_dir``,
        ``archive_cache.cache_size_gb`` and ``archive_cache.cache_shards``
        (KeyError if missing).
    """
    global cache_meta
    if cache_meta is not None:
        return cache_meta

    # apply defaults / filter to archive_cache.* keys
    config = get_archival_config(config)

    archive_cache_dir = config['archive_cache.store_dir']
    # NOTE(review): size/shards presumably arrive as numbers; if the config
    # source yields strings the arithmetic below breaks — confirm upstream.
    archive_cache_size_gb = config['archive_cache.cache_size_gb']
    archive_cache_shards = config['archive_cache.cache_shards']
    archive_cache_eviction_policy = config['archive_cache.eviction_policy']

    log.debug('Initializing archival cache instance under %s', archive_cache_dir)

    # check if it's ok to write, and re-create the archive cache
    if not os.path.isdir(archive_cache_dir):
        os.makedirs(archive_cache_dir, exist_ok=True)

    d_cache = diskcache.FanoutCache(
        archive_cache_dir, shards=archive_cache_shards,
        cull_limit=0,  # manual eviction required
        size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,  # GiB -> bytes
        eviction_policy=archive_cache_eviction_policy,
        timeout=30
    )
    cache_meta = d_cache
    return cache_meta
82
83
def includeme(config):
    """Pyramid include hook — intentionally a no-op for vcsserver."""
    # init our cache at start, for vcsserver we don't init at runtime
    # because our cache config is sent via wire on make archive call, this call just lazy-enables the client
    return
@@ -0,0 +1,2 b''
1 # use orjson by default
2 import orjson as json
@@ -0,0 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import tempfile
20
21 from svn import client
22 from svn import core
23 from svn import ra
24
25 from mercurial import error
26
27 from vcsserver.str_utils import safe_bytes
28
29 core.svn_config_ensure(None)
30 svn_config = core.svn_config_get_config(None)
31
32
class RaCallbacks(ra.Callbacks):
    """RA-layer callbacks handed to ra.open2() when opening an SVN session."""

    @staticmethod
    def open_tmp_file(pool):  # pragma: no cover
        # hand subversion a fresh temp-file *path*; the fd is closed so the
        # svn libraries can (re)open the file themselves
        (fd, fn) = tempfile.mkstemp()
        os.close(fd)
        return fn

    @staticmethod
    def get_client_string(pool):
        # client identification string reported to the SVN server
        return b'RhodeCode-subversion-url-checker'
43
44
class SubversionException(Exception):
    """Base exception for this module's Subversion errors."""
    pass


class SubversionConnectionException(SubversionException):
    """Exception raised when a generic error occurs when connecting to a repository."""
52
def normalize_url(url):
    """
    Normalize an SVN repository URL (bytes): drop the ``svn+`` transport
    prefix from ``svn+http(s)://`` URLs and strip any trailing slashes.
    Falsy input (None, b'') is returned unchanged.
    """
    if not url:
        return url
    for transport_prefix in (b'svn+http://', b'svn+https://'):
        if url.startswith(transport_prefix):
            url = url[4:]  # drop the leading b'svn+'
            break
    return url.rstrip(b'/')
60
61
def _create_auth_baton(pool):
    """Create a Subversion authentication baton. """
    # Give the client context baton a suite of authentication
    # providers.
    # Platform-specific providers are looked up by name because their
    # availability depends on how the SWIG bindings were built.
    platform_specific = [
        'svn_auth_get_gnome_keyring_simple_provider',
        'svn_auth_get_gnome_keyring_ssl_client_cert_pw_provider',
        'svn_auth_get_keychain_simple_provider',
        'svn_auth_get_keychain_ssl_client_cert_pw_provider',
        'svn_auth_get_kwallet_simple_provider',
        'svn_auth_get_kwallet_ssl_client_cert_pw_provider',
        'svn_auth_get_ssl_client_cert_file_provider',
        'svn_auth_get_windows_simple_provider',
        'svn_auth_get_windows_ssl_server_trust_provider',
    ]

    providers = []

    for p in platform_specific:
        if getattr(core, p, None) is not None:
            try:
                providers.append(getattr(core, p)())
            except RuntimeError:
                # provider exists in the bindings but cannot be instantiated
                # on this platform — skip it
                pass

    # always-available generic providers
    providers += [
        client.get_simple_provider(),
        client.get_username_provider(),
        client.get_ssl_client_cert_file_provider(),
        client.get_ssl_client_cert_pw_file_provider(),
        client.get_ssl_server_trust_file_provider(),
    ]

    return core.svn_auth_open(providers, pool)
96
97
class SubversionRepo(object):
    """Wrapper for a Subversion repository.

    It uses the SWIG Python bindings, see above for requirements.
    """
    def __init__(self, svn_url: bytes = b'', username: bytes = b'', password: bytes = b''):

        self.username = username
        self.password = password
        self.svn_url = core.svn_path_canonicalize(svn_url)

        self.auth_baton_pool = core.Pool()
        self.auth_baton = _create_auth_baton(self.auth_baton_pool)
        # self.init_ra_and_client() assumes that a pool already exists
        self.pool = core.Pool()

        self.ra = self.init_ra_and_client()
        self.uuid = ra.get_uuid(self.ra, self.pool)

    def init_ra_and_client(self):
        """Initializes the RA and client layers, because sometimes getting
        unified diffs runs the remote server out of open files.
        """

        # push default credentials onto the auth baton when provided
        if self.username:
            core.svn_auth_set_parameter(self.auth_baton,
                                        core.SVN_AUTH_PARAM_DEFAULT_USERNAME,
                                        self.username)
        if self.password:
            core.svn_auth_set_parameter(self.auth_baton,
                                        core.SVN_AUTH_PARAM_DEFAULT_PASSWORD,
                                        self.password)

        callbacks = RaCallbacks()
        callbacks.auth_baton = self.auth_baton

        try:
            return ra.open2(self.svn_url, callbacks, svn_config, self.pool)
        except SubversionException as e:
            # NOTE(review): this catches the module-local SubversionException,
            # but ra.open2() raises the SWIG bindings' own exception type
            # (core.SubversionException) — confirm this handler is reachable.
            # e.child contains detailed error messages;
            # walk the chained exceptions collecting their messages
            msglist = []
            svn_exc = e
            while svn_exc:
                if svn_exc.args[0]:
                    msglist.append(svn_exc.args[0])
                svn_exc = svn_exc.child
            msg = '\n'.join(msglist)
            raise SubversionConnectionException(msg)
146
147
class svnremoterepo(object):
    """ the dumb wrapper for actual Subversion repositories """

    def __init__(self, username: bytes = b'', password: bytes = b'', svn_url: bytes = b''):
        self.username = username or b''
        self.password = password or b''
        self.path = normalize_url(svn_url)

    def svn(self):
        """
        Open and return a SubversionRepo for this url/credentials;
        connection failures are converted to mercurial's error.Abort.
        """
        try:
            return SubversionRepo(self.path, self.username, self.password)
        except SubversionConnectionException as e:
            # BUG FIX: safe_bytes() raises ValueError for anything that is not
            # str/bytes, so passing the exception instance directly could
            # never produce the intended Abort message — stringify it first.
            raise error.Abort(safe_bytes(str(e)))
@@ -0,0 +1,17 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
@@ -0,0 +1,133 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import typing
19 import base64
20 import logging
21
22
23 log = logging.getLogger(__name__)
24
25
def safe_int(val, default=None) -> int:
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val: value to convert
    :param default: returned when ``int(val)`` raises (default None —
        note the ``-> int`` annotation is therefore only nominal)
    """
    try:
        val = int(val)
    except (ValueError, TypeError):
        val = default

    return val


def base64_to_str(text) -> str:
    # NOTE: despite the name, this *encodes* ``text`` to a base64 string.
    return safe_str(base64.encodebytes(safe_bytes(text))).strip()


def get_default_encodings() -> list[str]:
    # default encoding candidates used by safe_str()/safe_bytes()
    return ['utf8']


def safe_str(str_, to_encoding=None) -> str:
    """
    safe str function. Does few trick to turn unicode_ into string

    :param str_: str to encode
    :param to_encoding: encode to this type UTF8 default
    """
    if isinstance(str_, str):
        return str_

    # if it's not bytes either, fall back to the plain repr
    if not isinstance(str_, bytes):
        return str(str_)

    to_encoding = to_encoding or get_default_encodings()
    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    # try each candidate encoding in order
    for enc in to_encoding:
        try:
            return str(str_, enc)
        except UnicodeDecodeError:
            pass

    # last resort: first encoding with undecodable bytes replaced
    return str(str_, to_encoding[0], 'replace')


def safe_bytes(str_, from_encoding=None) -> bytes:
    """
    safe bytes function. Does few trick to turn str_ into bytes string:

    :param str_: string to encode
    :param from_encoding: encode from this type UTF8 default
    :raises ValueError: when ``str_`` is neither str nor bytes
    """
    if isinstance(str_, bytes):
        return str_

    if not isinstance(str_, str):
        raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')

    from_encoding = from_encoding or get_default_encodings()
    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    for enc in from_encoding:
        try:
            return str_.encode(enc)
        # BUG FIX: str.encode raises UnicodeEncodeError (not UnicodeDecodeError),
        # so the original except clause never matched and the 'replace'
        # fallback below was unreachable — encode failures propagated instead.
        except UnicodeEncodeError:
            pass

    # last resort: first encoding with unencodable characters replaced
    return str_.encode(from_encoding[0], 'replace')


def ascii_bytes(str_, allow_bytes=False) -> bytes:
    """
    Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
    Fails with UnicodeError on invalid input.
    This should be used where encoding and "safe" ambiguity should be avoided.
    Where strings already have been encoded in other ways but still are unicode
    string - for example to hex, base64, json, urlencoding, or are known to be
    identifiers.
    """
    if allow_bytes and isinstance(str_, bytes):
        return str_

    if not isinstance(str_, str):
        raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
    return str_.encode('ascii')


def ascii_str(str_) -> str:
    """
    Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
    Fails with UnicodeError on invalid input.
    This should be used where encoding and "safe" ambiguity should be avoided.
    Where strings are encoded but also in other ways are known to be ASCII, and
    where a unicode string is wanted without caring about encoding. For example
    to hex, base64, urlencoding, or are known to be identifiers.
    """

    if not isinstance(str_, bytes):
        raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
    return str_.decode('ascii')
@@ -0,0 +1,53 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import pytest
19 from vcsserver.str_utils import ascii_bytes, ascii_str
20
21
@pytest.mark.parametrize('given, expected', [
    ('a', b'a'),
    # NOTE(review): duplicate of the previous case — was a second distinct
    # input intended here?
    ('a', b'a'),
])
def test_ascii_bytes(given, expected):
    # pure-ASCII str encodes to the equivalent bytes
    assert ascii_bytes(given) == expected


@pytest.mark.parametrize('given', [
    'å',
    'å'.encode('utf8')
])
def test_ascii_bytes_raises(given):
    # non-ASCII str raises UnicodeEncodeError (a ValueError subclass);
    # bytes input is rejected because allow_bytes defaults to False
    with pytest.raises(ValueError):
        ascii_bytes(given)


@pytest.mark.parametrize('given, expected', [
    (b'a', 'a'),
])
def test_ascii_str(given, expected):
    # pure-ASCII bytes decode to the equivalent str
    assert ascii_str(given) == expected


@pytest.mark.parametrize('given', [
    'a',
    'å'.encode('utf8'),
    'å'
])
def test_ascii_str_raises(given):
    # str input is rejected outright; non-ASCII bytes raise
    # UnicodeDecodeError (a ValueError subclass)
    with pytest.raises(ValueError):
        ascii_str(given)
@@ -0,0 +1,67 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19 import logging
20
21 log = logging.getLogger(__name__)
22
23
def str2bool(str_):
    """
    returns True/False value from given string, it tries to translate the
    string into boolean

    :param str_: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if str_ is None:
        return False
    # pass booleans straight through (note: also matches ints 0/1,
    # since 1 == True and 0 == False in Python)
    if str_ in (True, False):
        return str_
    normalized = str(str_).strip().lower()
    return normalized in ('t', 'true', 'y', 'yes', 'on', '1')
39
40
def aslist(obj, sep=None, strip=True) -> list:
    """
    Returns given string separated by sep as list

    :param obj: string, list/tuple (returned as-is), None (empty list),
        or any other value (wrapped in a one-element list)
    :param sep: separator passed to ``str.split``
    :param strip: strip whitespace from each split element
    """
    if obj is None:
        return []
    if isinstance(obj, (list, tuple)):
        # returned unchanged, tuples included
        return obj
    if isinstance(obj, str):
        if not obj:
            return []
        parts = obj.split(sep)
        return [part.strip() for part in parts] if strip else parts
    # scalar fallback: wrap in a list
    return [obj]
63
64
def assert_bytes(input_type, expected_types=(bytes,)):
    """Raise ValueError unless ``input_type`` is an instance of ``expected_types``."""
    if isinstance(input_type, expected_types):
        return
    raise ValueError(f'input_types should be one of {expected_types} got {type(input_type)} instead')
@@ -1,5 +1,5 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.28.0
2 current_version = 5.0.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
@@ -1,4 +1,5 b''
1 syntax: glob
1 syntax: glob
2
2 *.orig
3 *.orig
3 *.pyc
4 *.pyc
4 *.swp
5 *.swp
@@ -19,8 +20,11 b' syntax: regexp'
19 ^\.pydevproject$
20 ^\.pydevproject$
20 ^\.coverage$
21 ^\.coverage$
21 ^\.cache.*$
22 ^\.cache.*$
23 ^\.venv.*$
24 ^\.ruff_cache.*$
22 ^\.rhodecode$
25 ^\.rhodecode$
23
26
27
24 ^.dev
28 ^.dev
25 ^build/
29 ^build/
26 ^coverage\.xml$
30 ^coverage\.xml$
@@ -1,45 +1,111 b''
1 .DEFAULT_GOAL := help
1 # required for pushd to work..
2 #SHELL = /bin/bash
3
2
4
3 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
5 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
4 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
6 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
5
7
6 .PHONY: clean
8 .PHONY: clean
7 clean: ## full clean
9 ## Cleanup compiled and cache py files
10 clean:
8 make test-clean
11 make test-clean
9 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
12 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
10
13
11
14
12 .PHONY: test
15 .PHONY: test
13 test: ## run test-clean and tests
16 ## run test-clean and tests
17 test:
14 make test-clean
18 make test-clean
15 make test-only
19 make test-only
16
20
17
21
18 .PHONY:test-clean
22 .PHONY: test-clean
19 test-clean: ## run test-clean and tests
23 ## run test-clean and tests
24 test-clean:
20 rm -rf coverage.xml htmlcov junit.xml pylint.log result
25 rm -rf coverage.xml htmlcov junit.xml pylint.log result
21 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
26 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
22 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
27 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
23
28
24
29
25 .PHONY: test-only
30 .PHONY: test-only
26 test-only: ## run tests
31 ## Run tests only without cleanup
32 test-only:
27 PYTHONHASHSEED=random \
33 PYTHONHASHSEED=random \
28 py.test -x -vv -r xw -p no:sugar \
34 py.test -x -vv -r xw -p no:sugar \
29 --cov=vcsserver --cov-report=term-missing --cov-report=html \
35 --cov-report=term-missing --cov-report=html \
30 vcsserver
36 --cov=vcsserver vcsserver
31
32
33 .PHONY: generate-pkgs
34 generate-pkgs: ## generate new python packages
35 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
36
37
37
38
38 .PHONY: pip-packages
39 .PHONY: pip-packages
39 pip-packages: ## show outdated packages
40 ## Show outdated packages
41 pip-packages:
40 python ${OUTDATED_PACKAGES}
42 python ${OUTDATED_PACKAGES}
41
43
42
44
43 .PHONY: help
45 .PHONY: build
44 help:
46 ## Build sdist/egg
45 @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-24s\033[0m %s\n", $$1, $$2}'
47 build:
48 python -m build
49
50
51 .PHONY: dev-env
52 ## make dev-env based on the requirements files and install develop of packages
53 dev-env:
54 pip install build virtualenv
55 pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_test.txt -r requirements_debug.txt
56 pip install --no-index --find-links=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_test.txt -r requirements_debug.txt
57 pip install -e .
58
59
60 .PHONY: dev-srv
61 ## run develop server instance
62 dev-srv:
63 pserve --reload .dev/dev.ini
64
65
66 .PHONY: dev-srv-g
67 ## run gunicorn multi process workers
68 dev-srv-g:
69 gunicorn --workers=4 --paste .dev/dev.ini --bind=0.0.0.0:10010 --worker-class=sync --threads=1 --config=configs/gunicorn_config.py --timeout=120
70
71 # Default command on calling make
72 .DEFAULT_GOAL := show-help
73
74 .PHONY: show-help
75 show-help:
76 @echo "$$(tput bold)Available rules:$$(tput sgr0)"
77 @echo
78 @sed -n -e "/^## / { \
79 h; \
80 s/.*//; \
81 :doc" \
82 -e "H; \
83 n; \
84 s/^## //; \
85 t doc" \
86 -e "s/:.*//; \
87 G; \
88 s/\\n## /---/; \
89 s/\\n/ /g; \
90 p; \
91 }" ${MAKEFILE_LIST} \
92 | LC_ALL='C' sort --ignore-case \
93 | awk -F '---' \
94 -v ncol=$$(tput cols) \
95 -v indent=19 \
96 -v col_on="$$(tput setaf 6)" \
97 -v col_off="$$(tput sgr0)" \
98 '{ \
99 printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
100 n = split($$2, words, " "); \
101 line_length = ncol - indent; \
102 for (i = 1; i <= n; i++) { \
103 line_length -= length(words[i]) + 1; \
104 if (line_length <= 0) { \
105 line_length = ncol - indent - length(words[i]) - 1; \
106 printf "\n%*s ", -indent, " "; \
107 } \
108 printf "%s ", words[i]; \
109 } \
110 printf "\n"; \
111 }'
@@ -1,4 +1,4 b''
1 ## -*- coding: utf-8 -*-
1 #
2
2
3 ; #################################
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
4 ; RHODECODE VCSSERVER CONFIGURATION
@@ -57,12 +57,9 b' def _get_process_rss(pid=None):'
57
57
58
58
59 def _get_config(ini_path):
59 def _get_config(ini_path):
60 import configparser
60
61
61 try:
62 try:
62 import configparser
63 except ImportError:
64 import ConfigParser as configparser
65 try:
66 config = configparser.RawConfigParser()
63 config = configparser.RawConfigParser()
67 config.read(ini_path)
64 config.read(ini_path)
68 return config
65 return config
@@ -336,7 +333,7 b' class RhodeCodeLogger(Logger):'
336 def now(self):
333 def now(self):
337 """ return date in RhodeCode Log format """
334 """ return date in RhodeCode Log format """
338 now = time.time()
335 now = time.time()
339 msecs = int((now - long(now)) * 1000)
336 msecs = int((now - int(now)) * 1000)
340 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
337 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
341
338
342 def atoms(self, resp, req, environ, request_time):
339 def atoms(self, resp, req, environ, request_time):
@@ -1,4 +1,4 b''
1 ## -*- coding: utf-8 -*-
1 #
2
2
3 ; #################################
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
4 ; RHODECODE VCSSERVER CONFIGURATION
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -39,7 +39,7 b' def repeat(request):'
39 @pytest.fixture(scope='session')
39 @pytest.fixture(scope='session')
40 def vcsserver_port(request):
40 def vcsserver_port(request):
41 port = get_available_port()
41 port = get_available_port()
42 print('Using vcsserver port %s' % (port, ))
42 print(f'Using vcsserver port {port}')
43 return port
43 return port
44
44
45
45
@@ -1,47 +1,56 b''
1 ## dependencies
1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
2
3 # our custom configobj
4 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
5
2
6 dogpile.cache==0.9.0
3 async-timeout==4.0.2
7 decorator==4.1.2
4 atomicwrites==1.4.1
8 dulwich==0.13.0
5 contextlib2==21.6.0
9 hgsubversion==1.9.3
6 cov-core==1.15.0
10 hg-evolve==9.1.0
7 coverage==7.2.3
11 mako==1.1.0
8 diskcache==5.6.1
12 markupsafe==1.1.1
9 dogpile.cache==1.2.2
13 mercurial==5.1.1
10 decorator==5.1.1
14 msgpack-python==0.5.6
11 stevedore==5.0.0
15
12 pbr==5.11.1
16 pastedeploy==2.1.0
13 dulwich==0.21.5
17 pyramid==1.10.4
14 urllib3==1.26.14
18 pyramid-mako==1.1.0
15 gunicorn==21.0.1
19 pygit2==0.28.2
16 packaging==23.1
20
17 hg-evolve==11.0.2
18 importlib-metadata==6.0.0
19 zipp==3.15.0
20 mercurial==6.3.3
21 mock==5.0.2
22 more-itertools==9.1.0
23 msgpack==1.0.5
24 orjson==3.9.2
25 psutil==5.9.5
26 py==1.11.0
27 pygit2==1.12.2
28 cffi==1.15.1
29 pycparser==2.21
30 pygments==2.15.1
31 pyparsing==3.0.9
32 pyramid==2.0.1
33 hupper==1.12
34 plaster==1.1.2
35 plaster-pastedeploy==1.0.1
36 pastedeploy==3.0.1
37 plaster==1.1.2
38 translationstring==1.4
39 venusian==3.0.0
40 webob==1.8.7
41 zope.deprecation==5.0.0
42 zope.interface==6.0.0
43 redis==4.6.0
44 async-timeout==4.0.2
21 repoze.lru==0.7
45 repoze.lru==0.7
22 redis==3.5.3
46 scandir==1.10.0
23 simplejson==3.16.0
47 setproctitle==1.3.2
24 subprocess32==3.5.4
48 subvertpy==0.11.0
25 subvertpy==0.10.1
49 wcwidth==0.2.6
26
50
27 six==1.11.0
28 translationstring==1.3
29 webob==1.8.5
30 zope.deprecation==4.4.0
31 zope.interface==4.6.0
32
33 ## http servers
34 gevent==1.5.0
35 greenlet==0.4.15
36 gunicorn==19.9.0
37 waitress==1.3.1
38
39 ## debug
40 ipdb==0.13.2
41 ipython==5.1.0
42
51
43 ## test related requirements
52 ## test related requirements
44 -r requirements_test.txt
53 #-r requirements_test.txt
45
54
46 ## uncomment to add the debug libraries
55 ## uncomment to add the debug libraries
47 #-r requirements_debug.txt
56 #-r requirements_debug.txt
@@ -1,8 +1,22 b''
1 ## special libraries we could extend the requirements.txt file with to add some
1 ## special libraries we could extend the requirements.txt file with to add some
2 ## custom libraries useful for debug and memory tracing
2 ## custom libraries usefull for debug and memory tracing
3
4 ## uncomment inclusion of this file in requirements.txt run make generate-pkgs and nix-shell
5
3
6 objgraph
4 objgraph
7 memory-profiler
5 memory-profiler
8 pympler
6 pympler
7
8 ## debug
9 ipdb
10 ipython
11 rich
12
13 # format
14 flake8
15 ruff
16
17 pipdeptree==2.7.1
18 invoke==2.0.0
19 bumpversion==0.6.0
20 bump2version==1.0.1
21
22 docutils-stubs
@@ -1,16 +1,45 b''
1 # test related requirements
1 # test related requirements
2 pytest==4.6.9
2
3 py==1.8.1
3 cov-core==1.15.0
4 pytest-cov==2.8.1
4 coverage==7.2.3
5 pytest-sugar==0.9.3
5 mock==5.0.2
6 pytest-runner==5.2.0
6 py==1.11.0
7 pytest-cov==4.0.0
8 coverage==7.2.3
9 pytest==7.3.1
10 attrs==22.2.0
11 iniconfig==2.0.0
12 packaging==23.1
13 pluggy==1.0.0
7 pytest-profiling==1.7.0
14 pytest-profiling==1.7.0
8 pytest-timeout==1.3.3
15 gprof2dot==2022.7.29
9 gprof2dot==2017.9.19
16 pytest==7.3.1
17 attrs==22.2.0
18 iniconfig==2.0.0
19 packaging==23.1
20 pluggy==1.0.0
21 six==1.16.0
22 pytest-runner==6.0.0
23 pytest-sugar==0.9.7
24 packaging==23.1
25 pytest==7.3.1
26 attrs==22.2.0
27 iniconfig==2.0.0
28 packaging==23.1
29 pluggy==1.0.0
30 termcolor==2.3.0
31 pytest-timeout==2.1.0
32 pytest==7.3.1
33 attrs==22.2.0
34 iniconfig==2.0.0
35 packaging==23.1
36 pluggy==1.0.0
37 webtest==3.0.0
38 beautifulsoup4==4.11.2
39 soupsieve==2.4
40 waitress==2.1.2
41 webob==1.8.7
10
42
11 mock==3.0.5
43 # RhodeCode test-data
12 cov-core==1.15.0
44 rc_testdata @ https://code.rhodecode.com/upstream/rc-testdata-dist/raw/77378e9097f700b4c1b9391b56199fe63566b5c9/rc_testdata-0.11.0.tar.gz#egg=rc_testdata
13 coverage==4.5.4
45 rc_testdata==0.11.0
14
15 webtest==2.0.34
16 beautifulsoup4==4.6.3
@@ -1,1 +1,1 b''
1 4.28.0 No newline at end of file
1 5.0.0 No newline at end of file
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -18,7 +18,7 b''
18 import pkgutil
18 import pkgutil
19
19
20
20
21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip().decode()
22
22
23 # link to config for pyramid
23 # link to config for pyramid
24 CONFIG = {}
24 CONFIG = {}
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -16,13 +16,18 b''
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import tempfile
19 import traceback
20 import traceback
20 import logging
21 import logging
21 import urlparse
22 import urllib.parse
23
24 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
25 from vcsserver.lib.rc_cache import region_meta
22
26
23 from vcsserver import exceptions
27 from vcsserver import exceptions
24 from vcsserver.exceptions import NoContentException
28 from vcsserver.exceptions import NoContentException
25 from vcsserver.hgcompat import (archival)
29 from vcsserver.hgcompat import archival
30 from vcsserver.str_utils import safe_bytes
26
31
27 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
28
33
@@ -37,7 +42,7 b' class RepoFactory(object):'
37 repo_type = None
42 repo_type = None
38
43
39 def __init__(self):
44 def __init__(self):
40 pass
45 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
41
46
42 def _create_config(self, path, config):
47 def _create_config(self, path, config):
43 config = {}
48 config = {}
@@ -55,30 +60,33 b' def obfuscate_qs(query_string):'
55 return None
60 return None
56
61
57 parsed = []
62 parsed = []
58 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
63 for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
59 if k in ['auth_token', 'api_key']:
64 if k in ['auth_token', 'api_key']:
60 v = "*****"
65 v = "*****"
61 parsed.append((k, v))
66 parsed.append((k, v))
62
67
63 return '&'.join('{}{}'.format(
68 return '&'.join('{}{}'.format(
64 k, '={}'.format(v) if v else '') for k, v in parsed)
69 k, f'={v}' if v else '') for k, v in parsed)
65
70
66
71
67 def raise_from_original(new_type):
72 def raise_from_original(new_type, org_exc: Exception):
68 """
73 """
69 Raise a new exception type with original args and traceback.
74 Raise a new exception type with original args and traceback.
70 """
75 """
76
71 exc_type, exc_value, exc_traceback = sys.exc_info()
77 exc_type, exc_value, exc_traceback = sys.exc_info()
72 new_exc = new_type(*exc_value.args)
78 new_exc = new_type(*exc_value.args)
79
73 # store the original traceback into the new exc
80 # store the original traceback into the new exc
74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
81 new_exc._org_exc_tb = traceback.format_tb(exc_traceback)
75
82
76 try:
83 try:
77 raise new_exc, None, exc_traceback
84 raise new_exc.with_traceback(exc_traceback)
78 finally:
85 finally:
79 del exc_traceback
86 del exc_traceback
80
87
81
88
89
82 class ArchiveNode(object):
90 class ArchiveNode(object):
83 def __init__(self, path, mode, is_link, raw_bytes):
91 def __init__(self, path, mode, is_link, raw_bytes):
84 self.path = path
92 self.path = path
@@ -87,34 +95,58 b' class ArchiveNode(object):'
87 self.raw_bytes = raw_bytes
95 self.raw_bytes = raw_bytes
88
96
89
97
90 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
98 def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
91 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
99 commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
92 """
100 """
101 Function that would store an generate archive and send it to a dedicated backend store
102 In here we use diskcache
103
104 :param node_walker: a generator returning nodes to add to archive
105 :param archive_key: key used to store the path
106 :param kind: archive kind
107 :param mtime: time of creation
108 :param archive_at_path: default '/' the path at archive was started. if this is not '/' it means it's a partial archive
109 :param archive_dir_name: inside dir name when creating an archive
110 :param commit_id: commit sha of revision archive was created at
111 :param write_metadata:
112 :param extra_metadata:
113 :param cache_config:
114
93 walker should be a file walker, for example:
115 walker should be a file walker, for example:
94 def walker():
116 def node_walker():
95 for file_info in files:
117 for file_info in files:
96 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
118 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
97 """
119 """
98 extra_metadata = extra_metadata or {}
120 extra_metadata = extra_metadata or {}
99
121
122 d_cache = get_archival_cache_store(config=cache_config)
123
124 if archive_key in d_cache:
125 with d_cache as d_cache_reader:
126 reader, tag = d_cache_reader.get(archive_key, read=True, tag=True, retry=True)
127 return reader.name
128
129 archive_tmp_path = safe_bytes(tempfile.mkstemp()[1])
130 log.debug('Creating new temp archive in %s', archive_tmp_path)
131
100 if kind == "tgz":
132 if kind == "tgz":
101 archiver = archival.tarit(archive_dest_path, mtime, "gz")
133 archiver = archival.tarit(archive_tmp_path, mtime, b"gz")
102 elif kind == "tbz2":
134 elif kind == "tbz2":
103 archiver = archival.tarit(archive_dest_path, mtime, "bz2")
135 archiver = archival.tarit(archive_tmp_path, mtime, b"bz2")
104 elif kind == 'zip':
136 elif kind == 'zip':
105 archiver = archival.zipit(archive_dest_path, mtime)
137 archiver = archival.zipit(archive_tmp_path, mtime)
106 else:
138 else:
107 raise exceptions.ArchiveException()(
139 raise exceptions.ArchiveException()(
108 'Remote does not support: "%s" archive type.' % kind)
140 f'Remote does not support: "{kind}" archive type.')
109
141
110 for f in walker(commit_id, archive_at_path):
142 for f in node_walker(commit_id, archive_at_path):
111 f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
143 f_path = os.path.join(safe_bytes(archive_dir_name), safe_bytes(f.path).lstrip(b'/'))
112 try:
144 try:
113 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
145 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
114 except NoContentException:
146 except NoContentException:
115 # NOTE(marcink): this is a special case for SVN so we can create "empty"
147 # NOTE(marcink): this is a special case for SVN so we can create "empty"
116 # directories which arent supported by archiver
148 # directories which arent supported by archiver
117 archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
149 archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')
118
150
119 if write_metadata:
151 if write_metadata:
120 metadata = dict([
152 metadata = dict([
@@ -123,8 +155,41 b' def archive_repo(walker, archive_dest_pa'
123 ])
155 ])
124 metadata.update(extra_metadata)
156 metadata.update(extra_metadata)
125
157
126 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
158 meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
127 f_path = os.path.join(archive_dir_name, '.archival.txt')
159 f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
128 archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
160 archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))
161
162 archiver.done()
163
164 # ensure set & get are atomic
165 with d_cache.transact():
166
167 with open(archive_tmp_path, 'rb') as archive_file:
168 add_result = d_cache.set(archive_key, archive_file, read=True, tag='db-name', retry=True)
169 if not add_result:
170 log.error('Failed to store cache for key=%s', archive_key)
171
172 os.remove(archive_tmp_path)
129
173
130 return archiver.done()
174 reader, tag = d_cache.get(archive_key, read=True, tag=True, retry=True)
175 if not reader:
176 raise AssertionError(f'empty reader on key={archive_key} added={add_result}')
177
178 return reader.name
179
180
181 class BinaryEnvelope(object):
182 def __init__(self, val):
183 self.val = val
184
185
186 class BytesEnvelope(bytes):
187 def __new__(cls, content):
188 if isinstance(content, bytes):
189 return super().__new__(cls, content)
190 else:
191 raise TypeError('Content must be bytes.')
192
193
194 class BinaryBytesEnvelope(BytesEnvelope):
195 pass
@@ -0,0 +1,1 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
@@ -1,6 +1,4 b''
1 # -*- coding: utf-8 -*-
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -25,6 +23,9 b' import functools'
25 import logging
23 import logging
26 import tempfile
24 import tempfile
27 import logging.config
25 import logging.config
26
27 from vcsserver.type_utils import str2bool, aslist
28
28 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
29
30
30 # skip keys, that are set here, so we don't double process those
31 # skip keys, that are set here, so we don't double process those
@@ -33,47 +34,6 b' set_keys = {'
33 }
34 }
34
35
35
36
36 def str2bool(_str):
37 """
38 returns True/False value from given string, it tries to translate the
39 string into boolean
40
41 :param _str: string value to translate into boolean
42 :rtype: boolean
43 :returns: boolean from given string
44 """
45 if _str is None:
46 return False
47 if _str in (True, False):
48 return _str
49 _str = str(_str).strip().lower()
50 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
51
52
53 def aslist(obj, sep=None, strip=True):
54 """
55 Returns given string separated by sep as list
56
57 :param obj:
58 :param sep:
59 :param strip:
60 """
61 if isinstance(obj, (basestring,)):
62 if obj in ['', ""]:
63 return []
64
65 lst = obj.split(sep)
66 if strip:
67 lst = [v.strip() for v in lst]
68 return lst
69 elif isinstance(obj, (list, tuple)):
70 return obj
71 elif obj is None:
72 return []
73 else:
74 return [obj]
75
76
77 class SettingsMaker(object):
37 class SettingsMaker(object):
78
38
79 def __init__(self, app_settings):
39 def __init__(self, app_settings):
@@ -81,8 +41,9 b' class SettingsMaker(object):'
81
41
82 @classmethod
42 @classmethod
83 def _bool_func(cls, input_val):
43 def _bool_func(cls, input_val):
84 if isinstance(input_val, unicode):
44 if isinstance(input_val, bytes):
85 input_val = input_val.encode('utf8')
45 # decode to str
46 input_val = input_val.decode('utf8')
86 return str2bool(input_val)
47 return str2bool(input_val)
87
48
88 @classmethod
49 @classmethod
@@ -108,10 +69,10 b' class SettingsMaker(object):'
108
69
109 # ensure we have our dir created
70 # ensure we have our dir created
110 if not os.path.isdir(input_val) and ensure_dir:
71 if not os.path.isdir(input_val) and ensure_dir:
111 os.makedirs(input_val, mode=mode)
72 os.makedirs(input_val, mode=mode, exist_ok=True)
112
73
113 if not os.path.isdir(input_val):
74 if not os.path.isdir(input_val):
114 raise Exception('Dir at {} does not exist'.format(input_val))
75 raise Exception(f'Dir at {input_val} does not exist')
115 return input_val
76 return input_val
116
77
117 @classmethod
78 @classmethod
@@ -163,7 +124,7 b' class SettingsMaker(object):'
163 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
124 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
164 return
125 return
165
126
166 with open(logging_conf, 'rb') as f:
127 with open(logging_conf, 'rt') as f:
167 ini_template = textwrap.dedent(f.read())
128 ini_template = textwrap.dedent(f.read())
168 ini_template = string.Template(ini_template).safe_substitute(
129 ini_template = string.Template(ini_template).safe_substitute(
169 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
130 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
@@ -1,3 +1,5 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2
1 """
3 """
2 Provides a stub implementation for VCS operations.
4 Provides a stub implementation for VCS operations.
3
5
@@ -1,3 +1,5 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2
1 """
3 """
2 Implementation of :class:`EchoApp`.
4 Implementation of :class:`EchoApp`.
3
5
@@ -23,7 +25,7 b' class EchoApp(object):'
23 status = '200 OK'
25 status = '200 OK'
24 headers = [('Content-Type', 'text/plain')]
26 headers = [('Content-Type', 'text/plain')]
25 start_response(status, headers)
27 start_response(status, headers)
26 return ["ECHO"]
28 return [b"ECHO"]
27
29
28
30
29 class EchoAppStream(object):
31 class EchoAppStream(object):
@@ -41,8 +43,8 b' class EchoAppStream(object):'
41 start_response(status, headers)
43 start_response(status, headers)
42
44
43 def generator():
45 def generator():
44 for _ in xrange(1000000):
46 for _ in range(1000000):
45 yield "ECHO"
47 yield b"ECHO_STREAM"
46 return generator()
48 return generator()
47
49
48
50
@@ -1,3 +1,5 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2
1 """
3 """
2 Provides the same API as :mod:`remote_wsgi`.
4 Provides the same API as :mod:`remote_wsgi`.
3
5
@@ -42,4 +44,4 b' def _assert_valid_config(config):'
42 config = config.copy()
44 config = config.copy()
43
45
44 # This is what git needs from config at this stage
46 # This is what git needs from config at this stage
45 config.pop('git_update_server_info')
47 config.pop(b'git_update_server_info')
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -109,7 +109,7 b' class HTTPRepoLocked(HTTPLocked):'
109 def __init__(self, title, status_code=None, **kwargs):
109 def __init__(self, title, status_code=None, **kwargs):
110 self.code = status_code or HTTPLocked.code
110 self.code = status_code or HTTPLocked.code
111 self.title = title
111 self.title = title
112 super(HTTPRepoLocked, self).__init__(**kwargs)
112 super().__init__(**kwargs)
113
113
114
114
115 class HTTPRepoBranchProtected(HTTPForbidden):
115 class HTTPRepoBranchProtected(HTTPForbidden):
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -16,4 +16,4 b''
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from app import create_app
19 from .app import create_app
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -19,16 +19,16 b' import re'
19 import logging
19 import logging
20 from wsgiref.util import FileWrapper
20 from wsgiref.util import FileWrapper
21
21
22 import simplejson as json
23 from pyramid.config import Configurator
22 from pyramid.config import Configurator
24 from pyramid.response import Response, FileIter
23 from pyramid.response import Response, FileIter
25 from pyramid.httpexceptions import (
24 from pyramid.httpexceptions import (
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 HTTPUnprocessableEntity)
26 HTTPUnprocessableEntity)
28
27
28 from vcsserver.lib.rc_json import json
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.utils import safe_int
31 from vcsserver.str_utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
@@ -212,7 +212,7 b' def lfs_objects_verify(request):'
212
212
213 store_size = store.size_oid()
213 store_size = store.size_oid()
214 if store_size != size:
214 if store_size != size:
215 msg = 'requested file size mismatch store size:%s requested:%s' % (
215 msg = 'requested file size mismatch store size:{} requested:{}'.format(
216 store_size, size)
216 store_size, size)
217 return write_response_error(
217 return write_response_error(
218 HTTPUnprocessableEntity, msg)
218 HTTPUnprocessableEntity, msg)
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -51,7 +51,7 b' class OidHandler(object):'
51
51
52 if not store.has_oid():
52 if not store.has_oid():
53 # error reply back to client that something is wrong with dl
53 # error reply back to client that something is wrong with dl
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
54 err_msg = f'object: {store.oid} does not exist in store'
55 has_errors = OrderedDict(
55 has_errors = OrderedDict(
56 error=OrderedDict(
56 error=OrderedDict(
57 code=404,
57 code=404,
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -18,8 +18,9 b''
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21 import simplejson as json
22
21
22 from vcsserver.lib.rc_json import json
23 from vcsserver.str_utils import safe_bytes
23 from vcsserver.git_lfs.app import create_app
24 from vcsserver.git_lfs.app import create_app
24
25
25
26
@@ -54,19 +55,19 b' class TestLFSApplication(object):'
54 def test_app_deprecated_endpoint(self, git_lfs_app):
55 def test_app_deprecated_endpoint(self, git_lfs_app):
55 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
56 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
56 assert response.status_code == 501
57 assert response.status_code == 501
57 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
58 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
58
59
59 def test_app_lock_verify_api_not_available(self, git_lfs_app):
60 def test_app_lock_verify_api_not_available(self, git_lfs_app):
60 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
61 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
61 assert response.status_code == 501
62 assert response.status_code == 501
62 assert json.loads(response.text) == {
63 assert json.loads(response.text) == {
63 u'message': u'GIT LFS locking api not supported'}
64 'message': 'GIT LFS locking api not supported'}
64
65
65 def test_app_lock_api_not_available(self, git_lfs_app):
66 def test_app_lock_api_not_available(self, git_lfs_app):
66 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
67 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
67 assert response.status_code == 501
68 assert response.status_code == 501
68 assert json.loads(response.text) == {
69 assert json.loads(response.text) == {
69 u'message': u'GIT LFS locking api not supported'}
70 'message': 'GIT LFS locking api not supported'}
70
71
71 def test_app_batch_api_missing_auth(self, git_lfs_app):
72 def test_app_batch_api_missing_auth(self, git_lfs_app):
72 git_lfs_app.post_json(
73 git_lfs_app.post_json(
@@ -77,14 +78,14 b' class TestLFSApplication(object):'
77 '/repo/info/lfs/objects/batch', params={}, status=400,
78 '/repo/info/lfs/objects/batch', params={}, status=400,
78 extra_environ=http_auth)
79 extra_environ=http_auth)
79 assert json.loads(response.text) == {
80 assert json.loads(response.text) == {
80 u'message': u'unsupported operation mode: `None`'}
81 'message': 'unsupported operation mode: `None`'}
81
82
82 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
83 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
83 response = git_lfs_app.post_json(
84 response = git_lfs_app.post_json(
84 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
85 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
85 status=400, extra_environ=http_auth)
86 status=400, extra_environ=http_auth)
86 assert json.loads(response.text) == {
87 assert json.loads(response.text) == {
87 u'message': u'missing objects data'}
88 'message': 'missing objects data'}
88
89
89 def test_app_batch_api_unsupported_data_in_objects(
90 def test_app_batch_api_unsupported_data_in_objects(
90 self, git_lfs_app, http_auth):
91 self, git_lfs_app, http_auth):
@@ -94,7 +95,7 b' class TestLFSApplication(object):'
94 '/repo/info/lfs/objects/batch', params=params, status=400,
95 '/repo/info/lfs/objects/batch', params=params, status=400,
95 extra_environ=http_auth)
96 extra_environ=http_auth)
96 assert json.loads(response.text) == {
97 assert json.loads(response.text) == {
97 u'message': u'unsupported data in objects'}
98 'message': 'unsupported data in objects'}
98
99
99 def test_app_batch_api_download_missing_object(
100 def test_app_batch_api_download_missing_object(
100 self, git_lfs_app, http_auth):
101 self, git_lfs_app, http_auth):
@@ -105,12 +106,12 b' class TestLFSApplication(object):'
105 extra_environ=http_auth)
106 extra_environ=http_auth)
106
107
107 expected_objects = [
108 expected_objects = [
108 {u'authenticated': True,
109 {'authenticated': True,
109 u'errors': {u'error': {
110 'errors': {'error': {
110 u'code': 404,
111 'code': 404,
111 u'message': u'object: 123 does not exist in store'}},
112 'message': 'object: 123 does not exist in store'}},
112 u'oid': u'123',
113 'oid': '123',
113 u'size': u'1024'}
114 'size': '1024'}
114 ]
115 ]
115 assert json.loads(response.text) == {
116 assert json.loads(response.text) == {
116 'objects': expected_objects, 'transfer': 'basic'}
117 'objects': expected_objects, 'transfer': 'basic'}
@@ -121,7 +122,7 b' class TestLFSApplication(object):'
121 if not os.path.isdir(os.path.dirname(oid_path)):
122 if not os.path.isdir(os.path.dirname(oid_path)):
122 os.makedirs(os.path.dirname(oid_path))
123 os.makedirs(os.path.dirname(oid_path))
123 with open(oid_path, 'wb') as f:
124 with open(oid_path, 'wb') as f:
124 f.write('OID_CONTENT')
125 f.write(safe_bytes('OID_CONTENT'))
125
126
126 params = {'operation': 'download',
127 params = {'operation': 'download',
127 'objects': [{'oid': oid, 'size': '1024'}]}
128 'objects': [{'oid': oid, 'size': '1024'}]}
@@ -130,14 +131,14 b' class TestLFSApplication(object):'
130 extra_environ=http_auth)
131 extra_environ=http_auth)
131
132
132 expected_objects = [
133 expected_objects = [
133 {u'authenticated': True,
134 {'authenticated': True,
134 u'actions': {
135 'actions': {
135 u'download': {
136 'download': {
136 u'header': {u'Authorization': u'Basic XXXXX'},
137 'header': {'Authorization': 'Basic XXXXX'},
137 u'href': u'http://localhost/repo/info/lfs/objects/456'},
138 'href': 'http://localhost/repo/info/lfs/objects/456'},
138 },
139 },
139 u'oid': u'456',
140 'oid': '456',
140 u'size': u'1024'}
141 'size': '1024'}
141 ]
142 ]
142 assert json.loads(response.text) == {
143 assert json.loads(response.text) == {
143 'objects': expected_objects, 'transfer': 'basic'}
144 'objects': expected_objects, 'transfer': 'basic'}
@@ -149,18 +150,18 b' class TestLFSApplication(object):'
149 '/repo/info/lfs/objects/batch', params=params,
150 '/repo/info/lfs/objects/batch', params=params,
150 extra_environ=http_auth)
151 extra_environ=http_auth)
151 expected_objects = [
152 expected_objects = [
152 {u'authenticated': True,
153 {'authenticated': True,
153 u'actions': {
154 'actions': {
154 u'upload': {
155 'upload': {
155 u'header': {u'Authorization': u'Basic XXXXX',
156 'header': {'Authorization': 'Basic XXXXX',
156 u'Transfer-Encoding': u'chunked'},
157 'Transfer-Encoding': 'chunked'},
157 u'href': u'http://localhost/repo/info/lfs/objects/123'},
158 'href': 'http://localhost/repo/info/lfs/objects/123'},
158 u'verify': {
159 'verify': {
159 u'header': {u'Authorization': u'Basic XXXXX'},
160 'header': {'Authorization': 'Basic XXXXX'},
160 u'href': u'http://localhost/repo/info/lfs/verify'}
161 'href': 'http://localhost/repo/info/lfs/verify'}
161 },
162 },
162 u'oid': u'123',
163 'oid': '123',
163 u'size': u'1024'}
164 'size': '1024'}
164 ]
165 ]
165 assert json.loads(response.text) == {
166 assert json.loads(response.text) == {
166 'objects': expected_objects, 'transfer': 'basic'}
167 'objects': expected_objects, 'transfer': 'basic'}
@@ -172,18 +173,18 b' class TestLFSApplication(object):'
172 '/repo/info/lfs/objects/batch', params=params,
173 '/repo/info/lfs/objects/batch', params=params,
173 extra_environ=http_auth)
174 extra_environ=http_auth)
174 expected_objects = [
175 expected_objects = [
175 {u'authenticated': True,
176 {'authenticated': True,
176 u'actions': {
177 'actions': {
177 u'upload': {
178 'upload': {
178 u'header': {u'Authorization': u'Basic XXXXX',
179 'header': {'Authorization': 'Basic XXXXX',
179 u'Transfer-Encoding': u'chunked'},
180 'Transfer-Encoding': 'chunked'},
180 u'href': u'https://localhost/repo/info/lfs/objects/123'},
181 'href': 'https://localhost/repo/info/lfs/objects/123'},
181 u'verify': {
182 'verify': {
182 u'header': {u'Authorization': u'Basic XXXXX'},
183 'header': {'Authorization': 'Basic XXXXX'},
183 u'href': u'https://localhost/repo/info/lfs/verify'}
184 'href': 'https://localhost/repo/info/lfs/verify'}
184 },
185 },
185 u'oid': u'123',
186 'oid': '123',
186 u'size': u'1024'}
187 'size': '1024'}
187 ]
188 ]
188 assert json.loads(response.text) == {
189 assert json.loads(response.text) == {
189 'objects': expected_objects, 'transfer': 'basic'}
190 'objects': expected_objects, 'transfer': 'basic'}
@@ -195,7 +196,7 b' class TestLFSApplication(object):'
195 status=400)
196 status=400)
196
197
197 assert json.loads(response.text) == {
198 assert json.loads(response.text) == {
198 u'message': u'missing oid and size in request data'}
199 'message': 'missing oid and size in request data'}
199
200
200 def test_app_verify_api_missing_obj(self, git_lfs_app):
201 def test_app_verify_api_missing_obj(self, git_lfs_app):
201 params = {'oid': 'missing', 'size': '1024'}
202 params = {'oid': 'missing', 'size': '1024'}
@@ -204,7 +205,7 b' class TestLFSApplication(object):'
204 status=404)
205 status=404)
205
206
206 assert json.loads(response.text) == {
207 assert json.loads(response.text) == {
207 u'message': u'oid `missing` does not exists in store'}
208 'message': 'oid `missing` does not exists in store'}
208
209
209 def test_app_verify_api_size_mismatch(self, git_lfs_app):
210 def test_app_verify_api_size_mismatch(self, git_lfs_app):
210 oid = 'existing'
211 oid = 'existing'
@@ -212,15 +213,15 b' class TestLFSApplication(object):'
212 if not os.path.isdir(os.path.dirname(oid_path)):
213 if not os.path.isdir(os.path.dirname(oid_path)):
213 os.makedirs(os.path.dirname(oid_path))
214 os.makedirs(os.path.dirname(oid_path))
214 with open(oid_path, 'wb') as f:
215 with open(oid_path, 'wb') as f:
215 f.write('OID_CONTENT')
216 f.write(safe_bytes('OID_CONTENT'))
216
217
217 params = {'oid': oid, 'size': '1024'}
218 params = {'oid': oid, 'size': '1024'}
218 response = git_lfs_app.post_json(
219 response = git_lfs_app.post_json(
219 '/repo/info/lfs/verify', params=params, status=422)
220 '/repo/info/lfs/verify', params=params, status=422)
220
221
221 assert json.loads(response.text) == {
222 assert json.loads(response.text) == {
222 u'message': u'requested file size mismatch '
223 'message': 'requested file size mismatch '
223 u'store size:11 requested:1024'}
224 'store size:11 requested:1024'}
224
225
225 def test_app_verify_api(self, git_lfs_app):
226 def test_app_verify_api(self, git_lfs_app):
226 oid = 'existing'
227 oid = 'existing'
@@ -228,14 +229,14 b' class TestLFSApplication(object):'
228 if not os.path.isdir(os.path.dirname(oid_path)):
229 if not os.path.isdir(os.path.dirname(oid_path)):
229 os.makedirs(os.path.dirname(oid_path))
230 os.makedirs(os.path.dirname(oid_path))
230 with open(oid_path, 'wb') as f:
231 with open(oid_path, 'wb') as f:
231 f.write('OID_CONTENT')
232 f.write(safe_bytes('OID_CONTENT'))
232
233
233 params = {'oid': oid, 'size': 11}
234 params = {'oid': oid, 'size': 11}
234 response = git_lfs_app.post_json(
235 response = git_lfs_app.post_json(
235 '/repo/info/lfs/verify', params=params)
236 '/repo/info/lfs/verify', params=params)
236
237
237 assert json.loads(response.text) == {
238 assert json.loads(response.text) == {
238 u'message': {u'size': u'ok', u'in_store': u'ok'}}
239 'message': {'size': 'ok', 'in_store': 'ok'}}
239
240
240 def test_app_download_api_oid_not_existing(self, git_lfs_app):
241 def test_app_download_api_oid_not_existing(self, git_lfs_app):
241 oid = 'missing'
242 oid = 'missing'
@@ -244,7 +245,7 b' class TestLFSApplication(object):'
244 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
245 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
245
246
246 assert json.loads(response.text) == {
247 assert json.loads(response.text) == {
247 u'message': u'requested file with oid `missing` not found in store'}
248 'message': 'requested file with oid `missing` not found in store'}
248
249
249 def test_app_download_api(self, git_lfs_app):
250 def test_app_download_api(self, git_lfs_app):
250 oid = 'existing'
251 oid = 'existing'
@@ -252,7 +253,7 b' class TestLFSApplication(object):'
252 if not os.path.isdir(os.path.dirname(oid_path)):
253 if not os.path.isdir(os.path.dirname(oid_path)):
253 os.makedirs(os.path.dirname(oid_path))
254 os.makedirs(os.path.dirname(oid_path))
254 with open(oid_path, 'wb') as f:
255 with open(oid_path, 'wb') as f:
255 f.write('OID_CONTENT')
256 f.write(safe_bytes('OID_CONTENT'))
256
257
257 response = git_lfs_app.get(
258 response = git_lfs_app.get(
258 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
259 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
@@ -264,7 +265,7 b' class TestLFSApplication(object):'
264 response = git_lfs_app.put(
265 response = git_lfs_app.put(
265 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
266 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
266
267
267 assert json.loads(response.text) == {u'upload': u'ok'}
268 assert json.loads(response.text) == {'upload': 'ok'}
268
269
269 # verify that we actually wrote that OID
270 # verify that we actually wrote that OID
270 oid_path = os.path.join(git_lfs_app._store, oid)
271 oid_path = os.path.join(git_lfs_app._store, oid)
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -17,6 +17,7 b''
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from vcsserver.str_utils import safe_bytes
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21
22
22
23
@@ -70,7 +71,7 b' class TestOidHandler(object):'
70 os.makedirs(os.path.dirname(store.oid_path))
71 os.makedirs(os.path.dirname(store.oid_path))
71
72
72 with open(store.oid_path, 'wb') as f:
73 with open(store.oid_path, 'wb') as f:
73 f.write('CONTENT')
74 f.write(safe_bytes('CONTENT'))
74
75
75 response, has_errors = oid_handler.exec_operation('download')
76 response, has_errors = oid_handler.exec_operation('download')
76
77
@@ -86,7 +87,7 b' class TestOidHandler(object):'
86 os.makedirs(os.path.dirname(store.oid_path))
87 os.makedirs(os.path.dirname(store.oid_path))
87
88
88 with open(store.oid_path, 'wb') as f:
89 with open(store.oid_path, 'wb') as f:
89 f.write('CONTENT')
90 f.write(safe_bytes('CONTENT'))
90 oid_handler.obj_size = 7
91 oid_handler.obj_size = 7
91 response, has_errors = oid_handler.exec_operation('upload')
92 response, has_errors = oid_handler.exec_operation('upload')
92 assert has_errors is None
93 assert has_errors is None
@@ -98,7 +99,7 b' class TestOidHandler(object):'
98 os.makedirs(os.path.dirname(store.oid_path))
99 os.makedirs(os.path.dirname(store.oid_path))
99
100
100 with open(store.oid_path, 'wb') as f:
101 with open(store.oid_path, 'wb') as f:
101 f.write('CONTENT')
102 f.write(safe_bytes('CONTENT'))
102
103
103 oid_handler.obj_size = 10240
104 oid_handler.obj_size = 10240
104 response, has_errors = oid_handler.exec_operation('upload')
105 response, has_errors = oid_handler.exec_operation('upload')
@@ -127,7 +128,7 b' class TestLFSStore(object):'
127
128
128 engine = lfs_store.get_engine(mode='wb')
129 engine = lfs_store.get_engine(mode='wb')
129 with engine as f:
130 with engine as f:
130 f.write('CONTENT')
131 f.write(safe_bytes('CONTENT'))
131
132
132 assert os.path.isfile(oid_location)
133 assert os.path.isfile(oid_location)
133
134
@@ -136,6 +137,6 b' class TestLFSStore(object):'
136 assert lfs_store.has_oid() is False
137 assert lfs_store.has_oid() is False
137 engine = lfs_store.get_engine(mode='wb')
138 engine = lfs_store.get_engine(mode='wb')
138 with engine as f:
139 with engine as f:
139 f.write('CONTENT')
140 f.write(safe_bytes('CONTENT'))
140
141
141 assert lfs_store.has_oid() is True No newline at end of file
142 assert lfs_store.has_oid() is True
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -21,8 +21,11 b' Mercurial libs compatibility'
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24
24 # patch demandimport, due to bug in mercurial when it always triggers
25 # patch demandimport, due to bug in mercurial when it always triggers
25 # demandimport.enable()
26 # demandimport.enable()
27 from vcsserver.str_utils import safe_bytes
28
26 demandimport.enable = lambda *args, **kwargs: 1
29 demandimport.enable = lambda *args, **kwargs: 1
27
30
28 from mercurial import ui
31 from mercurial import ui
@@ -39,7 +42,8 b' from mercurial import subrepo'
39 from mercurial import subrepoutil
42 from mercurial import subrepoutil
40 from mercurial import tags as hg_tag
43 from mercurial import tags as hg_tag
41 from mercurial import util as hgutil
44 from mercurial import util as hgutil
42 from mercurial.commands import clone, nullid, pull
45 from mercurial.commands import clone, pull
46 from mercurial.node import nullid
43 from mercurial.context import memctx, memfilectx
47 from mercurial.context import memctx, memfilectx
44 from mercurial.error import (
48 from mercurial.error import (
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
49 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
@@ -53,7 +57,7 b' from mercurial.encoding import tolocal'
53 from mercurial.discovery import findcommonoutgoing
57 from mercurial.discovery import findcommonoutgoing
54 from mercurial.hg import peer
58 from mercurial.hg import peer
55 from mercurial.httppeer import makepeer
59 from mercurial.httppeer import makepeer
56 from mercurial.util import url as hg_url
60 from mercurial.utils.urlutil import url as hg_url
57 from mercurial.scmutil import revrange, revsymbol
61 from mercurial.scmutil import revrange, revsymbol
58 from mercurial.node import nullrev
62 from mercurial.node import nullrev
59 from mercurial import exchange
63 from mercurial import exchange
@@ -63,8 +67,13 b' from hgext import largefiles'
63 # infinit looping when given invalid resources
67 # infinit looping when given invalid resources
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
68 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65
69
70 # hg strip is in core now
71 from mercurial import strip as hgext_strip
72
66
73
67 def get_ctx(repo, ref):
74 def get_ctx(repo, ref):
75 if not isinstance(ref, int):
76 ref = safe_bytes(ref)
68 try:
77 try:
69 ctx = repo[ref]
78 ctx = repo[ref]
70 except (ProgrammingError, TypeError):
79 except (ProgrammingError, TypeError):
@@ -73,7 +82,7 b' def get_ctx(repo, ref):'
73 ctx = revsymbol(repo, ref)
82 ctx = revsymbol(repo, ref)
74 except (LookupError, RepoLookupError):
83 except (LookupError, RepoLookupError):
75 # Similar case as above but only for refs that are not numeric
84 # Similar case as above but only for refs that are not numeric
76 if isinstance(ref, (int, long)):
85 if isinstance(ref, int):
77 raise
86 raise
78 ctx = revsymbol(repo, ref)
87 ctx = revsymbol(repo, ref)
79 return ctx
88 return ctx
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -62,7 +62,7 b' def _dynamic_capabilities_wrapper(lfprot'
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo, subrepoutil
65 from .hgcompat import subrepo, subrepoutil
66 from vcsserver.exceptions import SubrepoMergeException
66 from vcsserver.exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
@@ -1,7 +1,5 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
5 #
3 #
6 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -25,6 +23,7 b' import logging'
25 import pkg_resources
23 import pkg_resources
26
24
27 import vcsserver
25 import vcsserver
26 from vcsserver.str_utils import safe_bytes
28
27
29 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
30
29
@@ -49,7 +48,7 b' def install_git_hooks(repo_path, bare, e'
49 hooks_path = get_git_hooks_path(repo_path, bare)
48 hooks_path = get_git_hooks_path(repo_path, bare)
50
49
51 if not os.path.isdir(hooks_path):
50 if not os.path.isdir(hooks_path):
52 os.makedirs(hooks_path, mode=0o777)
51 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
53
52
54 tmpl_post = pkg_resources.resource_string(
53 tmpl_post = pkg_resources.resource_string(
55 'vcsserver', '/'.join(
54 'vcsserver', '/'.join(
@@ -70,14 +69,13 b' def install_git_hooks(repo_path, bare, e'
70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
69 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
71 try:
70 try:
72 with open(_hook_file, 'wb') as f:
71 with open(_hook_file, 'wb') as f:
73 template = template.replace(
72 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
74 '_TMPL_', vcsserver.__version__)
73 template = template.replace(b'_DATE_', safe_bytes(timestamp))
75 template = template.replace('_DATE_', timestamp)
74 template = template.replace(b'_ENV_', safe_bytes(executable))
76 template = template.replace('_ENV_', executable)
75 template = template.replace(b'_PATH_', safe_bytes(path))
77 template = template.replace('_PATH_', path)
78 f.write(template)
76 f.write(template)
79 os.chmod(_hook_file, 0o755)
77 os.chmod(_hook_file, 0o755)
80 except IOError:
78 except OSError:
81 log.exception('error writing hook file %s', _hook_file)
79 log.exception('error writing hook file %s', _hook_file)
82 else:
80 else:
83 log.debug('skipping writing hook file')
81 log.debug('skipping writing hook file')
@@ -102,7 +100,7 b' def install_svn_hooks(repo_path, executa'
102 executable = executable or sys.executable
100 executable = executable or sys.executable
103 hooks_path = get_svn_hooks_path(repo_path)
101 hooks_path = get_svn_hooks_path(repo_path)
104 if not os.path.isdir(hooks_path):
102 if not os.path.isdir(hooks_path):
105 os.makedirs(hooks_path, mode=0o777)
103 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
106
104
107 tmpl_post = pkg_resources.resource_string(
105 tmpl_post = pkg_resources.resource_string(
108 'vcsserver', '/'.join(
106 'vcsserver', '/'.join(
@@ -124,15 +122,14 b' def install_svn_hooks(repo_path, executa'
124
122
125 try:
123 try:
126 with open(_hook_file, 'wb') as f:
124 with open(_hook_file, 'wb') as f:
127 template = template.replace(
125 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
128 '_TMPL_', vcsserver.__version__)
126 template = template.replace(b'_DATE_', safe_bytes(timestamp))
129 template = template.replace('_DATE_', timestamp)
127 template = template.replace(b'_ENV_', safe_bytes(executable))
130 template = template.replace('_ENV_', executable)
128 template = template.replace(b'_PATH_', safe_bytes(path))
131 template = template.replace('_PATH_', path)
132
129
133 f.write(template)
130 f.write(template)
134 os.chmod(_hook_file, 0o755)
131 os.chmod(_hook_file, 0o755)
135 except IOError:
132 except OSError:
136 log.exception('error writing hook file %s', _hook_file)
133 log.exception('error writing hook file %s', _hook_file)
137 else:
134 else:
138 log.debug('skipping writing hook file')
135 log.debug('skipping writing hook file')
@@ -141,16 +138,16 b' def install_svn_hooks(repo_path, executa'
141
138
142
139
143 def get_version_from_hook(hook_path):
140 def get_version_from_hook(hook_path):
144 version = ''
141 version = b''
145 hook_content = read_hook_content(hook_path)
142 hook_content = read_hook_content(hook_path)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
143 matches = re.search(rb'RC_HOOK_VER\s*=\s*(.*)', hook_content)
147 if matches:
144 if matches:
148 try:
145 try:
149 version = matches.groups()[0]
146 version = matches.groups()[0]
150 log.debug('got version %s from hooks.', version)
147 log.debug('got version %s from hooks.', version)
151 except Exception:
148 except Exception:
152 log.exception("Exception while reading the hook version.")
149 log.exception("Exception while reading the hook version.")
153 return version.replace("'", "")
150 return version.replace(b"'", b"")
154
151
155
152
156 def check_rhodecode_hook(hook_path):
153 def check_rhodecode_hook(hook_path):
@@ -169,8 +166,8 b' def check_rhodecode_hook(hook_path):'
169 return False
166 return False
170
167
171
168
172 def read_hook_content(hook_path):
169 def read_hook_content(hook_path) -> bytes:
173 content = ''
170 content = b''
174 if os.path.isfile(hook_path):
171 if os.path.isfile(hook_path):
175 with open(hook_path, 'rb') as f:
172 with open(hook_path, 'rb') as f:
176 content = f.read()
173 content = f.read()
@@ -11,7 +11,7 b' try:'
11 except ImportError:
11 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
13 import traceback
14 print traceback.format_exc()
14 print(traceback.format_exc())
15 hooks = None
15 hooks = None
16
16
17
17
@@ -42,7 +42,7 b' def main():'
42 # TODO: johbo: Improve handling of this special case
42 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
44 raise
45 print 'ERROR:', error
45 print(f'ERROR: {error}')
46 sys.exit(1)
46 sys.exit(1)
47 sys.exit(0)
47 sys.exit(0)
48
48
@@ -11,7 +11,7 b' try:'
11 except ImportError:
11 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
13 import traceback
14 print traceback.format_exc()
14 print(traceback.format_exc())
15 hooks = None
15 hooks = None
16
16
17
17
@@ -42,7 +42,7 b' def main():'
42 # TODO: johbo: Improve handling of this special case
42 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
44 raise
45 print 'ERROR:', error
45 print(f'ERROR: {error}')
46 sys.exit(1)
46 sys.exit(1)
47 sys.exit(0)
47 sys.exit(0)
48
48
@@ -12,7 +12,7 b' try:'
12 except ImportError:
12 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
14 import traceback
15 print traceback.format_exc()
15 print(traceback.format_exc())
16 hooks = None
16 hooks = None
17
17
18
18
@@ -40,7 +40,7 b' def main():'
40 # TODO: johbo: Improve handling of this special case
40 # TODO: johbo: Improve handling of this special case
41 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
41 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
42 raise
42 raise
43 print 'ERROR:', error
43 print(f'ERROR: {error}')
44 sys.exit(1)
44 sys.exit(1)
45 sys.exit(0)
45 sys.exit(0)
46
46
@@ -12,7 +12,7 b' try:'
12 except ImportError:
12 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
14 import traceback
15 print traceback.format_exc()
15 print(traceback.format_exc())
16 hooks = None
16 hooks = None
17
17
18
18
@@ -43,7 +43,7 b' def main():'
43 # TODO: johbo: Improve handling of this special case
43 # TODO: johbo: Improve handling of this special case
44 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
45 raise
45 raise
46 print 'ERROR:', error
46 print(f'ERROR: {error}')
47 sys.exit(1)
47 sys.exit(1)
48 sys.exit(0)
48 sys.exit(0)
49
49
@@ -1,7 +1,5 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
5 #
3 #
6 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -24,51 +22,70 b' import logging'
24 import collections
22 import collections
25 import importlib
23 import importlib
26 import base64
24 import base64
25 import msgpack
26 import dataclasses
27 import pygit2
27
28
28 from httplib import HTTPConnection
29 import http.client
29
30
30
31
31 import mercurial.scmutil
32 import mercurial.scmutil
32 import mercurial.node
33 import mercurial.node
33 import simplejson as json
34
34
35 from vcsserver.lib.rc_json import json
35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.str_utils import ascii_str, safe_str
38 from vcsserver.remote.git import Repository
36
39
37 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
38
41
39
42
40 class HooksHttpClient(object):
43 class HooksHttpClient(object):
44 proto = 'msgpack.v1'
41 connection = None
45 connection = None
42
46
43 def __init__(self, hooks_uri):
47 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
48 self.hooks_uri = hooks_uri
45
49
50 def __repr__(self):
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52
46 def __call__(self, method, extras):
53 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
54 connection = http.client.HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
55 # binary msgpack body
49 try:
56 headers, body = self._serialize(method, extras)
50 connection.request('POST', '/', body)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
51 except Exception:
52 log.error('Hooks calling Connection failed on %s', connection.__dict__)
53 raise
54 response = connection.getresponse()
55
56 response_data = response.read()
57
58
58 try:
59 try:
59 return json.loads(response_data)
60 try:
61 connection.request('POST', '/', body, headers)
62 except Exception as error:
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 raise
65
66 response = connection.getresponse()
67 try:
68 return msgpack.load(response)
60 except Exception:
69 except Exception:
70 response_data = response.read()
61 log.exception('Failed to decode hook response json data. '
71 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
72 'response_code:%s, raw_data:%s',
63 response.status, response_data)
73 response.status, response_data)
64 raise
74 raise
75 finally:
76 connection.close()
65
77
66 def _serialize(self, hook_name, extras):
78 @classmethod
79 def _serialize(cls, hook_name, extras):
67 data = {
80 data = {
68 'method': hook_name,
81 'method': hook_name,
69 'extras': extras
82 'extras': extras
70 }
83 }
71 return json.dumps(data)
84 headers = {
85 "rc-hooks-protocol": cls.proto,
86 "Connection": "keep-alive"
87 }
88 return headers, msgpack.packb(data)
72
89
73
90
74 class HooksDummyClient(object):
91 class HooksDummyClient(object):
@@ -98,7 +115,7 b' class HgMessageWriter(RemoteMessageWrite'
98 def __init__(self, ui):
115 def __init__(self, ui):
99 self.ui = ui
116 self.ui = ui
100
117
101 def write(self, message):
118 def write(self, message: str):
102 # TODO: Check why the quiet flag is set by default.
119 # TODO: Check why the quiet flag is set by default.
103 old = self.ui.quiet
120 old = self.ui.quiet
104 self.ui.quiet = False
121 self.ui.quiet = False
@@ -112,8 +129,8 b' class GitMessageWriter(RemoteMessageWrit'
112 def __init__(self, stdout=None):
129 def __init__(self, stdout=None):
113 self.stdout = stdout or sys.stdout
130 self.stdout = stdout or sys.stdout
114
131
115 def write(self, message):
132 def write(self, message: str):
116 self.stdout.write(message.encode('utf-8'))
133 self.stdout.write(message)
117
134
118
135
119 class SvnMessageWriter(RemoteMessageWriter):
136 class SvnMessageWriter(RemoteMessageWriter):
@@ -130,6 +147,7 b' class SvnMessageWriter(RemoteMessageWrit'
130 def _handle_exception(result):
147 def _handle_exception(result):
131 exception_class = result.get('exception')
148 exception_class = result.get('exception')
132 exception_traceback = result.get('exception_traceback')
149 exception_traceback = result.get('exception_traceback')
150 log.debug('Handling hook-call exception: %s', exception_class)
133
151
134 if exception_traceback:
152 if exception_traceback:
135 log.error('Got traceback from remote call:%s', exception_traceback)
153 log.error('Got traceback from remote call:%s', exception_traceback)
@@ -141,13 +159,15 b' def _handle_exception(result):'
141 elif exception_class == 'RepositoryError':
159 elif exception_class == 'RepositoryError':
142 raise exceptions.VcsException()(*result['exception_args'])
160 raise exceptions.VcsException()(*result['exception_args'])
143 elif exception_class:
161 elif exception_class:
144 raise Exception('Got remote exception "%s" with args "%s"' %
162 raise Exception(
145 (exception_class, result['exception_args']))
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
164 )
146
165
147
166
148 def _get_hooks_client(extras):
167 def _get_hooks_client(extras):
149 hooks_uri = extras.get('hooks_uri')
168 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
169 is_shadow_repo = extras.get('is_shadow_repo')
170
151 if hooks_uri:
171 if hooks_uri:
152 return HooksHttpClient(extras['hooks_uri'])
172 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
173 elif is_shadow_repo:
@@ -161,7 +181,6 b' def _call_hook(hook_name, extras, writer'
161 log.debug('Hooks, using client:%s', hooks_client)
181 log.debug('Hooks, using client:%s', hooks_client)
162 result = hooks_client(hook_name, extras)
182 result = hooks_client(hook_name, extras)
163 log.debug('Hooks got result: %s', result)
183 log.debug('Hooks got result: %s', result)
164
165 _handle_exception(result)
184 _handle_exception(result)
166 writer.write(result['output'])
185 writer.write(result['output'])
167
186
@@ -169,7 +188,7 b' def _call_hook(hook_name, extras, writer'
169
188
170
189
171 def _extras_from_ui(ui):
190 def _extras_from_ui(ui):
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
173 if not hook_data:
192 if not hook_data:
174 # maybe it's inside environ ?
193 # maybe it's inside environ ?
175 env_hook_data = os.environ.get('RC_SCM_DATA')
194 env_hook_data = os.environ.get('RC_SCM_DATA')
@@ -192,8 +211,8 b' def _rev_range_hash(repo, node, check_he'
192 for rev in range(start, end):
211 for rev in range(start, end):
193 revs.append(rev)
212 revs.append(rev)
194 ctx = get_ctx(repo, rev)
213 ctx = get_ctx(repo, rev)
195 commit_id = mercurial.node.hex(ctx.node())
214 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
196 branch = ctx.branch()
215 branch = safe_str(ctx.branch())
197 commits.append((commit_id, branch))
216 commits.append((commit_id, branch))
198
217
199 parent_heads = []
218 parent_heads = []
@@ -217,9 +236,9 b' def _check_heads(repo, start, end, commi'
217 for p in parents:
236 for p in parents:
218 branch = get_ctx(repo, p).branch()
237 branch = get_ctx(repo, p).branch()
219 # The heads descending from that parent, on the same branch
238 # The heads descending from that parent, on the same branch
220 parent_heads = set([p])
239 parent_heads = {p}
221 reachable = set([p])
240 reachable = {p}
222 for x in xrange(p + 1, end):
241 for x in range(p + 1, end):
223 if get_ctx(repo, x).branch() != branch:
242 if get_ctx(repo, x).branch() != branch:
224 continue
243 continue
225 for pp in changelog.parentrevs(x):
244 for pp in changelog.parentrevs(x):
@@ -295,14 +314,16 b' def pre_push(ui, repo, node=None, **kwar'
295 detect_force_push = extras.get('detect_force_push')
314 detect_force_push = extras.get('detect_force_push')
296
315
297 rev_data = []
316 rev_data = []
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
317 hook_type: str = safe_str(kwargs.get('hooktype'))
318
319 if node and hook_type == 'pretxnchangegroup':
299 branches = collections.defaultdict(list)
320 branches = collections.defaultdict(list)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
321 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 for commit_id, branch in commits:
322 for commit_id, branch in commits:
302 branches[branch].append(commit_id)
323 branches[branch].append(commit_id)
303
324
304 for branch, commits in branches.items():
325 for branch, commits in branches.items():
305 old_rev = kwargs.get('node_last') or commits[0]
326 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
306 rev_data.append({
327 rev_data.append({
307 'total_commits': len(commits),
328 'total_commits': len(commits),
308 'old_rev': old_rev,
329 'old_rev': old_rev,
@@ -319,10 +340,10 b' def pre_push(ui, repo, node=None, **kwar'
319 extras.get('repo_store', ''), extras.get('repository', ''))
340 extras.get('repo_store', ''), extras.get('repository', ''))
320 push_ref['hg_env'] = _get_hg_env(
341 push_ref['hg_env'] = _get_hg_env(
321 old_rev=push_ref['old_rev'],
342 old_rev=push_ref['old_rev'],
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
343 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
323 repo_path=repo_path)
344 repo_path=repo_path)
324
345
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
346 extras['hook_type'] = hook_type or 'pre_push'
326 extras['commit_ids'] = rev_data
347 extras['commit_ids'] = rev_data
327
348
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
349 return _call_hook('pre_push', extras, HgMessageWriter(ui))
@@ -363,6 +384,7 b' def post_push(ui, repo, node, **kwargs):'
363 branches = []
384 branches = []
364 bookmarks = []
385 bookmarks = []
365 tags = []
386 tags = []
387 hook_type: str = safe_str(kwargs.get('hooktype'))
366
388
367 commits, _heads = _rev_range_hash(repo, node)
389 commits, _heads = _rev_range_hash(repo, node)
368 for commit_id, branch in commits:
390 for commit_id, branch in commits:
@@ -370,11 +392,12 b' def post_push(ui, repo, node, **kwargs):'
370 if branch not in branches:
392 if branch not in branches:
371 branches.append(branch)
393 branches.append(branch)
372
394
373 if hasattr(ui, '_rc_pushkey_branches'):
395 if hasattr(ui, '_rc_pushkey_bookmarks'):
374 bookmarks = ui._rc_pushkey_branches
396 bookmarks = ui._rc_pushkey_bookmarks
375
397
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
398 extras['hook_type'] = hook_type or 'post_push'
377 extras['commit_ids'] = commit_ids
399 extras['commit_ids'] = commit_ids
400
378 extras['new_refs'] = {
401 extras['new_refs'] = {
379 'branches': branches,
402 'branches': branches,
380 'bookmarks': bookmarks,
403 'bookmarks': bookmarks,
@@ -395,9 +418,10 b' def post_push_ssh(ui, repo, node, **kwar'
395
418
396 def key_push(ui, repo, **kwargs):
419 def key_push(ui, repo, **kwargs):
397 from vcsserver.hgcompat import get_ctx
420 from vcsserver.hgcompat import get_ctx
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
421
422 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
399 # store new bookmarks in our UI object propagated later to post_push
423 # store new bookmarks in our UI object propagated later to post_push
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
424 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
401 return
425 return
402
426
403
427
@@ -426,10 +450,13 b' def handle_git_post_receive(unused_repo_'
426 pass
450 pass
427
451
428
452
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
453 @dataclasses.dataclass
454 class HookResponse:
455 status: int
456 output: str
430
457
431
458
432 def git_pre_pull(extras):
459 def git_pre_pull(extras) -> HookResponse:
433 """
460 """
434 Pre pull hook.
461 Pre pull hook.
435
462
@@ -439,20 +466,23 b' def git_pre_pull(extras):'
439 :return: status code of the hook. 0 for success.
466 :return: status code of the hook. 0 for success.
440 :rtype: int
467 :rtype: int
441 """
468 """
469
442 if 'pull' not in extras['hooks']:
470 if 'pull' not in extras['hooks']:
443 return HookResponse(0, '')
471 return HookResponse(0, '')
444
472
445 stdout = io.BytesIO()
473 stdout = io.StringIO()
446 try:
474 try:
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
475 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
476
448 except Exception as error:
477 except Exception as error:
449 status = 128
478 log.exception('Failed to call pre_pull hook')
450 stdout.write('ERROR: %s\n' % str(error))
479 status_code = 128
480 stdout.write(f'ERROR: {error}\n')
451
481
452 return HookResponse(status, stdout.getvalue())
482 return HookResponse(status_code, stdout.getvalue())
453
483
454
484
455 def git_post_pull(extras):
485 def git_post_pull(extras) -> HookResponse:
456 """
486 """
457 Post pull hook.
487 Post pull hook.
458
488
@@ -465,12 +495,12 b' def git_post_pull(extras):'
465 if 'pull' not in extras['hooks']:
495 if 'pull' not in extras['hooks']:
466 return HookResponse(0, '')
496 return HookResponse(0, '')
467
497
468 stdout = io.BytesIO()
498 stdout = io.StringIO()
469 try:
499 try:
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
500 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 except Exception as error:
501 except Exception as error:
472 status = 128
502 status = 128
473 stdout.write('ERROR: %s\n' % error)
503 stdout.write(f'ERROR: {error}\n')
474
504
475 return HookResponse(status, stdout.getvalue())
505 return HookResponse(status, stdout.getvalue())
476
506
@@ -495,15 +525,11 b' def _parse_git_ref_lines(revision_lines)'
495 return rev_data
525 return rev_data
496
526
497
527
498 def git_pre_receive(unused_repo_path, revision_lines, env):
528 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
499 """
529 """
500 Pre push hook.
530 Pre push hook.
501
531
502 :param extras: dictionary containing the keys defined in simplevcs
503 :type extras: dict
504
505 :return: status code of the hook. 0 for success.
532 :return: status code of the hook. 0 for success.
506 :rtype: int
507 """
533 """
508 extras = json.loads(env['RC_SCM_DATA'])
534 extras = json.loads(env['RC_SCM_DATA'])
509 rev_data = _parse_git_ref_lines(revision_lines)
535 rev_data = _parse_git_ref_lines(revision_lines)
@@ -527,7 +553,7 b' def git_pre_receive(unused_repo_path, re'
527 if type_ == 'heads' and not (new_branch or delete_branch):
553 if type_ == 'heads' and not (new_branch or delete_branch):
528 old_rev = push_ref['old_rev']
554 old_rev = push_ref['old_rev']
529 new_rev = push_ref['new_rev']
555 new_rev = push_ref['new_rev']
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
556 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, f'^{new_rev}']
531 stdout, stderr = subprocessio.run_command(
557 stdout, stderr = subprocessio.run_command(
532 cmd, env=os.environ.copy())
558 cmd, env=os.environ.copy())
533 # means we're having some non-reachable objects, this forced push was used
559 # means we're having some non-reachable objects, this forced push was used
@@ -536,18 +562,18 b' def git_pre_receive(unused_repo_path, re'
536
562
537 extras['hook_type'] = 'pre_receive'
563 extras['hook_type'] = 'pre_receive'
538 extras['commit_ids'] = rev_data
564 extras['commit_ids'] = rev_data
539 return _call_hook('pre_push', extras, GitMessageWriter())
565
566 stdout = sys.stdout
567 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
568
569 return status_code
540
570
541
571
542 def git_post_receive(unused_repo_path, revision_lines, env):
572 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
543 """
573 """
544 Post push hook.
574 Post push hook.
545
575
546 :param extras: dictionary containing the keys defined in simplevcs
547 :type extras: dict
548
549 :return: status code of the hook. 0 for success.
576 :return: status code of the hook. 0 for success.
550 :rtype: int
551 """
577 """
552 extras = json.loads(env['RC_SCM_DATA'])
578 extras = json.loads(env['RC_SCM_DATA'])
553 if 'push' not in extras['hooks']:
579 if 'push' not in extras['hooks']:
@@ -567,26 +593,28 b' def git_post_receive(unused_repo_path, r'
567 type_ = push_ref['type']
593 type_ = push_ref['type']
568
594
569 if type_ == 'heads':
595 if type_ == 'heads':
596 # starting new branch case
570 if push_ref['old_rev'] == empty_commit_id:
597 if push_ref['old_rev'] == empty_commit_id:
571 # starting new branch case
598 push_ref_name = push_ref['name']
572 if push_ref['name'] not in branches:
573 branches.append(push_ref['name'])
574
599
575 # Fix up head revision if needed
600 if push_ref_name not in branches:
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
601 branches.append(push_ref_name)
602
603 need_head_set = ''
604 with Repository(os.getcwd()) as repo:
577 try:
605 try:
578 subprocessio.run_command(cmd, env=os.environ.copy())
606 repo.head
579 except Exception:
607 except pygit2.GitError:
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', '"HEAD"',
608 need_head_set = f'refs/heads/{push_ref_name}'
581 '"refs/heads/%s"' % push_ref['name']]
582 print("Setting default branch to %s" % push_ref['name'])
583 subprocessio.run_command(cmd, env=os.environ.copy())
584
609
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
610 if need_head_set:
586 '--format=%(refname)', 'refs/heads/*']
611 repo.set_head(need_head_set)
612 print(f"Setting default branch to {push_ref_name}")
613
614 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
587 stdout, stderr = subprocessio.run_command(
615 stdout, stderr = subprocessio.run_command(
588 cmd, env=os.environ.copy())
616 cmd, env=os.environ.copy())
589 heads = stdout
617 heads = safe_str(stdout)
590 heads = heads.replace(push_ref['ref'], '')
618 heads = heads.replace(push_ref['ref'], '')
591 heads = ' '.join(head for head
619 heads = ' '.join(head for head
592 in heads.splitlines() if head) or '.'
620 in heads.splitlines() if head) or '.'
@@ -595,9 +623,10 b' def git_post_receive(unused_repo_path, r'
595 '--not', heads]
623 '--not', heads]
596 stdout, stderr = subprocessio.run_command(
624 stdout, stderr = subprocessio.run_command(
597 cmd, env=os.environ.copy())
625 cmd, env=os.environ.copy())
598 git_revs.extend(stdout.splitlines())
626 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
627
628 # delete branch case
599 elif push_ref['new_rev'] == empty_commit_id:
629 elif push_ref['new_rev'] == empty_commit_id:
600 # delete branch case
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
630 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 else:
631 else:
603 if push_ref['name'] not in branches:
632 if push_ref['name'] not in branches:
@@ -608,7 +637,25 b' def git_post_receive(unused_repo_path, r'
608 '--reverse', '--pretty=format:%H']
637 '--reverse', '--pretty=format:%H']
609 stdout, stderr = subprocessio.run_command(
638 stdout, stderr = subprocessio.run_command(
610 cmd, env=os.environ.copy())
639 cmd, env=os.environ.copy())
611 git_revs.extend(stdout.splitlines())
640 # we get bytes from stdout, we need str to be consistent
641 log_revs = list(map(ascii_str, stdout.splitlines()))
642 git_revs.extend(log_revs)
643
644 # Pure pygit2 impl. but still 2-3x slower :/
645 # results = []
646 #
647 # with Repository(os.getcwd()) as repo:
648 # repo_new_rev = repo[push_ref['new_rev']]
649 # repo_old_rev = repo[push_ref['old_rev']]
650 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
651 #
652 # for commit in walker:
653 # if commit.id == repo_old_rev.id:
654 # break
655 # results.append(commit.id.hex)
656 # # reverse the order, can't use GIT_SORT_REVERSE
657 # log_revs = results[::-1]
658
612 elif type_ == 'tags':
659 elif type_ == 'tags':
613 if push_ref['name'] not in tags:
660 if push_ref['name'] not in tags:
614 tags.append(push_ref['name'])
661 tags.append(push_ref['name'])
@@ -622,13 +669,16 b' def git_post_receive(unused_repo_path, r'
622 'tags': tags,
669 'tags': tags,
623 }
670 }
624
671
672 stdout = sys.stdout
673
625 if 'repo_size' in extras['hooks']:
674 if 'repo_size' in extras['hooks']:
626 try:
675 try:
627 _call_hook('repo_size', extras, GitMessageWriter())
676 _call_hook('repo_size', extras, GitMessageWriter(stdout))
628 except:
677 except Exception:
629 pass
678 pass
630
679
631 return _call_hook('post_push', extras, GitMessageWriter())
680 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
681 return status_code
632
682
633
683
634 def _get_extras_from_txn_id(path, txn_id):
684 def _get_extras_from_txn_id(path, txn_id):
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -15,9 +15,9 b''
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import os
19 import os
19 import sys
20 import sys
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
@@ -28,17 +28,20 b' import tempfile'
28 import psutil
28 import psutil
29
29
30 from itertools import chain
30 from itertools import chain
31 from cStringIO import StringIO
32
31
33 import simplejson as json
34 import msgpack
32 import msgpack
33 import configparser
34
35 from pyramid.config import Configurator
35 from pyramid.config import Configurator
36 from pyramid.wsgi import wsgiapp
36 from pyramid.wsgi import wsgiapp
37 from pyramid.compat import configparser
38 from pyramid.response import Response
37 from pyramid.response import Response
38
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
40 from vcsserver.lib.rc_json import json
39 from vcsserver.config.settings_maker import SettingsMaker
41 from vcsserver.config.settings_maker import SettingsMaker
40 from vcsserver.utils import safe_int
42 from vcsserver.str_utils import safe_int
41 from vcsserver.lib.statsd_client import StatsdClient
43 from vcsserver.lib.statsd_client import StatsdClient
44 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
42
45
43 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
44
47
@@ -62,23 +65,39 b' from vcsserver.exceptions import HTTPRep'
62 from vcsserver.lib.exc_tracking import store_exception
65 from vcsserver.lib.exc_tracking import store_exception
63 from vcsserver.server import VcsServer
66 from vcsserver.server import VcsServer
64
67
68 strict_vcs = True
69
70 git_import_err = None
65 try:
71 try:
66 from vcsserver.git import GitFactory, GitRemote
72 from vcsserver.remote.git import GitFactory, GitRemote
67 except ImportError:
73 except ImportError as e:
68 GitFactory = None
74 GitFactory = None
69 GitRemote = None
75 GitRemote = None
76 git_import_err = e
77 if strict_vcs:
78 raise
70
79
80
81 hg_import_err = None
71 try:
82 try:
72 from vcsserver.hg import MercurialFactory, HgRemote
83 from vcsserver.remote.hg import MercurialFactory, HgRemote
73 except ImportError:
84 except ImportError as e:
74 MercurialFactory = None
85 MercurialFactory = None
75 HgRemote = None
86 HgRemote = None
87 hg_import_err = e
88 if strict_vcs:
89 raise
76
90
91
92 svn_import_err = None
77 try:
93 try:
78 from vcsserver.svn import SubversionFactory, SvnRemote
94 from vcsserver.remote.svn import SubversionFactory, SvnRemote
79 except ImportError:
95 except ImportError as e:
80 SubversionFactory = None
96 SubversionFactory = None
81 SvnRemote = None
97 SvnRemote = None
98 svn_import_err = e
99 if strict_vcs:
100 raise
82
101
83
102
84 def _is_request_chunked(environ):
103 def _is_request_chunked(environ):
@@ -106,13 +125,13 b' class VCS(object):'
106 git_factory = GitFactory()
125 git_factory = GitFactory()
107 self._git_remote = GitRemote(git_factory)
126 self._git_remote = GitRemote(git_factory)
108 else:
127 else:
109 log.info("Git client import failed")
128 log.error("Git client import failed: %s", git_import_err)
110
129
111 if MercurialFactory and HgRemote:
130 if MercurialFactory and HgRemote:
112 hg_factory = MercurialFactory()
131 hg_factory = MercurialFactory()
113 self._hg_remote = HgRemote(hg_factory)
132 self._hg_remote = HgRemote(hg_factory)
114 else:
133 else:
115 log.info("Mercurial client import failed")
134 log.error("Mercurial client import failed: %s", hg_import_err)
116
135
117 if SubversionFactory and SvnRemote:
136 if SubversionFactory and SvnRemote:
118 svn_factory = SubversionFactory()
137 svn_factory = SubversionFactory()
@@ -121,7 +140,7 b' class VCS(object):'
121 hg_factory = MercurialFactory()
140 hg_factory = MercurialFactory()
122 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
141 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
123 else:
142 else:
124 log.warning("Subversion client import failed")
143 log.error("Subversion client import failed: %s", svn_import_err)
125
144
126 self._vcsserver = VcsServer()
145 self._vcsserver = VcsServer()
127
146
@@ -129,8 +148,7 b' class VCS(object):'
129 if self.locale:
148 if self.locale:
130 log.info('Settings locale: `LC_ALL` to %s', self.locale)
149 log.info('Settings locale: `LC_ALL` to %s', self.locale)
131 else:
150 else:
132 log.info(
151 log.info('Configuring locale subsystem based on environment variables')
133 'Configuring locale subsystem based on environment variables')
134 try:
152 try:
135 # If self.locale is the empty string, then the locale
153 # If self.locale is the empty string, then the locale
136 # module will use the environment variables. See the
154 # module will use the environment variables. See the
@@ -142,8 +160,7 b' class VCS(object):'
142 'Locale set to language code "%s" with encoding "%s".',
160 'Locale set to language code "%s" with encoding "%s".',
143 language_code, encoding)
161 language_code, encoding)
144 except locale.Error:
162 except locale.Error:
145 log.exception(
163 log.exception('Cannot set locale, not configuring the locale system')
146 'Cannot set locale, not configuring the locale system')
147
164
148
165
149 class WsgiProxy(object):
166 class WsgiProxy(object):
@@ -189,7 +206,7 b' class VCSViewPredicate(object):'
189 self.remotes = val
206 self.remotes = val
190
207
191 def text(self):
208 def text(self):
192 return 'vcs view method = %s' % (self.remotes.keys(),)
209 return f'vcs view method = {list(self.remotes.keys())}'
193
210
194 phash = text
211 phash = text
195
212
@@ -213,9 +230,11 b' class HTTPApplication(object):'
213 self.config = Configurator(settings=settings)
230 self.config = Configurator(settings=settings)
214 # Init our statsd at very start
231 # Init our statsd at very start
215 self.config.registry.statsd = StatsdClient.statsd
232 self.config.registry.statsd = StatsdClient.statsd
233 self.config.registry.vcs_call_context = {}
216
234
217 self.global_config = global_config
235 self.global_config = global_config
218 self.config.include('vcsserver.lib.rc_cache')
236 self.config.include('vcsserver.lib.rc_cache')
237 self.config.include('vcsserver.lib.rc_cache.archive_cache')
219
238
220 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
221 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
@@ -302,6 +321,7 b' class HTTPApplication(object):'
302 def _vcs_view_params(self, request):
321 def _vcs_view_params(self, request):
303 remote = self._remotes[request.matchdict['backend']]
322 remote = self._remotes[request.matchdict['backend']]
304 payload = msgpack.unpackb(request.body, use_list=True)
323 payload = msgpack.unpackb(request.body, use_list=True)
324
305 method = payload.get('method')
325 method = payload.get('method')
306 params = payload['params']
326 params = payload['params']
307 wire = params.get('wire')
327 wire = params.get('wire')
@@ -309,6 +329,11 b' class HTTPApplication(object):'
309 kwargs = params.get('kwargs')
329 kwargs = params.get('kwargs')
310 context_uid = None
330 context_uid = None
311
331
332 request.registry.vcs_call_context = {
333 'method': method,
334 'repo_name': payload.get('_repo_name'),
335 }
336
312 if wire:
337 if wire:
313 try:
338 try:
314 wire['context'] = context_uid = uuid.UUID(wire['context'])
339 wire['context'] = context_uid = uuid.UUID(wire['context'])
@@ -319,22 +344,33 b' class HTTPApplication(object):'
319
344
320 # NOTE(marcink): trading complexity for slight performance
345 # NOTE(marcink): trading complexity for slight performance
321 if log.isEnabledFor(logging.DEBUG):
346 if log.isEnabledFor(logging.DEBUG):
322 no_args_methods = [
347 # also we SKIP printing out any of those methods args since they maybe excessive
323
348 just_args_methods = {
324 ]
349 'commitctx': ('content', 'removed', 'updated'),
325 if method in no_args_methods:
350 'commit': ('content', 'removed', 'updated')
351 }
352 if method in just_args_methods:
353 skip_args = just_args_methods[method]
326 call_args = ''
354 call_args = ''
355 call_kwargs = {}
356 for k in kwargs:
357 if k in skip_args:
358 # replace our skip key with dummy
359 call_kwargs[k] = f'RemovedParam({k})'
360 else:
361 call_kwargs[k] = kwargs[k]
327 else:
362 else:
328 call_args = args[1:]
363 call_args = args[1:]
364 call_kwargs = kwargs
329
365
330 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
366 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
331 method, call_args, kwargs, context_uid, repo_state_uid)
367 method, call_args, call_kwargs, context_uid, repo_state_uid)
332
368
333 statsd = request.registry.statsd
369 statsd = request.registry.statsd
334 if statsd:
370 if statsd:
335 statsd.incr(
371 statsd.incr(
336 'vcsserver_method_total', tags=[
372 'vcsserver_method_total', tags=[
337 "method:{}".format(method),
373 f"method:{method}",
338 ])
374 ])
339 return payload, remote, method, args, kwargs
375 return payload, remote, method, args, kwargs
340
376
@@ -384,7 +420,7 b' class HTTPApplication(object):'
384 resp = {
420 resp = {
385 'id': payload_id,
421 'id': payload_id,
386 'error': {
422 'error': {
387 'message': e.message,
423 'message': str(e),
388 'traceback': tb_info,
424 'traceback': tb_info,
389 'org_exc': org_exc_name,
425 'org_exc': org_exc_name,
390 'org_exc_tb': org_exc_tb,
426 'org_exc_tb': org_exc_tb,
@@ -401,7 +437,7 b' class HTTPApplication(object):'
401 'id': payload_id,
437 'id': payload_id,
402 'result': resp
438 'result': resp
403 }
439 }
404
440 log.debug('Serving data for method %s', method)
405 return resp
441 return resp
406
442
407 def vcs_stream_view(self, request):
443 def vcs_stream_view(self, request):
@@ -416,7 +452,7 b' class HTTPApplication(object):'
416 raise
452 raise
417
453
418 def get_chunked_data(method_resp):
454 def get_chunked_data(method_resp):
419 stream = StringIO(method_resp)
455 stream = io.BytesIO(method_resp)
420 while 1:
456 while 1:
421 chunk = stream.read(chunk_size)
457 chunk = stream.read(chunk_size)
422 if not chunk:
458 if not chunk:
@@ -453,7 +489,7 b' class HTTPApplication(object):'
453 except Exception:
489 except Exception:
454 log.exception('Failed to read .ini file for display')
490 log.exception('Failed to read .ini file for display')
455
491
456 environ = os.environ.items()
492 environ = list(os.environ.items())
457
493
458 resp = {
494 resp = {
459 'id': payload.get('id'),
495 'id': payload.get('id'),
@@ -468,14 +504,28 b' class HTTPApplication(object):'
468 return resp
504 return resp
469
505
470 def _msgpack_renderer_factory(self, info):
506 def _msgpack_renderer_factory(self, info):
507
471 def _render(value, system):
508 def _render(value, system):
509 bin_type = False
510 res = value.get('result')
511 if isinstance(res, BytesEnvelope):
512 log.debug('Result is wrapped in BytesEnvelope type')
513 bin_type = True
514 elif isinstance(res, BinaryEnvelope):
515 log.debug('Result is wrapped in BinaryEnvelope type')
516 value['result'] = res.val
517 bin_type = True
518
472 request = system.get('request')
519 request = system.get('request')
473 if request is not None:
520 if request is not None:
474 response = request.response
521 response = request.response
475 ct = response.content_type
522 ct = response.content_type
476 if ct == response.default_content_type:
523 if ct == response.default_content_type:
477 response.content_type = 'application/x-msgpack'
524 response.content_type = 'application/x-msgpack'
478 return msgpack.packb(value)
525 if bin_type:
526 response.content_type = 'application/x-msgpack-bin'
527
528 return msgpack.packb(value, use_bin_type=bin_type)
479 return _render
529 return _render
480
530
481 def set_env_from_config(self, environ, config):
531 def set_env_from_config(self, environ, config):
@@ -528,16 +578,17 b' class HTTPApplication(object):'
528 @wsgiapp
578 @wsgiapp
529 def _hg_stream(environ, start_response):
579 def _hg_stream(environ, start_response):
530 log.debug('http-app: handling hg stream')
580 log.debug('http-app: handling hg stream')
531 repo_path = environ['HTTP_X_RC_REPO_PATH']
581 call_context = get_headers_call_context(environ)
532 repo_name = environ['HTTP_X_RC_REPO_NAME']
582
533 packed_config = base64.b64decode(
583 repo_path = call_context['repo_path']
534 environ['HTTP_X_RC_REPO_CONFIG'])
584 repo_name = call_context['repo_name']
535 config = msgpack.unpackb(packed_config)
585 config = call_context['repo_config']
586
536 app = scm_app.create_hg_wsgi_app(
587 app = scm_app.create_hg_wsgi_app(
537 repo_path, repo_name, config)
588 repo_path, repo_name, config)
538
589
539 # Consistent path information for hgweb
590 # Consistent path information for hgweb
540 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
591 environ['PATH_INFO'] = call_context['path_info']
541 environ['REPO_NAME'] = repo_name
592 environ['REPO_NAME'] = repo_name
542 self.set_env_from_config(environ, config)
593 self.set_env_from_config(environ, config)
543
594
@@ -557,13 +608,14 b' class HTTPApplication(object):'
557 @wsgiapp
608 @wsgiapp
558 def _git_stream(environ, start_response):
609 def _git_stream(environ, start_response):
559 log.debug('http-app: handling git stream')
610 log.debug('http-app: handling git stream')
560 repo_path = environ['HTTP_X_RC_REPO_PATH']
611
561 repo_name = environ['HTTP_X_RC_REPO_NAME']
612 call_context = get_headers_call_context(environ)
562 packed_config = base64.b64decode(
563 environ['HTTP_X_RC_REPO_CONFIG'])
564 config = msgpack.unpackb(packed_config)
565
613
566 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
614 repo_path = call_context['repo_path']
615 repo_name = call_context['repo_name']
616 config = call_context['repo_config']
617
618 environ['PATH_INFO'] = call_context['path_info']
567 self.set_env_from_config(environ, config)
619 self.set_env_from_config(environ, config)
568
620
569 content_type = environ.get('CONTENT_TYPE', '')
621 content_type = environ.get('CONTENT_TYPE', '')
@@ -599,15 +651,18 b' class HTTPApplication(object):'
599
651
600 def handle_vcs_exception(self, exception, request):
652 def handle_vcs_exception(self, exception, request):
601 _vcs_kind = getattr(exception, '_vcs_kind', '')
653 _vcs_kind = getattr(exception, '_vcs_kind', '')
654
602 if _vcs_kind == 'repo_locked':
655 if _vcs_kind == 'repo_locked':
603 # Get custom repo-locked status code if present.
656 headers_call_context = get_headers_call_context(request.environ)
604 status_code = request.headers.get('X-RC-Locked-Status-Code')
657 status_code = safe_int(headers_call_context['locked_status_code'])
658
605 return HTTPRepoLocked(
659 return HTTPRepoLocked(
606 title=exception.message, status_code=status_code)
660 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
607
661
608 elif _vcs_kind == 'repo_branch_protected':
662 elif _vcs_kind == 'repo_branch_protected':
609 # Get custom repo-branch-protected status code if present.
663 # Get custom repo-branch-protected status code if present.
610 return HTTPRepoBranchProtected(title=exception.message)
664 return HTTPRepoBranchProtected(
665 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
611
666
612 exc_info = request.exc_info
667 exc_info = request.exc_info
613 store_exception(id(exc_info), exc_info)
668 store_exception(id(exc_info), exc_info)
@@ -623,9 +678,9 b' class HTTPApplication(object):'
623
678
624 statsd = request.registry.statsd
679 statsd = request.registry.statsd
625 if statsd:
680 if statsd:
626 exc_type = "{}.{}".format(exception.__class__.__module__, exception.__class__.__name__)
681 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
627 statsd.incr('vcsserver_exception_total',
682 statsd.incr('vcsserver_exception_total',
628 tags=["type:{}".format(exc_type)])
683 tags=[f"type:{exc_type}"])
629 raise exception
684 raise exception
630
685
631
686
@@ -716,8 +771,7 b' def main(global_config, **settings):'
716
771
717 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
772 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
718 total_time = time.time() - start_time
773 total_time = time.time() - start_time
719 log.info('Pyramid app `%s` created and configured in %.2fs',
774 log.info('Pyramid app created and configured in %.2fs', total_time)
720 getattr(pyramid_app, 'func_name', 'pyramid_app'), total_time)
721 return pyramid_app
775 return pyramid_app
722
776
723
777
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -84,7 +84,7 b' class JsonEncoder(json.JSONEncoder):'
84 return str(obj)
84 return str(obj)
85
85
86 try:
86 try:
87 return super(JsonEncoder, self).default(obj)
87 return super().default(obj)
88
88
89 except TypeError:
89 except TypeError:
90 try:
90 try:
@@ -132,7 +132,7 b' class JsonFormatter(ExceptionAwareFormat'
132 self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True)
132 self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True)
133 self.prefix = kwargs.pop("prefix", "")
133 self.prefix = kwargs.pop("prefix", "")
134 reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS)
134 reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS)
135 self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs))
135 self.reserved_attrs = dict(list(zip(reserved_attrs, reserved_attrs)))
136 self.timestamp = kwargs.pop("timestamp", True)
136 self.timestamp = kwargs.pop("timestamp", True)
137
137
138 # super(JsonFormatter, self).__init__(*args, **kwargs)
138 # super(JsonFormatter, self).__init__(*args, **kwargs)
@@ -141,8 +141,8 b' class JsonFormatter(ExceptionAwareFormat'
141 self.json_encoder = JsonEncoder
141 self.json_encoder = JsonEncoder
142
142
143 self._required_fields = self.parse()
143 self._required_fields = self.parse()
144 self._skip_fields = dict(zip(self._required_fields,
144 self._skip_fields = dict(list(zip(self._required_fields,
145 self._required_fields))
145 self._required_fields)))
146 self._skip_fields.update(self.reserved_attrs)
146 self._skip_fields.update(self.reserved_attrs)
147
147
148 def _str_to_fn(self, fn_as_str):
148 def _str_to_fn(self, fn_as_str):
@@ -200,7 +200,7 b' class JsonFormatter(ExceptionAwareFormat'
200
200
201 def serialize_log_record(self, log_record):
201 def serialize_log_record(self, log_record):
202 """Returns the final representation of the log record."""
202 """Returns the final representation of the log record."""
203 return "%s%s" % (self.prefix, self.jsonify_log_record(log_record))
203 return "{}{}".format(self.prefix, self.jsonify_log_record(log_record))
204
204
205 def format(self, record):
205 def format(self, record):
206 """Formats a log record and serializes to json"""
206 """Formats a log record and serializes to json"""
@@ -23,14 +23,8 b' loggers = {'
23 ]
23 ]
24 }
24 }
25
25
26 PY3 = sys.version_info[0] == 3
27
28 if PY3:
29 text_type = str
26 text_type = str
30 binary_type = bytes
27 binary_type = bytes
31 else:
32 text_type = unicode # noqa
33 binary_type = str
34
28
35
29
36 # Check if the id match. If not, return an error code.
30 # Check if the id match. If not, return an error code.
@@ -225,7 +219,7 b' class Lock(object):'
225
219
226 if self._held:
220 if self._held:
227 owner_id = self.get_owner_id()
221 owner_id = self.get_owner_id()
228 raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))
222 raise AlreadyAcquired(f"Already acquired from this Lock instance. Lock id: {owner_id}")
229
223
230 if not blocking and timeout is not None:
224 if not blocking and timeout is not None:
231 raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
225 raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import logging
1 import logging
4
2
5 from .stream import TCPStatsClient, UnixSocketStatsClient # noqa
3 from .stream import TCPStatsClient, UnixSocketStatsClient # noqa
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import re
1 import re
4 import random
2 import random
5 from collections import deque
3 from collections import deque
@@ -49,7 +47,7 b' class StatsClientBase(object):'
49 statsd = StatsdClient.statsd
47 statsd = StatsdClient.statsd
50 with statsd.timer('bucket_name', auto_send=True) as tmr:
48 with statsd.timer('bucket_name', auto_send=True) as tmr:
51 # This block will be timed.
49 # This block will be timed.
52 for i in xrange(0, 100000):
50 for i in range(0, 100000):
53 i ** 2
51 i ** 2
54 # you can access time here...
52 # you can access time here...
55 elapsed_ms = tmr.ms
53 elapsed_ms = tmr.ms
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import socket
1 import socket
4
2
5 from .base import StatsClientBase, PipelineBase
3 from .base import StatsClientBase, PipelineBase
@@ -1,14 +1,5 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import functools
1 import functools
4
5 # Use timer that's not susceptible to time of day adjustments.
6 try:
7 # perf_counter is only present on Py3.3+
8 from time import perf_counter as time_now
2 from time import perf_counter as time_now
9 except ImportError:
10 # fall back to using time
11 from time import time as time_now
12
3
13
4
14 def safe_wraps(wrapper, *args, **kwargs):
5 def safe_wraps(wrapper, *args, **kwargs):
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import socket
1 import socket
4
2
5 from .base import StatsClientBase, PipelineBase
3 from .base import StatsClientBase, PipelineBase
@@ -8,7 +6,7 b' from .base import StatsClientBase, Pipel'
8 class Pipeline(PipelineBase):
6 class Pipeline(PipelineBase):
9
7
10 def __init__(self, client):
8 def __init__(self, client):
11 super(Pipeline, self).__init__(client)
9 super().__init__(client)
12 self._maxudpsize = client._maxudpsize
10 self._maxudpsize = client._maxudpsize
13
11
14 def _send(self):
12 def _send(self):
@@ -1,7 +1,5 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
5 #
3 #
6 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -26,8 +24,6 b' import logging'
26 import traceback
24 import traceback
27 import tempfile
25 import tempfile
28
26
29 from pyramid import compat
30
31 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
32
28
33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
29 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
@@ -77,7 +73,8 b' def _store_exception(exc_id, exc_info, p'
77 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
73 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
78
74
79 if detailed_tb:
75 if detailed_tb:
80 if isinstance(detailed_tb, compat.string_types):
76 remote_tb = detailed_tb
77 if isinstance(detailed_tb, str):
81 remote_tb = [detailed_tb]
78 remote_tb = [detailed_tb]
82
79
83 tb += (
80 tb += (
@@ -127,10 +124,10 b' def store_exception(exc_id, exc_info, pr'
127 def _find_exc_file(exc_id, prefix=global_prefix):
124 def _find_exc_file(exc_id, prefix=global_prefix):
128 exc_store_path = get_exc_store()
125 exc_store_path = get_exc_store()
129 if prefix:
126 if prefix:
130 exc_id = '{}_{}'.format(exc_id, prefix)
127 exc_id = f'{exc_id}_{prefix}'
131 else:
128 else:
132 # search without a prefix
129 # search without a prefix
133 exc_id = '{}'.format(exc_id)
130 exc_id = f'{exc_id}'
134
131
135 # we need to search the store for such start pattern as above
132 # we need to search the store for such start pattern as above
136 for fname in os.listdir(exc_store_path):
133 for fname in os.listdir(exc_store_path):
@@ -1,7 +1,5 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
5 #
3 #
6 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -22,7 +20,7 b' import logging'
22
20
23 from repoze.lru import LRUCache
21 from repoze.lru import LRUCache
24
22
25 from vcsserver.utils import safe_str
23 from vcsserver.str_utils import safe_str
26
24
27 log = logging.getLogger(__name__)
25 log = logging.getLogger(__name__)
28
26
@@ -45,7 +43,7 b' class LRUDict(LRUCache):'
45 del self.data[key]
43 del self.data[key]
46
44
47 def keys(self):
45 def keys(self):
48 return self.data.keys()
46 return list(self.data.keys())
49
47
50
48
51 class LRUDictDebug(LRUDict):
49 class LRUDictDebug(LRUDict):
@@ -53,11 +51,11 b' class LRUDictDebug(LRUDict):'
53 Wrapper to provide some debug options
51 Wrapper to provide some debug options
54 """
52 """
55 def _report_keys(self):
53 def _report_keys(self):
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
54 elems_cnt = f'{len(list(self.keys()))}/{self.size}'
57 # trick for pformat print it more nicely
55 # trick for pformat print it more nicely
58 fmt = '\n'
56 fmt = '\n'
59 for cnt, elem in enumerate(self.keys()):
57 for cnt, elem in enumerate(self.keys()):
60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
58 fmt += f'{cnt+1} - {safe_str(elem)}\n'
61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
59 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
62
60
63 def __getitem__(self, key):
61 def __getitem__(self, key):
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -16,31 +16,57 b''
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import threading
20
19 from dogpile.cache import register_backend
21 from dogpile.cache import register_backend
20
22
23 from . import region_meta
24 from .utils import (
25 backend_key_generator,
26 clear_cache_namespace,
27 get_default_cache_settings,
28 get_or_create_region,
29 make_region,
30 str2bool,
31 )
32
33 module_name = 'vcsserver'
34
21 register_backend(
35 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
36 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
23 "LRUMemoryBackend")
37 "LRUMemoryBackend")
24
38
25 register_backend(
39 register_backend(
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
40 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
27 "FileNamespaceBackend")
41 "FileNamespaceBackend")
28
42
29 register_backend(
43 register_backend(
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
44 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
31 "RedisPickleBackend")
45 "RedisPickleBackend")
32
46
33 register_backend(
47 register_backend(
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
48 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
35 "RedisMsgPackBackend")
49 "RedisMsgPackBackend")
36
50
37
51
38 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
39
53
40 from . import region_meta
54
41 from .utils import (
55 CLEAR_DELETE = 'delete'
42 get_default_cache_settings, backend_key_generator, get_or_create_region,
56 CLEAR_INVALIDATE = 'invalidate'
43 clear_cache_namespace, make_region)
57
58
59 def async_creation_runner(cache, somekey, creator, mutex):
60
61 def runner():
62 try:
63 value = creator()
64 cache.set(somekey, value)
65 finally:
66 mutex.release()
67
68 thread = threading.Thread(target=runner)
69 thread.start()
44
70
45
71
46 def configure_dogpile_cache(settings):
72 def configure_dogpile_cache(settings):
@@ -62,13 +88,20 b' def configure_dogpile_cache(settings):'
62
88
63 new_region = make_region(
89 new_region = make_region(
64 name=namespace_name,
90 name=namespace_name,
65 function_key_generator=None
91 function_key_generator=None,
92 async_creation_runner=None
66 )
93 )
67
94
68 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
95 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
69 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
96 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
97
98 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
99 if async_creator:
100 log.debug('configuring region %s with async creator', new_region)
101 new_region.async_creation_runner = async_creation_runner
102
70 if log.isEnabledFor(logging.DEBUG):
103 if log.isEnabledFor(logging.DEBUG):
71 region_args = dict(backend=new_region.actual_backend.__class__,
104 region_args = dict(backend=new_region.actual_backend,
72 region_invalidator=new_region.region_invalidator.__class__)
105 region_invalidator=new_region.region_invalidator.__class__)
73 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
106 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
74
107
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -15,25 +15,31 b''
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
19 import errno
18 import errno
19 import fcntl
20 import functools
20 import logging
21 import logging
22 import os
23 import pickle
24 #import time
21
25
26 #import gevent
22 import msgpack
27 import msgpack
23 import redis
28 import redis
24
29
25 from dogpile.cache.api import CachedValue
30 flock_org = fcntl.flock
26 from dogpile.cache.backends import memory as memory_backend
31 from typing import Union
32
33 from dogpile.cache.api import Deserializer, Serializer
27 from dogpile.cache.backends import file as file_backend
34 from dogpile.cache.backends import file as file_backend
35 from dogpile.cache.backends import memory as memory_backend
28 from dogpile.cache.backends import redis as redis_backend
36 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
37 from dogpile.cache.backends.file import FileLock
30 from dogpile.cache.util import memoized_property
38 from dogpile.cache.util import memoized_property
31
39
32 from pyramid.settings import asbool
33
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
40 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.utils import safe_str, safe_unicode
41 from vcsserver.str_utils import safe_bytes, safe_str
36
42 from vcsserver.type_utils import str2bool
37
43
38 _default_max_size = 1024
44 _default_max_size = 1024
39
45
@@ -45,14 +51,20 b' class LRUMemoryBackend(memory_backend.Me'
45 pickle_values = False
51 pickle_values = False
46
52
47 def __init__(self, arguments):
53 def __init__(self, arguments):
48 max_size = arguments.pop('max_size', _default_max_size)
54 self.max_size = arguments.pop('max_size', _default_max_size)
49
55
50 LRUDictClass = LRUDict
56 LRUDictClass = LRUDict
51 if arguments.pop('log_key_count', None):
57 if arguments.pop('log_key_count', None):
52 LRUDictClass = LRUDictDebug
58 LRUDictClass = LRUDictDebug
53
59
54 arguments['cache_dict'] = LRUDictClass(max_size)
60 arguments['cache_dict'] = LRUDictClass(self.max_size)
55 super(LRUMemoryBackend, self).__init__(arguments)
61 super().__init__(arguments)
62
63 def __repr__(self):
64 return f'{self.__class__}(maxsize=`{self.max_size}`)'
65
66 def __str__(self):
67 return self.__repr__()
56
68
57 def delete(self, key):
69 def delete(self, key):
58 try:
70 try:
@@ -66,55 +78,22 b' class LRUMemoryBackend(memory_backend.Me'
66 self.delete(key)
78 self.delete(key)
67
79
68
80
69 class PickleSerializer(object):
81 class PickleSerializer:
70
82 serializer: None | Serializer = staticmethod( # type: ignore
71 def _dumps(self, value, safe=False):
83 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
72 try:
84 )
73 return compat.pickle.dumps(value)
85 deserializer: None | Deserializer = staticmethod( # type: ignore
74 except Exception:
86 functools.partial(pickle.loads)
75 if safe:
87 )
76 return NO_VALUE
77 else:
78 raise
79
80 def _loads(self, value, safe=True):
81 try:
82 return compat.pickle.loads(value)
83 except Exception:
84 if safe:
85 return NO_VALUE
86 else:
87 raise
88
88
89
89
90 class MsgPackSerializer(object):
90 class MsgPackSerializer(object):
91
91 serializer: None | Serializer = staticmethod( # type: ignore
92 def _dumps(self, value, safe=False):
92 msgpack.packb
93 try:
93 )
94 return msgpack.packb(value)
94 deserializer: None | Deserializer = staticmethod( # type: ignore
95 except Exception:
95 functools.partial(msgpack.unpackb, use_list=False)
96 if safe:
96 )
97 return NO_VALUE
98 else:
99 raise
100
101 def _loads(self, value, safe=True):
102 """
103 pickle maintained the `CachedValue` wrapper of the tuple
104 msgpack does not, so it must be added back in.
105 """
106 try:
107 value = msgpack.unpackb(value, use_list=False)
108 return CachedValue(*value)
109 except Exception:
110 if safe:
111 return NO_VALUE
112 else:
113 raise
114
115
116 import fcntl
117 flock_org = fcntl.flock
118
97
119
98
120 class CustomLockFactory(FileLock):
99 class CustomLockFactory(FileLock):
@@ -129,30 +108,40 b' class FileNamespaceBackend(PickleSeriali'
129 arguments['lock_factory'] = CustomLockFactory
108 arguments['lock_factory'] = CustomLockFactory
130 db_file = arguments.get('filename')
109 db_file = arguments.get('filename')
131
110
132 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
111 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
112 db_file_dir = os.path.dirname(db_file)
113 if not os.path.isdir(db_file_dir):
114 os.makedirs(db_file_dir)
115
133 try:
116 try:
134 super(FileNamespaceBackend, self).__init__(arguments)
117 super().__init__(arguments)
135 except Exception:
118 except Exception:
136 log.exception('Failed to initialize db at: %s', db_file)
119 log.exception('Failed to initialize db at: %s', db_file)
137 raise
120 raise
138
121
139 def __repr__(self):
122 def __repr__(self):
140 return '{} `{}`'.format(self.__class__, self.filename)
123 return f'{self.__class__}(file=`{self.filename}`)'
124
125 def __str__(self):
126 return self.__repr__()
141
127
142 def list_keys(self, prefix=''):
128 def _get_keys_pattern(self, prefix: bytes = b''):
143 prefix = '{}:{}'.format(self.key_prefix, prefix)
129 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
144
130
145 def cond(v):
131 def list_keys(self, prefix: bytes = b''):
132 prefix = self._get_keys_pattern(prefix)
133
134 def cond(dbm_key: bytes):
146 if not prefix:
135 if not prefix:
147 return True
136 return True
148
137
149 if v.startswith(prefix):
138 if dbm_key.startswith(prefix):
150 return True
139 return True
151 return False
140 return False
152
141
153 with self._dbm_file(True) as dbm:
142 with self._dbm_file(True) as dbm:
154 try:
143 try:
155 return filter(cond, dbm.keys())
144 return list(filter(cond, dbm.keys()))
156 except Exception:
145 except Exception:
157 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
146 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
158 raise
147 raise
@@ -160,49 +149,27 b' class FileNamespaceBackend(PickleSeriali'
160 def get_store(self):
149 def get_store(self):
161 return self.filename
150 return self.filename
162
151
163 def _dbm_get(self, key):
164 with self._dbm_file(False) as dbm:
165 if hasattr(dbm, 'get'):
166 value = dbm.get(key, NO_VALUE)
167 else:
168 # gdbm objects lack a .get method
169 try:
170 value = dbm[key]
171 except KeyError:
172 value = NO_VALUE
173 if value is not NO_VALUE:
174 value = self._loads(value)
175 return value
176
177 def get(self, key):
178 try:
179 return self._dbm_get(key)
180 except Exception:
181 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
182 raise
183
184 def set(self, key, value):
185 with self._dbm_file(True) as dbm:
186 dbm[key] = self._dumps(value)
187
188 def set_multi(self, mapping):
189 with self._dbm_file(True) as dbm:
190 for key, value in mapping.items():
191 dbm[key] = self._dumps(value)
192
193
152
194 class BaseRedisBackend(redis_backend.RedisBackend):
153 class BaseRedisBackend(redis_backend.RedisBackend):
195 key_prefix = ''
154 key_prefix = ''
196
155
197 def __init__(self, arguments):
156 def __init__(self, arguments):
198 super(BaseRedisBackend, self).__init__(arguments)
157 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
158 super().__init__(arguments)
159
199 self._lock_timeout = self.lock_timeout
160 self._lock_timeout = self.lock_timeout
200 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
161 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
201
162
202 if self._lock_auto_renewal and not self._lock_timeout:
163 if self._lock_auto_renewal and not self._lock_timeout:
203 # set default timeout for auto_renewal
164 # set default timeout for auto_renewal
204 self._lock_timeout = 30
165 self._lock_timeout = 30
205
166
167 def __repr__(self):
168 return f'{self.__class__}(conn=`{self.db_conn}`)'
169
170 def __str__(self):
171 return self.__repr__()
172
206 def _create_client(self):
173 def _create_client(self):
207 args = {}
174 args = {}
208
175
@@ -216,58 +183,29 b' class BaseRedisBackend(redis_backend.Red'
216 )
183 )
217
184
218 connection_pool = redis.ConnectionPool(**args)
185 connection_pool = redis.ConnectionPool(**args)
219
186 self.writer_client = redis.StrictRedis(
220 return redis.StrictRedis(connection_pool=connection_pool)
187 connection_pool=connection_pool
188 )
189 self.reader_client = self.writer_client
221
190
222 def list_keys(self, prefix=''):
191 def _get_keys_pattern(self, prefix: bytes = b''):
223 prefix = '{}:{}*'.format(self.key_prefix, prefix)
192 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
224 return self.client.keys(prefix)
193
194 def list_keys(self, prefix: bytes = b''):
195 prefix = self._get_keys_pattern(prefix)
196 return self.reader_client.keys(prefix)
225
197
226 def get_store(self):
198 def get_store(self):
227 return self.client.connection_pool
199 return self.reader_client.connection_pool
228
229 def get(self, key):
230 value = self.client.get(key)
231 if value is None:
232 return NO_VALUE
233 return self._loads(value)
234
235 def get_multi(self, keys):
236 if not keys:
237 return []
238 values = self.client.mget(keys)
239 loads = self._loads
240 return [
241 loads(v) if v is not None else NO_VALUE
242 for v in values]
243
244 def set(self, key, value):
245 if self.redis_expiration_time:
246 self.client.setex(key, self.redis_expiration_time,
247 self._dumps(value))
248 else:
249 self.client.set(key, self._dumps(value))
250
251 def set_multi(self, mapping):
252 dumps = self._dumps
253 mapping = dict(
254 (k, dumps(v))
255 for k, v in mapping.items()
256 )
257
258 if not self.redis_expiration_time:
259 self.client.mset(mapping)
260 else:
261 pipe = self.client.pipeline()
262 for key, value in mapping.items():
263 pipe.setex(key, self.redis_expiration_time, value)
264 pipe.execute()
265
200
266 def get_mutex(self, key):
201 def get_mutex(self, key):
267 if self.distributed_lock:
202 if self.distributed_lock:
268 lock_key = u'_lock_{0}'.format(safe_unicode(key))
203 lock_key = f'_lock_{safe_str(key)}'
269 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
204 return get_mutex_lock(
270 auto_renewal=self._lock_auto_renewal)
205 self.writer_client, lock_key,
206 self._lock_timeout,
207 auto_renewal=self._lock_auto_renewal
208 )
271 else:
209 else:
272 return None
210 return None
273
211
@@ -283,7 +221,7 b' class RedisMsgPackBackend(MsgPackSeriali'
283
221
284
222
285 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
223 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
286 import redis_lock
224 from vcsserver.lib._vendor import redis_lock
287
225
288 class _RedisLockWrapper(object):
226 class _RedisLockWrapper(object):
289 """LockWrapper for redis_lock"""
227 """LockWrapper for redis_lock"""
@@ -299,10 +237,10 b' def get_mutex_lock(client, lock_key, loc'
299 )
237 )
300
238
301 def __repr__(self):
239 def __repr__(self):
302 return "{}:{}".format(self.__class__.__name__, lock_key)
240 return f"{self.__class__.__name__}:{lock_key}"
303
241
304 def __str__(self):
242 def __str__(self):
305 return "{}:{}".format(self.__class__.__name__, lock_key)
243 return f"{self.__class__.__name__}:{lock_key}"
306
244
307 def __init__(self):
245 def __init__(self):
308 self.lock = self.get_lock()
246 self.lock = self.get_lock()
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -15,115 +15,69 b''
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import functools
19 import time
20 import logging
19 import logging
21 import functools
20 import os
21 import threading
22 import time
22
23
24 import decorator
23 from dogpile.cache import CacheRegion
25 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
26
25
27
26 from vcsserver.utils import safe_str, sha1
28 from vcsserver.utils import sha1
29 from vcsserver.str_utils import safe_bytes
30 from vcsserver.type_utils import str2bool
27
31
28 from vcsserver.lib.rc_cache import region_meta
32 from . import region_meta
29
33
30 log = logging.getLogger(__name__)
34 log = logging.getLogger(__name__)
31
35
32
36
33 class RhodeCodeCacheRegion(CacheRegion):
37 class RhodeCodeCacheRegion(CacheRegion):
34
38
39 def __repr__(self):
40 return f'{self.__class__}(name={self.name})'
41
35 def conditional_cache_on_arguments(
42 def conditional_cache_on_arguments(
36 self, namespace=None,
43 self, namespace=None,
37 expiration_time=None,
44 expiration_time=None,
38 should_cache_fn=None,
45 should_cache_fn=None,
39 to_str=compat.string_type,
46 to_str=str,
40 function_key_generator=None,
47 function_key_generator=None,
41 condition=True):
48 condition=True):
42 """
49 """
43 Custom conditional decorator, that will not touch any dogpile internals if
50 Custom conditional decorator, that will not touch any dogpile internals if
44 condition isn't meet. This works a bit different than should_cache_fn
51 condition isn't meet. This works a bit different from should_cache_fn
45 And it's faster in cases we don't ever want to compute cached values
52 And it's faster in cases we don't ever want to compute cached values
46 """
53 """
47 expiration_time_is_callable = compat.callable(expiration_time)
54 expiration_time_is_callable = callable(expiration_time)
55 if not namespace:
56 namespace = getattr(self, '_default_namespace', None)
48
57
49 if function_key_generator is None:
58 if function_key_generator is None:
50 function_key_generator = self.function_key_generator
59 function_key_generator = self.function_key_generator
51
60
52 # workaround for py2 and cython problems, this block should be removed
61 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
53 # once we've migrated to py3
54 if 'cython' == 'cython':
55 def decorator(fn):
56 if to_str is compat.string_type:
57 # backwards compatible
58 key_generator = function_key_generator(namespace, fn)
59 else:
60 key_generator = function_key_generator(namespace, fn, to_str=to_str)
61
62 @functools.wraps(fn)
63 def decorate(*arg, **kw):
64 key = key_generator(*arg, **kw)
65
66 @functools.wraps(fn)
67 def creator():
68 return fn(*arg, **kw)
69
62
70 if not condition:
63 if not condition:
71 return creator()
64 log.debug('Calling un-cached method:%s', user_func.__name__)
65 start = time.time()
66 result = user_func(*arg, **kw)
67 total = time.time() - start
68 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
69 return result
70
71 key = func_key_generator(*arg, **kw)
72
72
73 timeout = expiration_time() if expiration_time_is_callable \
73 timeout = expiration_time() if expiration_time_is_callable \
74 else expiration_time
74 else expiration_time
75
75
76 return self.get_or_create(key, creator, timeout, should_cache_fn)
76 log.debug('Calling cached method:`%s`', user_func.__name__)
77
78 def invalidate(*arg, **kw):
79 key = key_generator(*arg, **kw)
80 self.delete(key)
81
82 def set_(value, *arg, **kw):
83 key = key_generator(*arg, **kw)
84 self.set(key, value)
85
86 def get(*arg, **kw):
87 key = key_generator(*arg, **kw)
88 return self.get(key)
89
90 def refresh(*arg, **kw):
91 key = key_generator(*arg, **kw)
92 value = fn(*arg, **kw)
93 self.set(key, value)
94 return value
95
96 decorate.set = set_
97 decorate.invalidate = invalidate
98 decorate.refresh = refresh
99 decorate.get = get
100 decorate.original = fn
101 decorate.key_generator = key_generator
102 decorate.__wrapped__ = fn
103
104 return decorate
105 return decorator
106
107 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
108
109 if not condition:
110 log.debug('Calling un-cached method:%s', user_func.func_name)
111 start = time.time()
112 result = user_func(*arg, **kw)
113 total = time.time() - start
114 log.debug('un-cached method:%s took %.4fs', user_func.func_name, total)
115 return result
116
117 key = key_generator(*arg, **kw)
118
119 timeout = expiration_time() if expiration_time_is_callable \
120 else expiration_time
121
122 log.debug('Calling cached method:`%s`', user_func.func_name)
123 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
77 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
124
78
125 def cache_decorator(user_func):
79 def cache_decorator(user_func):
126 if to_str is compat.string_type:
80 if to_str is str:
127 # backwards compatible
81 # backwards compatible
128 key_generator = function_key_generator(namespace, user_func)
82 key_generator = function_key_generator(namespace, user_func)
129 else:
83 else:
@@ -176,7 +130,7 b' def get_default_cache_settings(settings,'
176 if key.startswith(prefix):
130 if key.startswith(prefix):
177 name = key.split(prefix)[1].strip()
131 name = key.split(prefix)[1].strip()
178 val = settings[key]
132 val = settings[key]
179 if isinstance(val, compat.string_types):
133 if isinstance(val, str):
180 val = val.strip()
134 val = val.strip()
181 cache_settings[name] = val
135 cache_settings[name] = val
182 return cache_settings
136 return cache_settings
@@ -186,7 +140,21 b' def compute_key_from_params(*args):'
186 """
140 """
187 Helper to compute key from given params to be used in cache manager
141 Helper to compute key from given params to be used in cache manager
188 """
142 """
189 return sha1("_".join(map(safe_str, args)))
143 return sha1(safe_bytes("_".join(map(str, args))))
144
145
146 def custom_key_generator(backend, namespace, fn):
147 func_name = fn.__name__
148
149 def generate_key(*args):
150 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
151 namespace_pref = namespace or 'default_namespace'
152 arg_key = compute_key_from_params(*args)
153 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
154
155 return final_key
156
157 return generate_key
190
158
191
159
192 def backend_key_generator(backend):
160 def backend_key_generator(backend):
@@ -194,49 +162,50 b' def backend_key_generator(backend):'
194 Special wrapper that also sends over the backend to the key generator
162 Special wrapper that also sends over the backend to the key generator
195 """
163 """
196 def wrapper(namespace, fn):
164 def wrapper(namespace, fn):
197 return key_generator(backend, namespace, fn)
165 return custom_key_generator(backend, namespace, fn)
198 return wrapper
166 return wrapper
199
167
200
168
201 def key_generator(backend, namespace, fn):
169 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
202 fname = fn.__name__
170 from .backends import FileNamespaceBackend
171 from . import async_creation_runner
203
172
204 def generate_key(*args):
205 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
206 namespace_pref = namespace or 'default_namespace'
207 arg_key = compute_key_from_params(*args)
208 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
209
210 return final_key
211
212 return generate_key
213
214
215 def get_or_create_region(region_name, region_namespace=None):
216 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
217 region_obj = region_meta.dogpile_cache_regions.get(region_name)
173 region_obj = region_meta.dogpile_cache_regions.get(region_name)
218 if not region_obj:
174 if not region_obj:
219 raise EnvironmentError(
175 reg_keys = list(region_meta.dogpile_cache_regions.keys())
220 'Region `{}` not in configured: {}.'.format(
176 raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')
221 region_name, region_meta.dogpile_cache_regions.keys()))
177
178 region_uid_name = f'{region_name}:{region_namespace}'
222
179
223 region_uid_name = '{}:{}'.format(region_name, region_namespace)
224 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
180 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
181 if not region_namespace:
182 raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')
183
225 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
184 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
226 if region_exist:
185 if region_exist:
227 log.debug('Using already configured region: %s', region_namespace)
186 log.debug('Using already configured region: %s', region_namespace)
228 return region_exist
187 return region_exist
229 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
188
230 expiration_time = region_obj.expiration_time
189 expiration_time = region_obj.expiration_time
231
190
232 if not os.path.isdir(cache_dir):
191 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
233 os.makedirs(cache_dir)
192 namespace_cache_dir = cache_dir
193
194 # we default the namespace_cache_dir to our default cache dir.
195 # however if this backend is configured with filename= param, we prioritize that
196 # so all caches within that particular region, even those namespaced end up in the same path
197 if region_obj.actual_backend.filename:
198 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
199
200 if not os.path.isdir(namespace_cache_dir):
201 os.makedirs(namespace_cache_dir)
234 new_region = make_region(
202 new_region = make_region(
235 name=region_uid_name,
203 name=region_uid_name,
236 function_key_generator=backend_key_generator(region_obj.actual_backend)
204 function_key_generator=backend_key_generator(region_obj.actual_backend)
237 )
205 )
206
238 namespace_filename = os.path.join(
207 namespace_filename = os.path.join(
239 cache_dir, "{}.cache.dbm".format(region_namespace))
208 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
240 # special type that allows 1db per namespace
209 # special type that allows 1db per namespace
241 new_region.configure(
210 new_region.configure(
242 backend='dogpile.cache.rc.file_namespace',
211 backend='dogpile.cache.rc.file_namespace',
@@ -248,16 +217,31 b' def get_or_create_region(region_name, re'
248 log.debug('configuring new region: %s', region_uid_name)
217 log.debug('configuring new region: %s', region_uid_name)
249 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
218 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
250
219
220 region_obj._default_namespace = region_namespace
221 if use_async_runner:
222 region_obj.async_creation_runner = async_creation_runner
251 return region_obj
223 return region_obj
252
224
253
225
254 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
226 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
255 region = get_or_create_region(cache_region, cache_namespace_uid)
227 from . import CLEAR_DELETE, CLEAR_INVALIDATE
256 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
228
257 num_delete_keys = len(cache_keys)
229 if not isinstance(cache_region, RhodeCodeCacheRegion):
258 if invalidate:
230 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
259 region.invalidate(hard=False)
231 log.debug('clearing cache region: %s with method=%s', cache_region, method)
260 else:
232
261 if num_delete_keys:
233 num_affected_keys = None
262 region.delete_multi(cache_keys)
234
263 return num_delete_keys
235 if method == CLEAR_INVALIDATE:
236 # NOTE: The CacheRegion.invalidate() method’s default mode of
237 # operation is to set a timestamp local to this CacheRegion in this Python process only.
238 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
239 cache_region.invalidate(hard=True)
240
241 if method == CLEAR_DELETE:
242 cache_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid)
243 num_affected_keys = len(cache_keys)
244 if num_affected_keys:
245 cache_region.delete_multi(cache_keys)
246
247 return num_affected_keys
@@ -1,7 +1,5 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
5 #
3 #
6 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,3 +1,20 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
1 from vcsserver.lib._vendor.statsd import client_from_config
18 from vcsserver.lib._vendor.statsd import client_from_config
2
19
3
20
@@ -12,7 +29,7 b' class _Singleton(type):'
12
29
13 def __call__(cls, *args, **kwargs):
30 def __call__(cls, *args, **kwargs):
14 if cls not in cls._instances:
31 if cls not in cls._instances:
15 cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs)
32 cls._instances[cls] = super().__call__(*args, **kwargs)
16 return cls._instances[cls]
33 return cls._instances[cls]
17
34
18
35
@@ -24,22 +41,26 b' class StatsdClientClass(Singleton):'
24 setup_run = False
41 setup_run = False
25 statsd_client = None
42 statsd_client = None
26 statsd = None
43 statsd = None
44 strict_mode_init = False
27
45
28 def __getattribute__(self, name):
46 def __getattribute__(self, name):
29
47
30 if name.startswith("statsd"):
48 if name.startswith("statsd"):
31 if self.setup_run:
49 if self.setup_run:
32 return super(StatsdClientClass, self).__getattribute__(name)
50 return super().__getattribute__(name)
33 else:
51 else:
52 if self.strict_mode_init:
53 raise StatsdClientNotInitialised(f"requested key was {name}")
34 return None
54 return None
35 #raise StatsdClientNotInitialised("requested key was %s" % name)
36
55
37 return super(StatsdClientClass, self).__getattribute__(name)
56 return super().__getattribute__(name)
38
57
39 def setup(self, settings):
58 def setup(self, settings):
40 """
59 """
41 Initialize the client
60 Initialize the client
42 """
61 """
62 strict_init_mode = settings.pop('statsd_strict_init', False)
63
43 statsd = client_from_config(settings)
64 statsd = client_from_config(settings)
44 self.statsd = statsd
65 self.statsd = statsd
45 self.statsd_client = statsd
66 self.statsd_client = statsd
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -21,11 +21,13 b' import os'
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import simplejson as json
25 import dulwich.protocol
24 import dulwich.protocol
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver.lib.rc_json import json
28 from vcsserver import hooks, subprocessio
29 from vcsserver import hooks, subprocessio
30 from vcsserver.str_utils import ascii_bytes
29
31
30
32
31 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
@@ -54,7 +56,7 b' class FileWrapper(object):'
54 return data
56 return data
55
57
56 def __repr__(self):
58 def __repr__(self):
57 return '<FileWrapper %s len: %s, read: %s>' % (
59 return '<FileWrapper {} len: {}, read: {}>'.format(
58 self.fd, self.content_length, self.content_length - self.remain
60 self.fd, self.content_length, self.content_length - self.remain
59 )
61 )
60
62
@@ -62,26 +64,25 b' class FileWrapper(object):'
62 class GitRepository(object):
64 class GitRepository(object):
63 """WSGI app for handling Git smart protocol endpoints."""
65 """WSGI app for handling Git smart protocol endpoints."""
64
66
65 git_folder_signature = frozenset(
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
66 ('config', 'head', 'info', 'objects', 'refs'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
69 valid_accepts = frozenset(f'application/x-{c}-result' for c in commands)
69 c for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 FLUSH_PACKET = b"0000"
77
77
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
79 extras):
79
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 files = frozenset(f.lower() for f in os.listdir(content_path))
81 valid_dir_signature = self.git_folder_signature.issubset(files)
82 valid_dir_signature = self.git_folder_signature.issubset(files)
82
83
83 if not valid_dir_signature:
84 if not valid_dir_signature:
84 raise OSError('%s missing git signature' % content_path)
85 raise OSError(f'{content_path} missing git signature')
85
86
86 self.content_path = content_path
87 self.content_path = content_path
87 self.repo_name = repo_name
88 self.repo_name = repo_name
@@ -123,7 +124,7 b' class GitRepository(object):'
123 # It reads binary, per number of bytes specified.
124 # It reads binary, per number of bytes specified.
124 # if you do add '\n' as part of data, count it.
125 # if you do add '\n' as part of data, count it.
125 server_advert = '# service=%s\n' % git_command
126 server_advert = '# service=%s\n' % git_command
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
127 packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
127 try:
128 try:
128 gitenv = dict(os.environ)
129 gitenv = dict(os.environ)
129 # forget all configs
130 # forget all configs
@@ -133,15 +134,15 b' class GitRepository(object):'
133 out = subprocessio.SubprocessIOChunker(
134 out = subprocessio.SubprocessIOChunker(
134 command,
135 command,
135 env=gitenv,
136 env=gitenv,
136 starting_values=[packet_len + server_advert + '0000'],
137 starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
137 shell=False
138 shell=False
138 )
139 )
139 except EnvironmentError:
140 except OSError:
140 log.exception('Error processing command')
141 log.exception('Error processing command')
141 raise exc.HTTPExpectationFailed()
142 raise exc.HTTPExpectationFailed()
142
143
143 resp = Response()
144 resp = Response()
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
145 resp.content_type = f'application/x-{git_command}-advertisement'
145 resp.charset = None
146 resp.charset = None
146 resp.app_iter = out
147 resp.app_iter = out
147
148
@@ -166,34 +167,103 b' class GitRepository(object):'
166 We also print in the error output a message explaining why the command
167 We also print in the error output a message explaining why the command
167 was aborted.
168 was aborted.
168
169
169 If aditionally, the user is accepting messages we send them the output
170 If additionally, the user is accepting messages we send them the output
170 of the pre-pull hook.
171 of the pre-pull hook.
171
172
172 Note that for clients not supporting side-band we just send them the
173 Note that for clients not supporting side-band we just send them the
173 emtpy PACK file.
174 emtpy PACK file.
174 """
175 """
176
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
176 response = []
178 response = []
177 proto = dulwich.protocol.Protocol(None, response.append)
179 proto = dulwich.protocol.Protocol(None, response.append)
178 proto.write_pkt_line('NAK\n')
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
179 self._write_sideband_to_proto(pre_pull_messages, proto,
181
180 capabilities)
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
182 # produces a fatal error in the client:
184 # produces a fatal error in the client:
183 # fatal: error in sideband demultiplexer
185 # fatal: error in sideband demultiplexer
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
186 proto.write_sideband(
185 proto.write_sideband(1, self.EMPTY_PACK)
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
189 proto.write_sideband(
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
186
192
187 # writes 0000
193 # writes b"0000" as default
188 proto.write_pkt_line(None)
194 proto.write_pkt_line(None)
189
195
190 return response
196 return response
191 else:
197 else:
192 return [self.EMPTY_PACK]
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
199
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
201 """
202 Given a list response we inject the post-pull messages.
203
204 We only inject the messages if the client supports sideband, and the
205 response has the format:
206 0008NAK\n...0000
207
208 Note that we do not check the no-progress capability as by default, git
209 sends it, which effectively would block all messages.
210 """
211
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
213 return response
214
215 if not start_message and not end_message:
216 return response
217
218 try:
219 iter(response)
220 # iterator probably will work, we continue
221 except TypeError:
222 raise TypeError(f'response must be an iterator: got {type(response)}')
223 if isinstance(response, (list, tuple)):
224 raise TypeError(f'response must be an iterator: got {type(response)}')
225
226 def injected_response():
193
227
194 def _write_sideband_to_proto(self, data, proto, capabilities):
228 do_loop = 1
229 header_injected = 0
230 next_item = None
231 has_item = False
232 item = b''
233
234 while do_loop:
235
236 try:
237 next_item = next(response)
238 except StopIteration:
239 do_loop = 0
240
241 if has_item:
242 # last item ! alter it now
243 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
244 new_response = [item[:-4]]
245 new_response.extend(self._get_messages(end_message, capabilities))
246 new_response.append(self.FLUSH_PACKET)
247 item = b''.join(new_response)
248
249 yield item
250
251 has_item = True
252 item = next_item
253
254 # alter item if it's the initial chunk
255 if not header_injected and item.startswith(b'0008NAK\n'):
256 new_response = [b'0008NAK\n']
257 new_response.extend(self._get_messages(start_message, capabilities))
258 new_response.append(item[8:])
259 item = b''.join(new_response)
260 header_injected = 1
261
262 return injected_response()
263
264 def _write_sideband_to_proto(self, proto, data, capabilities):
195 """
265 """
196 Write the data to the proto's sideband number 2.
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
197
267
198 We do not use dulwich's write_sideband directly as it only supports
268 We do not use dulwich's write_sideband directly as it only supports
199 side-band-64k.
269 side-band-64k.
@@ -204,68 +274,27 b' class GitRepository(object):'
204 # N.B.(skreft): The values below are explained in the pack protocol
274 # N.B.(skreft): The values below are explained in the pack protocol
205 # documentation, section Packfile Data.
275 # documentation, section Packfile Data.
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
207 if 'side-band-64k' in capabilities:
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
208 chunk_size = 65515
278 chunk_size = 65515
209 elif 'side-band' in capabilities:
279 elif CAPABILITY_SIDE_BAND in capabilities:
210 chunk_size = 995
280 chunk_size = 995
211 else:
281 else:
212 return
282 return
213
283
214 chunker = (
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
216
285
217 for chunk in chunker:
286 for chunk in chunker:
218 proto.write_sideband(2, chunk)
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
219
288
220 def _get_messages(self, data, capabilities):
289 def _get_messages(self, data, capabilities):
221 """Return a list with packets for sending data in sideband number 2."""
290 """Return a list with packets for sending data in sideband number 2."""
222 response = []
291 response = []
223 proto = dulwich.protocol.Protocol(None, response.append)
292 proto = dulwich.protocol.Protocol(None, response.append)
224
293
225 self._write_sideband_to_proto(data, proto, capabilities)
294 self._write_sideband_to_proto(proto, data, capabilities)
226
295
227 return response
296 return response
228
297
229 def _inject_messages_to_response(self, response, capabilities,
230 start_messages, end_messages):
231 """
232 Given a list response we inject the pre/post-pull messages.
233
234 We only inject the messages if the client supports sideband, and the
235 response has the format:
236 0008NAK\n...0000
237
238 Note that we do not check the no-progress capability as by default, git
239 sends it, which effectively would block all messages.
240 """
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
242 return response
243
244 if not start_messages and not end_messages:
245 return response
246
247 # make a list out of response if it's an iterator
248 # so we can investigate it for message injection.
249 if hasattr(response, '__iter__'):
250 response = list(response)
251
252 if (not response[0].startswith('0008NAK\n') or
253 not response[-1].endswith('0000')):
254 return response
255
256 new_response = ['0008NAK\n']
257 new_response.extend(self._get_messages(start_messages, capabilities))
258 if len(response) == 1:
259 new_response.append(response[0][8:-4])
260 else:
261 new_response.append(response[0][8:])
262 new_response.extend(response[1:-1])
263 new_response.append(response[-1][:-4])
264 new_response.extend(self._get_messages(end_messages, capabilities))
265 new_response.append('0000')
266
267 return new_response
268
269 def backend(self, request, environ):
298 def backend(self, request, environ):
270 """
299 """
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
300 WSGI Response producer for HTTP POST Git Smart HTTP requests.
@@ -304,14 +333,15 b' class GitRepository(object):'
304 inputstream = request.body_file_seekable
333 inputstream = request.body_file_seekable
305
334
306 resp = Response()
335 resp = Response()
307 resp.content_type = ('application/x-%s-result' %
336 resp.content_type = f'application/x-{git_command}-result'
308 git_command.encode('utf8'))
309 resp.charset = None
337 resp.charset = None
310
338
311 pre_pull_messages = ''
339 pre_pull_messages = ''
340 # Upload-pack == clone
312 if git_command == 'git-upload-pack':
341 if git_command == 'git-upload-pack':
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
342 hook_response = hooks.git_pre_pull(self.extras)
314 if status != 0:
343 if hook_response.status != 0:
344 pre_pull_messages = hook_response.output
315 resp.app_iter = self._build_failed_pre_pull_response(
345 resp.app_iter = self._build_failed_pre_pull_response(
316 capabilities, pre_pull_messages)
346 capabilities, pre_pull_messages)
317 return resp
347 return resp
@@ -326,7 +356,7 b' class GitRepository(object):'
326
356
327 out = subprocessio.SubprocessIOChunker(
357 out = subprocessio.SubprocessIOChunker(
328 cmd,
358 cmd,
329 inputstream=inputstream,
359 input_stream=inputstream,
330 env=gitenv,
360 env=gitenv,
331 cwd=self.content_path,
361 cwd=self.content_path,
332 shell=False,
362 shell=False,
@@ -346,7 +376,7 b' class GitRepository(object):'
346 log.debug('handling cmd %s', cmd)
376 log.debug('handling cmd %s', cmd)
347 output = subprocessio.SubprocessIOChunker(
377 output = subprocessio.SubprocessIOChunker(
348 cmd,
378 cmd,
349 inputstream=inputstream,
379 input_stream=inputstream,
350 env=gitenv,
380 env=gitenv,
351 cwd=self.content_path,
381 cwd=self.content_path,
352 shell=False,
382 shell=False,
@@ -357,10 +387,11 b' class GitRepository(object):'
357 for _ in output:
387 for _ in output:
358 pass
388 pass
359
389
390 # Upload-pack == clone
360 if git_command == 'git-upload-pack':
391 if git_command == 'git-upload-pack':
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
392 hook_response = hooks.git_post_pull(self.extras)
362 resp.app_iter = self._inject_messages_to_response(
393 post_pull_messages = hook_response.output
363 out, capabilities, pre_pull_messages, post_pull_messages)
394 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
364 else:
395 else:
365 resp.app_iter = out
396 resp.app_iter = out
366
397
This diff has been collapsed as it changes many lines, (668 lines changed) Show them Hide them
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -18,12 +18,12 b''
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
22 import re
21 import re
23 import stat
22 import stat
24 import traceback
23 import traceback
25 import urllib
24 import urllib.request
26 import urllib2
25 import urllib.parse
26 import urllib.error
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
@@ -31,7 +31,7 b' import pygit2'
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
@@ -40,8 +40,8 b' from dulwich.repo import Repo as Dulwich'
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
@@ -50,19 +50,12 b' from vcsserver.vcs_base import RemoteBas'
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = '^{}'
53 PEELED_REF_MARKER = b'^{}'
54
54 HEAD_MARKER = b'HEAD'
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def str_to_dulwich(value):
60 """
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
62 """
63 return value.decode(settings.WIRE_ENCODING)
64
65
66 def reraise_safe_exceptions(func):
59 def reraise_safe_exceptions(func):
67 """Converts Dulwich exceptions to something neutral."""
60 """Converts Dulwich exceptions to something neutral."""
68
61
@@ -76,8 +69,8 b' def reraise_safe_exceptions(func):'
76 except (HangupException, UnexpectedCommandError) as e:
69 except (HangupException, UnexpectedCommandError) as e:
77 exc = exceptions.VcsException(org_exc=e)
70 exc = exceptions.VcsException(org_exc=e)
78 raise exc(safe_str(e))
71 raise exc(safe_str(e))
79 except Exception as e:
72 except Exception:
80 # NOTE(marcink): becuase of how dulwich handles some exceptions
73 # NOTE(marcink): because of how dulwich handles some exceptions
81 # (KeyError on empty repos), we cannot track this and catch all
74 # (KeyError on empty repos), we cannot track this and catch all
82 # exceptions, it's an exceptions from other handlers
75 # exceptions, it's an exceptions from other handlers
83 #if not hasattr(e, '_vcs_kind'):
76 #if not hasattr(e, '_vcs_kind'):
@@ -114,10 +107,14 b' class GitFactory(RepoFactory):'
114
107
115 def _create_repo(self, wire, create, use_libgit2=False):
108 def _create_repo(self, wire, create, use_libgit2=False):
116 if use_libgit2:
109 if use_libgit2:
117 return Repository(wire['path'])
110 repo = Repository(safe_bytes(wire['path']))
118 else:
111 else:
119 repo_path = str_to_dulwich(wire['path'])
112 # dulwich mode
120 return Repo(repo_path)
113 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
114 repo = Repo(repo_path)
115
116 log.debug('repository created: got GIT object: %s', repo)
117 return repo
121
118
122 def repo(self, wire, create=False, use_libgit2=False):
119 def repo(self, wire, create=False, use_libgit2=False):
123 """
120 """
@@ -129,6 +126,28 b' class GitFactory(RepoFactory):'
129 return self.repo(wire, use_libgit2=True)
126 return self.repo(wire, use_libgit2=True)
130
127
131
128
129 def create_signature_from_string(author_str, **kwargs):
130 """
131 Creates a pygit2.Signature object from a string of the format 'Name <email>'.
132
133 :param author_str: String of the format 'Name <email>'
134 :return: pygit2.Signature object
135 """
136 match = re.match(r'^(.+) <(.+)>$', author_str)
137 if match is None:
138 raise ValueError(f"Invalid format: {author_str}")
139
140 name, email = match.groups()
141 return pygit2.Signature(name, email, **kwargs)
142
143
144 def get_obfuscated_url(url_obj):
145 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
146 url_obj.query = obfuscate_qs(url_obj.query)
147 obfuscated_uri = str(url_obj)
148 return obfuscated_uri
149
150
132 class GitRemote(RemoteBase):
151 class GitRemote(RemoteBase):
133
152
134 def __init__(self, factory):
153 def __init__(self, factory):
@@ -141,10 +160,17 b' class GitRemote(RemoteBase):'
141 "parents": self.parents,
160 "parents": self.parents,
142 "_commit": self.revision,
161 "_commit": self.revision,
143 }
162 }
163 self._bulk_file_methods = {
164 "size": self.get_node_size,
165 "data": self.get_node_data,
166 "flags": self.get_node_flags,
167 "is_binary": self.get_node_is_binary,
168 "md5": self.md5_hash
169 }
144
170
145 def _wire_to_config(self, wire):
171 def _wire_to_config(self, wire):
146 if 'config' in wire:
172 if 'config' in wire:
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
148 return {}
174 return {}
149
175
150 def _remote_conf(self, config):
176 def _remote_conf(self, config):
@@ -153,17 +179,17 b' class GitRemote(RemoteBase):'
153 ]
179 ]
154 ssl_cert_dir = config.get('vcs_ssl_dir')
180 ssl_cert_dir = config.get('vcs_ssl_dir')
155 if ssl_cert_dir:
181 if ssl_cert_dir:
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
182 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
157 return params
183 return params
158
184
159 @reraise_safe_exceptions
185 @reraise_safe_exceptions
160 def discover_git_version(self):
186 def discover_git_version(self):
161 stdout, _ = self.run_git_command(
187 stdout, _ = self.run_git_command(
162 {}, ['--version'], _bare=True, _safe=True)
188 {}, ['--version'], _bare=True, _safe=True)
163 prefix = 'git version'
189 prefix = b'git version'
164 if stdout.startswith(prefix):
190 if stdout.startswith(prefix):
165 stdout = stdout[len(prefix):]
191 stdout = stdout[len(prefix):]
166 return stdout.strip()
192 return safe_str(stdout.strip())
167
193
168 @reraise_safe_exceptions
194 @reraise_safe_exceptions
169 def is_empty(self, wire):
195 def is_empty(self, wire):
@@ -186,20 +212,27 b' class GitRemote(RemoteBase):'
186 def assert_correct_path(self, wire):
212 def assert_correct_path(self, wire):
187 cache_on, context_uid, repo_id = self._cache_on(wire)
213 cache_on, context_uid, repo_id = self._cache_on(wire)
188 region = self._region(wire)
214 region = self._region(wire)
215
189 @region.conditional_cache_on_arguments(condition=cache_on)
216 @region.conditional_cache_on_arguments(condition=cache_on)
190 def _assert_correct_path(_context_uid, _repo_id):
217 def _assert_correct_path(_context_uid, _repo_id, fast_check):
218 if fast_check:
219 path = safe_str(wire['path'])
220 if pygit2.discover_repository(path):
221 return True
222 return False
223 else:
191 try:
224 try:
192 repo_init = self._factory.repo_libgit2(wire)
225 repo_init = self._factory.repo_libgit2(wire)
193 with repo_init as repo:
226 with repo_init:
194 pass
227 pass
195 except pygit2.GitError:
228 except pygit2.GitError:
196 path = wire.get('path')
229 path = wire.get('path')
197 tb = traceback.format_exc()
230 tb = traceback.format_exc()
198 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
231 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
199 return False
232 return False
233 return True
200
234
201 return True
235 return _assert_correct_path(context_uid, repo_id, True)
202 return _assert_correct_path(context_uid, repo_id)
203
236
204 @reraise_safe_exceptions
237 @reraise_safe_exceptions
205 def bare(self, wire):
238 def bare(self, wire):
@@ -208,17 +241,69 b' class GitRemote(RemoteBase):'
208 return repo.is_bare
241 return repo.is_bare
209
242
210 @reraise_safe_exceptions
243 @reraise_safe_exceptions
244 def get_node_data(self, wire, commit_id, path):
245 repo_init = self._factory.repo_libgit2(wire)
246 with repo_init as repo:
247 commit = repo[commit_id]
248 blob_obj = commit.tree[path]
249
250 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
251 raise exceptions.LookupException()(
252 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
253
254 return BytesEnvelope(blob_obj.data)
255
256 @reraise_safe_exceptions
257 def get_node_size(self, wire, commit_id, path):
258 repo_init = self._factory.repo_libgit2(wire)
259 with repo_init as repo:
260 commit = repo[commit_id]
261 blob_obj = commit.tree[path]
262
263 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
264 raise exceptions.LookupException()(
265 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
266
267 return blob_obj.size
268
269 @reraise_safe_exceptions
270 def get_node_flags(self, wire, commit_id, path):
271 repo_init = self._factory.repo_libgit2(wire)
272 with repo_init as repo:
273 commit = repo[commit_id]
274 blob_obj = commit.tree[path]
275
276 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
277 raise exceptions.LookupException()(
278 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
279
280 return blob_obj.filemode
281
282 @reraise_safe_exceptions
283 def get_node_is_binary(self, wire, commit_id, path):
284 repo_init = self._factory.repo_libgit2(wire)
285 with repo_init as repo:
286 commit = repo[commit_id]
287 blob_obj = commit.tree[path]
288
289 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
290 raise exceptions.LookupException()(
291 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
292
293 return blob_obj.is_binary
294
295 @reraise_safe_exceptions
211 def blob_as_pretty_string(self, wire, sha):
296 def blob_as_pretty_string(self, wire, sha):
212 repo_init = self._factory.repo_libgit2(wire)
297 repo_init = self._factory.repo_libgit2(wire)
213 with repo_init as repo:
298 with repo_init as repo:
214 blob_obj = repo[sha]
299 blob_obj = repo[sha]
215 blob = blob_obj.data
300 return BytesEnvelope(blob_obj.data)
216 return blob
217
301
218 @reraise_safe_exceptions
302 @reraise_safe_exceptions
219 def blob_raw_length(self, wire, sha):
303 def blob_raw_length(self, wire, sha):
220 cache_on, context_uid, repo_id = self._cache_on(wire)
304 cache_on, context_uid, repo_id = self._cache_on(wire)
221 region = self._region(wire)
305 region = self._region(wire)
306
222 @region.conditional_cache_on_arguments(condition=cache_on)
307 @region.conditional_cache_on_arguments(condition=cache_on)
223 def _blob_raw_length(_repo_id, _sha):
308 def _blob_raw_length(_repo_id, _sha):
224
309
@@ -230,10 +315,10 b' class GitRemote(RemoteBase):'
230 return _blob_raw_length(repo_id, sha)
315 return _blob_raw_length(repo_id, sha)
231
316
232 def _parse_lfs_pointer(self, raw_content):
317 def _parse_lfs_pointer(self, raw_content):
318 spec_string = b'version https://git-lfs.github.com/spec'
319 if raw_content and raw_content.startswith(spec_string):
233
320
234 spec_string = 'version https://git-lfs.github.com/spec'
321 pattern = re.compile(rb"""
235 if raw_content and raw_content.startswith(spec_string):
236 pattern = re.compile(r"""
237 (?:\n)?
322 (?:\n)?
238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
323 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
324 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
@@ -249,8 +334,8 b' class GitRemote(RemoteBase):'
249 @reraise_safe_exceptions
334 @reraise_safe_exceptions
250 def is_large_file(self, wire, commit_id):
335 def is_large_file(self, wire, commit_id):
251 cache_on, context_uid, repo_id = self._cache_on(wire)
336 cache_on, context_uid, repo_id = self._cache_on(wire)
337 region = self._region(wire)
252
338
253 region = self._region(wire)
254 @region.conditional_cache_on_arguments(condition=cache_on)
339 @region.conditional_cache_on_arguments(condition=cache_on)
255 def _is_large_file(_repo_id, _sha):
340 def _is_large_file(_repo_id, _sha):
256 repo_init = self._factory.repo_libgit2(wire)
341 repo_init = self._factory.repo_libgit2(wire)
@@ -266,8 +351,8 b' class GitRemote(RemoteBase):'
266 @reraise_safe_exceptions
351 @reraise_safe_exceptions
267 def is_binary(self, wire, tree_id):
352 def is_binary(self, wire, tree_id):
268 cache_on, context_uid, repo_id = self._cache_on(wire)
353 cache_on, context_uid, repo_id = self._cache_on(wire)
354 region = self._region(wire)
269
355
270 region = self._region(wire)
271 @region.conditional_cache_on_arguments(condition=cache_on)
356 @region.conditional_cache_on_arguments(condition=cache_on)
272 def _is_binary(_repo_id, _tree_id):
357 def _is_binary(_repo_id, _tree_id):
273 repo_init = self._factory.repo_libgit2(wire)
358 repo_init = self._factory.repo_libgit2(wire)
@@ -278,6 +363,26 b' class GitRemote(RemoteBase):'
278 return _is_binary(repo_id, tree_id)
363 return _is_binary(repo_id, tree_id)
279
364
280 @reraise_safe_exceptions
365 @reraise_safe_exceptions
366 def md5_hash(self, wire, commit_id, path):
367 cache_on, context_uid, repo_id = self._cache_on(wire)
368 region = self._region(wire)
369
370 @region.conditional_cache_on_arguments(condition=cache_on)
371 def _md5_hash(_repo_id, _commit_id, _path):
372 repo_init = self._factory.repo_libgit2(wire)
373 with repo_init as repo:
374 commit = repo[_commit_id]
375 blob_obj = commit.tree[_path]
376
377 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
378 raise exceptions.LookupException()(
379 f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')
380
381 return ''
382
383 return _md5_hash(repo_id, commit_id, path)
384
385 @reraise_safe_exceptions
281 def in_largefiles_store(self, wire, oid):
386 def in_largefiles_store(self, wire, oid):
282 conf = self._wire_to_config(wire)
387 conf = self._wire_to_config(wire)
283 repo_init = self._factory.repo_libgit2(wire)
388 repo_init = self._factory.repo_libgit2(wire)
@@ -305,90 +410,105 b' class GitRemote(RemoteBase):'
305 store = LFSOidStore(
410 store = LFSOidStore(
306 oid=oid, repo=repo_name, store_location=store_location)
411 oid=oid, repo=repo_name, store_location=store_location)
307 return store.oid_path
412 return store.oid_path
308 raise ValueError('Unable to fetch oid with path {}'.format(oid))
413 raise ValueError(f'Unable to fetch oid with path {oid}')
309
414
310 @reraise_safe_exceptions
415 @reraise_safe_exceptions
311 def bulk_request(self, wire, rev, pre_load):
416 def bulk_request(self, wire, rev, pre_load):
312 cache_on, context_uid, repo_id = self._cache_on(wire)
417 cache_on, context_uid, repo_id = self._cache_on(wire)
313 region = self._region(wire)
418 region = self._region(wire)
419
314 @region.conditional_cache_on_arguments(condition=cache_on)
420 @region.conditional_cache_on_arguments(condition=cache_on)
315 def _bulk_request(_repo_id, _rev, _pre_load):
421 def _bulk_request(_repo_id, _rev, _pre_load):
316 result = {}
422 result = {}
317 for attr in pre_load:
423 for attr in pre_load:
318 try:
424 try:
319 method = self._bulk_methods[attr]
425 method = self._bulk_methods[attr]
426 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
320 args = [wire, rev]
427 args = [wire, rev]
321 result[attr] = method(*args)
428 result[attr] = method(*args)
322 except KeyError as e:
429 except KeyError as e:
323 raise exceptions.VcsException(e)(
430 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
324 "Unknown bulk attribute: %s" % attr)
325 return result
431 return result
326
432
327 return _bulk_request(repo_id, rev, sorted(pre_load))
433 return _bulk_request(repo_id, rev, sorted(pre_load))
328
434
329 def _build_opener(self, url):
435 @reraise_safe_exceptions
436 def bulk_file_request(self, wire, commit_id, path, pre_load):
437 cache_on, context_uid, repo_id = self._cache_on(wire)
438 region = self._region(wire)
439
440 @region.conditional_cache_on_arguments(condition=cache_on)
441 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
442 result = {}
443 for attr in pre_load:
444 try:
445 method = self._bulk_file_methods[attr]
446 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
447 result[attr] = method(wire, _commit_id, _path)
448 except KeyError as e:
449 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
450 return BinaryEnvelope(result)
451
452 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
453
454 def _build_opener(self, url: str):
330 handlers = []
455 handlers = []
331 url_obj = url_parser(url)
456 url_obj = url_parser(safe_bytes(url))
332 _, authinfo = url_obj.authinfo()
457 authinfo = url_obj.authinfo()[1]
333
458
334 if authinfo:
459 if authinfo:
335 # create a password manager
460 # create a password manager
336 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
461 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
337 passmgr.add_password(*authinfo)
462 passmgr.add_password(*authinfo)
338
463
339 handlers.extend((httpbasicauthhandler(passmgr),
464 handlers.extend((httpbasicauthhandler(passmgr),
340 httpdigestauthhandler(passmgr)))
465 httpdigestauthhandler(passmgr)))
341
466
342 return urllib2.build_opener(*handlers)
467 return urllib.request.build_opener(*handlers)
343
344 def _type_id_to_name(self, type_id):
345 return {
346 1: b'commit',
347 2: b'tree',
348 3: b'blob',
349 4: b'tag'
350 }[type_id]
351
468
352 @reraise_safe_exceptions
469 @reraise_safe_exceptions
353 def check_url(self, url, config):
470 def check_url(self, url, config):
354 url_obj = url_parser(url)
471 url_obj = url_parser(safe_bytes(url))
355 test_uri, _ = url_obj.authinfo()
472
356 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
473 test_uri = safe_str(url_obj.authinfo()[0])
357 url_obj.query = obfuscate_qs(url_obj.query)
474 obfuscated_uri = get_obfuscated_url(url_obj)
358 cleaned_uri = str(url_obj)
475
359 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
476 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
360
477
361 if not test_uri.endswith('info/refs'):
478 if not test_uri.endswith('info/refs'):
362 test_uri = test_uri.rstrip('/') + '/info/refs'
479 test_uri = test_uri.rstrip('/') + '/info/refs'
363
480
364 o = self._build_opener(url)
481 o = self._build_opener(test_uri)
365 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
482 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
366
483
367 q = {"service": 'git-upload-pack'}
484 q = {"service": 'git-upload-pack'}
368 qs = '?%s' % urllib.urlencode(q)
485 qs = '?%s' % urllib.parse.urlencode(q)
369 cu = "%s%s" % (test_uri, qs)
486 cu = f"{test_uri}{qs}"
370 req = urllib2.Request(cu, None, {})
487 req = urllib.request.Request(cu, None, {})
371
488
372 try:
489 try:
373 log.debug("Trying to open URL %s", cleaned_uri)
490 log.debug("Trying to open URL %s", obfuscated_uri)
374 resp = o.open(req)
491 resp = o.open(req)
375 if resp.code != 200:
492 if resp.code != 200:
376 raise exceptions.URLError()('Return Code is not 200')
493 raise exceptions.URLError()('Return Code is not 200')
377 except Exception as e:
494 except Exception as e:
378 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
495 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
379 # means it cannot be cloned
496 # means it cannot be cloned
380 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
497 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
381
498
382 # now detect if it's proper git repo
499 # now detect if it's proper git repo
383 gitdata = resp.read()
500 gitdata: bytes = resp.read()
384 if 'service=git-upload-pack' in gitdata:
501
502 if b'service=git-upload-pack' in gitdata:
385 pass
503 pass
386 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
504 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
387 # old style git can return some other format !
505 # old style git can return some other format !
388 pass
506 pass
389 else:
507 else:
390 raise exceptions.URLError()(
508 e = None
391 "url [%s] does not look like an git" % (cleaned_uri,))
509 raise exceptions.URLError(e)(
510 "url [%s] does not look like an hg repo org_exc: %s"
511 % (obfuscated_uri, e))
392
512
393 return True
513 return True
394
514
@@ -415,6 +535,7 b' class GitRemote(RemoteBase):'
415 def branch(self, wire, commit_id):
535 def branch(self, wire, commit_id):
416 cache_on, context_uid, repo_id = self._cache_on(wire)
536 cache_on, context_uid, repo_id = self._cache_on(wire)
417 region = self._region(wire)
537 region = self._region(wire)
538
418 @region.conditional_cache_on_arguments(condition=cache_on)
539 @region.conditional_cache_on_arguments(condition=cache_on)
419 def _branch(_context_uid, _repo_id, _commit_id):
540 def _branch(_context_uid, _repo_id, _commit_id):
420 regex = re.compile('^refs/heads')
541 regex = re.compile('^refs/heads')
@@ -422,7 +543,7 b' class GitRemote(RemoteBase):'
422 def filter_with(ref):
543 def filter_with(ref):
423 return regex.match(ref[0]) and ref[1] == _commit_id
544 return regex.match(ref[0]) and ref[1] == _commit_id
424
545
425 branches = filter(filter_with, self.get_refs(wire).items())
546 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
426 return [x[0].split('refs/heads/')[-1] for x in branches]
547 return [x[0].split('refs/heads/')[-1] for x in branches]
427
548
428 return _branch(context_uid, repo_id, commit_id)
549 return _branch(context_uid, repo_id, commit_id)
@@ -431,6 +552,7 b' class GitRemote(RemoteBase):'
431 def commit_branches(self, wire, commit_id):
552 def commit_branches(self, wire, commit_id):
432 cache_on, context_uid, repo_id = self._cache_on(wire)
553 cache_on, context_uid, repo_id = self._cache_on(wire)
433 region = self._region(wire)
554 region = self._region(wire)
555
434 @region.conditional_cache_on_arguments(condition=cache_on)
556 @region.conditional_cache_on_arguments(condition=cache_on)
435 def _commit_branches(_context_uid, _repo_id, _commit_id):
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
436 repo_init = self._factory.repo_libgit2(wire)
558 repo_init = self._factory.repo_libgit2(wire)
@@ -449,152 +571,136 b' class GitRemote(RemoteBase):'
449 repo.object_store.add_object(blob)
571 repo.object_store.add_object(blob)
450 return blob.id
572 return blob.id
451
573
452 # TODO: this is quite complex, check if that can be simplified
574 @reraise_safe_exceptions
575 def create_commit(self, wire, author, committer, message, branch, new_tree_id, date_args: list[int, int] = None):
576 repo_init = self._factory.repo_libgit2(wire)
577 with repo_init as repo:
578
579 if date_args:
580 current_time, offset = date_args
581
582 kw = {
583 'time': current_time,
584 'offset': offset
585 }
586 author = create_signature_from_string(author, **kw)
587 committer = create_signature_from_string(committer, **kw)
588
589 tree = new_tree_id
590 if isinstance(tree, (bytes, str)):
591 # validate this tree is in the repo...
592 tree = repo[safe_str(tree)].id
593
594 parents = []
595 # ensure we COMMIT on top of given branch head
596 # check if this repo has ANY branches, otherwise it's a new branch case we need to make
597 if branch in repo.branches.local:
598 parents += [repo.branches[branch].target]
599 elif [x for x in repo.branches.local]:
600 parents += [repo.head.target]
601 #else:
602 # in case we want to commit on new branch we create it on top of HEAD
603 #repo.branches.local.create(branch, repo.revparse_single('HEAD'))
604
605 # # Create a new commit
606 commit_oid = repo.create_commit(
607 f'refs/heads/{branch}', # the name of the reference to update
608 author, # the author of the commit
609 committer, # the committer of the commit
610 message, # the commit message
611 tree, # the tree produced by the index
612 parents # list of parents for the new commit, usually just one,
613 )
614
615 new_commit_id = safe_str(commit_oid)
616
617 return new_commit_id
618
453 @reraise_safe_exceptions
619 @reraise_safe_exceptions
454 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
620 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
455 # Defines the root tree
456 class _Root(object):
457 def __repr__(self):
458 return 'ROOT TREE'
459 ROOT = _Root()
460
621
461 repo = self._factory.repo(wire)
622 def mode2pygit(mode):
462 object_store = repo.object_store
623 """
463
624 git only supports two filemode 644 and 755
464 # Create tree and populates it with blobs
465
466 if commit_tree and repo[commit_tree]:
467 git_commit = repo[commit_data['parents'][0]]
468 commit_tree = repo[git_commit.tree] # root tree
469 else:
470 commit_tree = objects.Tree()
471
472 for node in updated:
473 # Compute subdirs if needed
474 dirpath, nodename = vcspath.split(node['path'])
475 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
476 parent = commit_tree
477 ancestors = [('', parent)]
478
625
479 # Tries to dig for the deepest existing tree
626 0o100755 -> 33261
480 while dirnames:
627 0o100644 -> 33188
481 curdir = dirnames.pop(0)
628 """
482 try:
629 return {
483 dir_id = parent[curdir][1]
630 0o100644: pygit2.GIT_FILEMODE_BLOB,
484 except KeyError:
631 0o100755: pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
485 # put curdir back into dirnames and stops
632 0o120000: pygit2.GIT_FILEMODE_LINK
486 dirnames.insert(0, curdir)
633 }.get(mode) or pygit2.GIT_FILEMODE_BLOB
487 break
488 else:
489 # If found, updates parent
490 parent = repo[dir_id]
491 ancestors.append((curdir, parent))
492 # Now parent is deepest existing tree and we need to create
493 # subtrees for dirnames (in reverse order)
494 # [this only applies for nodes from added]
495 new_trees = []
496
634
497 blob = objects.Blob.from_string(node['content'])
635 repo_init = self._factory.repo_libgit2(wire)
636 with repo_init as repo:
637 repo_index = repo.index
498
638
499 if dirnames:
639 for pathspec in updated:
500 # If there are trees which should be created we need to build
640 blob_id = repo.create_blob(pathspec['content'])
501 # them now (in reverse order)
641 ie = pygit2.IndexEntry(pathspec['path'], blob_id, mode2pygit(pathspec['mode']))
502 reversed_dirnames = list(reversed(dirnames))
642 repo_index.add(ie)
503 curtree = objects.Tree()
504 curtree[node['node_path']] = node['mode'], blob.id
505 new_trees.append(curtree)
506 for dirname in reversed_dirnames[:-1]:
507 newtree = objects.Tree()
508 newtree[dirname] = (DIR_STAT, curtree.id)
509 new_trees.append(newtree)
510 curtree = newtree
511 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
512 else:
513 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
514
643
515 new_trees.append(parent)
644 for pathspec in removed:
516 # Update ancestors
645 repo_index.remove(pathspec)
517 reversed_ancestors = reversed(
518 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
519 for parent, tree, path in reversed_ancestors:
520 parent[path] = (DIR_STAT, tree.id)
521 object_store.add_object(tree)
522
646
523 object_store.add_object(blob)
647 # Write changes to the index
524 for tree in new_trees:
648 repo_index.write()
525 object_store.add_object(tree)
649
650 # Create a tree from the updated index
651 commit_tree = repo_index.write_tree()
652
653 new_tree_id = commit_tree
526
654
527 for node_path in removed:
655 author = commit_data['author']
528 paths = node_path.split('/')
656 committer = commit_data['committer']
529 tree = commit_tree # start with top-level
657 message = commit_data['message']
530 trees = [{'tree': tree, 'path': ROOT}]
658
531 # Traverse deep into the forest...
659 date_args = [int(commit_data['commit_time']), int(commit_data['commit_timezone'])]
532 # resolve final tree by iterating the path.
533 # e.g a/b/c.txt will get
534 # - root as tree then
535 # - 'a' as tree,
536 # - 'b' as tree,
537 # - stop at c as blob.
538 for path in paths:
539 try:
540 obj = repo[tree[path][1]]
541 if isinstance(obj, objects.Tree):
542 trees.append({'tree': obj, 'path': path})
543 tree = obj
544 except KeyError:
545 break
546 #PROBLEM:
547 """
548 We're not editing same reference tree object
549 """
550 # Cut down the blob and all rotten trees on the way back...
551 for path, tree_data in reversed(zip(paths, trees)):
552 tree = tree_data['tree']
553 tree.__delitem__(path)
554 # This operation edits the tree, we need to mark new commit back
555
660
556 if len(tree) > 0:
661 new_commit_id = self.create_commit(wire, author, committer, message, branch,
557 # This tree still has elements - don't remove it or any
662 new_tree_id, date_args=date_args)
558 # of it's parents
559 break
560
561 object_store.add_object(commit_tree)
562
663
563 # Create commit
664 # libgit2, ensure the branch is there and exists
564 commit = objects.Commit()
665 self.create_branch(wire, branch, new_commit_id)
565 commit.tree = commit_tree.id
566 for k, v in commit_data.items():
567 setattr(commit, k, v)
568 object_store.add_object(commit)
569
666
570 self.create_branch(wire, branch, commit.id)
667 # libgit2, set new ref to this created commit
668 self.set_refs(wire, f'refs/heads/{branch}', new_commit_id)
571
669
572 # dulwich set-ref
670 return new_commit_id
573 ref = 'refs/heads/%s' % branch
574 repo.refs[ref] = commit.id
575
576 return commit.id
577
671
578 @reraise_safe_exceptions
672 @reraise_safe_exceptions
579 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
673 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
580 if url != 'default' and '://' not in url:
674 if url != 'default' and '://' not in url:
581 client = LocalGitClient(url)
675 client = LocalGitClient(url)
582 else:
676 else:
583 url_obj = url_parser(url)
677 url_obj = url_parser(safe_bytes(url))
584 o = self._build_opener(url)
678 o = self._build_opener(url)
585 url, _ = url_obj.authinfo()
679 url = url_obj.authinfo()[0]
586 client = HttpGitClient(base_url=url, opener=o)
680 client = HttpGitClient(base_url=url, opener=o)
587 repo = self._factory.repo(wire)
681 repo = self._factory.repo(wire)
588
682
589 determine_wants = repo.object_store.determine_wants_all
683 determine_wants = repo.object_store.determine_wants_all
590 if refs:
684 if refs:
591 def determine_wants_requested(references):
685 refs = [ascii_bytes(x) for x in refs]
592 return [references[r] for r in references if r in refs]
686
687 def determine_wants_requested(remote_refs):
688 determined = []
689 for ref_name, ref_hash in remote_refs.items():
690 bytes_ref_name = safe_bytes(ref_name)
691
692 if bytes_ref_name in refs:
693 bytes_ref_hash = safe_bytes(ref_hash)
694 determined.append(bytes_ref_hash)
695 return determined
696
697 # swap with our custom requested wants
593 determine_wants = determine_wants_requested
698 determine_wants = determine_wants_requested
594
699
595 try:
700 try:
596 remote_refs = client.fetch(
701 remote_refs = client.fetch(
597 path=url, target=repo, determine_wants=determine_wants)
702 path=url, target=repo, determine_wants=determine_wants)
703
598 except NotGitRepository as e:
704 except NotGitRepository as e:
599 log.warning(
705 log.warning(
600 'Trying to fetch from "%s" failed, not a Git repository.', url)
706 'Trying to fetch from "%s" failed, not a Git repository.', url)
@@ -619,18 +725,21 b' class GitRemote(RemoteBase):'
619
725
620 if refs and not update_after:
726 if refs and not update_after:
621 # mikhail: explicitly set the head to the last ref.
727 # mikhail: explicitly set the head to the last ref.
622 repo["HEAD"] = remote_refs[refs[-1]]
728 repo[HEAD_MARKER] = remote_refs[refs[-1]]
623
729
624 if update_after:
730 if update_after:
625 # we want to checkout HEAD
731 # we want to check out HEAD
626 repo["HEAD"] = remote_refs["HEAD"]
732 repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
627 index.build_index_from_tree(repo.path, repo.index_path(),
733 index.build_index_from_tree(repo.path, repo.index_path(),
628 repo.object_store, repo["HEAD"].tree)
734 repo.object_store, repo[HEAD_MARKER].tree)
735
736 if isinstance(remote_refs, FetchPackResult):
737 return remote_refs.refs
629 return remote_refs
738 return remote_refs
630
739
631 @reraise_safe_exceptions
740 @reraise_safe_exceptions
632 def sync_fetch(self, wire, url, refs=None, all_refs=False):
741 def sync_fetch(self, wire, url, refs=None, all_refs=False):
633 repo = self._factory.repo(wire)
742 self._factory.repo(wire)
634 if refs and not isinstance(refs, (list, tuple)):
743 if refs and not isinstance(refs, (list, tuple)):
635 refs = [refs]
744 refs = [refs]
636
745
@@ -649,7 +758,7 b' class GitRemote(RemoteBase):'
649 fetch_refs = []
758 fetch_refs = []
650
759
651 for ref_line in output.splitlines():
760 for ref_line in output.splitlines():
652 sha, ref = ref_line.split('\t')
761 sha, ref = ref_line.split(b'\t')
653 sha = sha.strip()
762 sha = sha.strip()
654 if ref in remote_refs:
763 if ref in remote_refs:
655 # duplicate, skip
764 # duplicate, skip
@@ -658,23 +767,23 b' class GitRemote(RemoteBase):'
658 log.debug("Skipping peeled reference %s", ref)
767 log.debug("Skipping peeled reference %s", ref)
659 continue
768 continue
660 # don't sync HEAD
769 # don't sync HEAD
661 if ref in ['HEAD']:
770 if ref in [HEAD_MARKER]:
662 continue
771 continue
663
772
664 remote_refs[ref] = sha
773 remote_refs[ref] = sha
665
774
666 if refs and sha in refs:
775 if refs and sha in refs:
667 # we filter fetch using our specified refs
776 # we filter fetch using our specified refs
668 fetch_refs.append('{}:{}'.format(ref, ref))
777 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
669 elif not refs:
778 elif not refs:
670 fetch_refs.append('{}:{}'.format(ref, ref))
779 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
671 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
780 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
672
781
673 if fetch_refs:
782 if fetch_refs:
674 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
783 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
675 fetch_refs_chunks = list(chunk)
784 fetch_refs_chunks = list(chunk)
676 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
785 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
677 _out, _err = self.run_git_command(
786 self.run_git_command(
678 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
787 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
679 fail_on_stderr=False,
788 fail_on_stderr=False,
680 _copts=self._remote_conf(config),
789 _copts=self._remote_conf(config),
@@ -711,17 +820,18 b' class GitRemote(RemoteBase):'
711 wire_remote = wire.copy()
820 wire_remote = wire.copy()
712 wire_remote['path'] = path2
821 wire_remote['path'] = path2
713 repo_remote = self._factory.repo(wire_remote)
822 repo_remote = self._factory.repo(wire_remote)
714 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
823 LocalGitClient(thin_packs=False).fetch(path2, repo_remote)
715
824
716 revs = [
825 revs = [
717 x.commit.id
826 x.commit.id
718 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
827 for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
719 return revs
828 return revs
720
829
721 @reraise_safe_exceptions
830 @reraise_safe_exceptions
722 def get_object(self, wire, sha, maybe_unreachable=False):
831 def get_object(self, wire, sha, maybe_unreachable=False):
723 cache_on, context_uid, repo_id = self._cache_on(wire)
832 cache_on, context_uid, repo_id = self._cache_on(wire)
724 region = self._region(wire)
833 region = self._region(wire)
834
725 @region.conditional_cache_on_arguments(condition=cache_on)
835 @region.conditional_cache_on_arguments(condition=cache_on)
726 def _get_object(_context_uid, _repo_id, _sha):
836 def _get_object(_context_uid, _repo_id, _sha):
727 repo_init = self._factory.repo_libgit2(wire)
837 repo_init = self._factory.repo_libgit2(wire)
@@ -766,11 +876,11 b' class GitRemote(RemoteBase):'
766 raise exceptions.LookupException(e)(missing_commit_err)
876 raise exceptions.LookupException(e)(missing_commit_err)
767
877
768 commit_id = commit.hex
878 commit_id = commit.hex
769 type_id = commit.type
879 type_str = commit.type_str
770
880
771 return {
881 return {
772 'id': commit_id,
882 'id': commit_id,
773 'type': self._type_id_to_name(type_id),
883 'type': type_str,
774 'commit_id': commit_id,
884 'commit_id': commit_id,
775 'idx': 0
885 'idx': 0
776 }
886 }
@@ -781,6 +891,7 b' class GitRemote(RemoteBase):'
781 def get_refs(self, wire):
891 def get_refs(self, wire):
782 cache_on, context_uid, repo_id = self._cache_on(wire)
892 cache_on, context_uid, repo_id = self._cache_on(wire)
783 region = self._region(wire)
893 region = self._region(wire)
894
784 @region.conditional_cache_on_arguments(condition=cache_on)
895 @region.conditional_cache_on_arguments(condition=cache_on)
785 def _get_refs(_context_uid, _repo_id):
896 def _get_refs(_context_uid, _repo_id):
786
897
@@ -788,7 +899,7 b' class GitRemote(RemoteBase):'
788 with repo_init as repo:
899 with repo_init as repo:
789 regex = re.compile('^refs/(heads|tags)/')
900 regex = re.compile('^refs/(heads|tags)/')
790 return {x.name: x.target.hex for x in
901 return {x.name: x.target.hex for x in
791 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
902 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
792
903
793 return _get_refs(context_uid, repo_id)
904 return _get_refs(context_uid, repo_id)
794
905
@@ -796,13 +907,14 b' class GitRemote(RemoteBase):'
796 def get_branch_pointers(self, wire):
907 def get_branch_pointers(self, wire):
797 cache_on, context_uid, repo_id = self._cache_on(wire)
908 cache_on, context_uid, repo_id = self._cache_on(wire)
798 region = self._region(wire)
909 region = self._region(wire)
910
799 @region.conditional_cache_on_arguments(condition=cache_on)
911 @region.conditional_cache_on_arguments(condition=cache_on)
800 def _get_branch_pointers(_context_uid, _repo_id):
912 def _get_branch_pointers(_context_uid, _repo_id):
801
913
802 repo_init = self._factory.repo_libgit2(wire)
914 repo_init = self._factory.repo_libgit2(wire)
803 regex = re.compile('^refs/heads')
915 regex = re.compile('^refs/heads')
804 with repo_init as repo:
916 with repo_init as repo:
805 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
917 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
806 return {x.target.hex: x.shorthand for x in branches}
918 return {x.target.hex: x.shorthand for x in branches}
807
919
808 return _get_branch_pointers(context_uid, repo_id)
920 return _get_branch_pointers(context_uid, repo_id)
@@ -811,6 +923,7 b' class GitRemote(RemoteBase):'
811 def head(self, wire, show_exc=True):
923 def head(self, wire, show_exc=True):
812 cache_on, context_uid, repo_id = self._cache_on(wire)
924 cache_on, context_uid, repo_id = self._cache_on(wire)
813 region = self._region(wire)
925 region = self._region(wire)
926
814 @region.conditional_cache_on_arguments(condition=cache_on)
927 @region.conditional_cache_on_arguments(condition=cache_on)
815 def _head(_context_uid, _repo_id, _show_exc):
928 def _head(_context_uid, _repo_id, _show_exc):
816 repo_init = self._factory.repo_libgit2(wire)
929 repo_init = self._factory.repo_libgit2(wire)
@@ -824,12 +937,12 b' class GitRemote(RemoteBase):'
824
937
825 @reraise_safe_exceptions
938 @reraise_safe_exceptions
826 def init(self, wire):
939 def init(self, wire):
827 repo_path = str_to_dulwich(wire['path'])
940 repo_path = safe_str(wire['path'])
828 self.repo = Repo.init(repo_path)
941 self.repo = Repo.init(repo_path)
829
942
830 @reraise_safe_exceptions
943 @reraise_safe_exceptions
831 def init_bare(self, wire):
944 def init_bare(self, wire):
832 repo_path = str_to_dulwich(wire['path'])
945 repo_path = safe_str(wire['path'])
833 self.repo = Repo.init_bare(repo_path)
946 self.repo = Repo.init_bare(repo_path)
834
947
835 @reraise_safe_exceptions
948 @reraise_safe_exceptions
@@ -837,6 +950,7 b' class GitRemote(RemoteBase):'
837
950
838 cache_on, context_uid, repo_id = self._cache_on(wire)
951 cache_on, context_uid, repo_id = self._cache_on(wire)
839 region = self._region(wire)
952 region = self._region(wire)
953
840 @region.conditional_cache_on_arguments(condition=cache_on)
954 @region.conditional_cache_on_arguments(condition=cache_on)
841 def _revision(_context_uid, _repo_id, _rev):
955 def _revision(_context_uid, _repo_id, _rev):
842 repo_init = self._factory.repo_libgit2(wire)
956 repo_init = self._factory.repo_libgit2(wire)
@@ -856,6 +970,7 b' class GitRemote(RemoteBase):'
856 def date(self, wire, commit_id):
970 def date(self, wire, commit_id):
857 cache_on, context_uid, repo_id = self._cache_on(wire)
971 cache_on, context_uid, repo_id = self._cache_on(wire)
858 region = self._region(wire)
972 region = self._region(wire)
973
859 @region.conditional_cache_on_arguments(condition=cache_on)
974 @region.conditional_cache_on_arguments(condition=cache_on)
860 def _date(_repo_id, _commit_id):
975 def _date(_repo_id, _commit_id):
861 repo_init = self._factory.repo_libgit2(wire)
976 repo_init = self._factory.repo_libgit2(wire)
@@ -876,6 +991,7 b' class GitRemote(RemoteBase):'
876 def author(self, wire, commit_id):
991 def author(self, wire, commit_id):
877 cache_on, context_uid, repo_id = self._cache_on(wire)
992 cache_on, context_uid, repo_id = self._cache_on(wire)
878 region = self._region(wire)
993 region = self._region(wire)
994
879 @region.conditional_cache_on_arguments(condition=cache_on)
995 @region.conditional_cache_on_arguments(condition=cache_on)
880 def _author(_repo_id, _commit_id):
996 def _author(_repo_id, _commit_id):
881 repo_init = self._factory.repo_libgit2(wire)
997 repo_init = self._factory.repo_libgit2(wire)
@@ -888,12 +1004,12 b' class GitRemote(RemoteBase):'
888 author = commit.get_object().author
1004 author = commit.get_object().author
889
1005
890 if author.email:
1006 if author.email:
891 return u"{} <{}>".format(author.name, author.email)
1007 return f"{author.name} <{author.email}>"
892
1008
893 try:
1009 try:
894 return u"{}".format(author.name)
1010 return f"{author.name}"
895 except Exception:
1011 except Exception:
896 return u"{}".format(safe_unicode(author.raw_name))
1012 return f"{safe_str(author.raw_name)}"
897
1013
898 return _author(repo_id, commit_id)
1014 return _author(repo_id, commit_id)
899
1015
@@ -901,6 +1017,7 b' class GitRemote(RemoteBase):'
901 def message(self, wire, commit_id):
1017 def message(self, wire, commit_id):
902 cache_on, context_uid, repo_id = self._cache_on(wire)
1018 cache_on, context_uid, repo_id = self._cache_on(wire)
903 region = self._region(wire)
1019 region = self._region(wire)
1020
904 @region.conditional_cache_on_arguments(condition=cache_on)
1021 @region.conditional_cache_on_arguments(condition=cache_on)
905 def _message(_repo_id, _commit_id):
1022 def _message(_repo_id, _commit_id):
906 repo_init = self._factory.repo_libgit2(wire)
1023 repo_init = self._factory.repo_libgit2(wire)
@@ -913,6 +1030,7 b' class GitRemote(RemoteBase):'
913 def parents(self, wire, commit_id):
1030 def parents(self, wire, commit_id):
914 cache_on, context_uid, repo_id = self._cache_on(wire)
1031 cache_on, context_uid, repo_id = self._cache_on(wire)
915 region = self._region(wire)
1032 region = self._region(wire)
1033
916 @region.conditional_cache_on_arguments(condition=cache_on)
1034 @region.conditional_cache_on_arguments(condition=cache_on)
917 def _parents(_repo_id, _commit_id):
1035 def _parents(_repo_id, _commit_id):
918 repo_init = self._factory.repo_libgit2(wire)
1036 repo_init = self._factory.repo_libgit2(wire)
@@ -930,17 +1048,23 b' class GitRemote(RemoteBase):'
930 def children(self, wire, commit_id):
1048 def children(self, wire, commit_id):
931 cache_on, context_uid, repo_id = self._cache_on(wire)
1049 cache_on, context_uid, repo_id = self._cache_on(wire)
932 region = self._region(wire)
1050 region = self._region(wire)
1051
1052 head = self.head(wire)
1053
933 @region.conditional_cache_on_arguments(condition=cache_on)
1054 @region.conditional_cache_on_arguments(condition=cache_on)
934 def _children(_repo_id, _commit_id):
1055 def _children(_repo_id, _commit_id):
1056
935 output, __ = self.run_git_command(
1057 output, __ = self.run_git_command(
936 wire, ['rev-list', '--all', '--children'])
1058 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
937
1059
938 child_ids = []
1060 child_ids = []
939 pat = re.compile(r'^%s' % commit_id)
1061 pat = re.compile(fr'^{commit_id}')
940 for l in output.splitlines():
1062 for line in output.splitlines():
941 if pat.match(l):
1063 line = safe_str(line)
942 found_ids = l.split(' ')[1:]
1064 if pat.match(line):
1065 found_ids = line.split(' ')[1:]
943 child_ids.extend(found_ids)
1066 child_ids.extend(found_ids)
1067 break
944
1068
945 return child_ids
1069 return child_ids
946 return _children(repo_id, commit_id)
1070 return _children(repo_id, commit_id)
@@ -955,7 +1079,11 b' class GitRemote(RemoteBase):'
955 def create_branch(self, wire, branch_name, commit_id, force=False):
1079 def create_branch(self, wire, branch_name, commit_id, force=False):
956 repo_init = self._factory.repo_libgit2(wire)
1080 repo_init = self._factory.repo_libgit2(wire)
957 with repo_init as repo:
1081 with repo_init as repo:
1082 if commit_id:
958 commit = repo[commit_id]
1083 commit = repo[commit_id]
1084 else:
1085 # if commit is not given just use the HEAD
1086 commit = repo.head()
959
1087
960 if force:
1088 if force:
961 repo.branches.local.create(branch_name, commit, force=force)
1089 repo.branches.local.create(branch_name, commit, force=force)
@@ -973,23 +1101,39 b' class GitRemote(RemoteBase):'
973 def tag_remove(self, wire, tag_name):
1101 def tag_remove(self, wire, tag_name):
974 repo_init = self._factory.repo_libgit2(wire)
1102 repo_init = self._factory.repo_libgit2(wire)
975 with repo_init as repo:
1103 with repo_init as repo:
976 key = 'refs/tags/{}'.format(tag_name)
1104 key = f'refs/tags/{tag_name}'
977 repo.references.delete(key)
1105 repo.references.delete(key)
978
1106
979 @reraise_safe_exceptions
1107 @reraise_safe_exceptions
980 def tree_changes(self, wire, source_id, target_id):
1108 def tree_changes(self, wire, source_id, target_id):
981 # TODO(marcink): remove this seems it's only used by tests
982 repo = self._factory.repo(wire)
1109 repo = self._factory.repo(wire)
1110 # source can be empty
1111 source_id = safe_bytes(source_id if source_id else b'')
1112 target_id = safe_bytes(target_id)
1113
983 source = repo[source_id].tree if source_id else None
1114 source = repo[source_id].tree if source_id else None
984 target = repo[target_id].tree
1115 target = repo[target_id].tree
985 result = repo.object_store.tree_changes(source, target)
1116 result = repo.object_store.tree_changes(source, target)
986 return list(result)
1117
1118 added = set()
1119 modified = set()
1120 deleted = set()
1121 for (old_path, new_path), (_, _), (_, _) in list(result):
1122 if new_path and old_path:
1123 modified.add(new_path)
1124 elif new_path and not old_path:
1125 added.add(new_path)
1126 elif not new_path and old_path:
1127 deleted.add(old_path)
1128
1129 return list(added), list(modified), list(deleted)
987
1130
988 @reraise_safe_exceptions
1131 @reraise_safe_exceptions
989 def tree_and_type_for_path(self, wire, commit_id, path):
1132 def tree_and_type_for_path(self, wire, commit_id, path):
990
1133
991 cache_on, context_uid, repo_id = self._cache_on(wire)
1134 cache_on, context_uid, repo_id = self._cache_on(wire)
992 region = self._region(wire)
1135 region = self._region(wire)
1136
993 @region.conditional_cache_on_arguments(condition=cache_on)
1137 @region.conditional_cache_on_arguments(condition=cache_on)
994 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1138 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
995 repo_init = self._factory.repo_libgit2(wire)
1139 repo_init = self._factory.repo_libgit2(wire)
@@ -1001,13 +1145,14 b' class GitRemote(RemoteBase):'
1001 except KeyError:
1145 except KeyError:
1002 return None, None, None
1146 return None, None, None
1003
1147
1004 return tree.id.hex, tree.type, tree.filemode
1148 return tree.id.hex, tree.type_str, tree.filemode
1005 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1149 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1006
1150
1007 @reraise_safe_exceptions
1151 @reraise_safe_exceptions
1008 def tree_items(self, wire, tree_id):
1152 def tree_items(self, wire, tree_id):
1009 cache_on, context_uid, repo_id = self._cache_on(wire)
1153 cache_on, context_uid, repo_id = self._cache_on(wire)
1010 region = self._region(wire)
1154 region = self._region(wire)
1155
1011 @region.conditional_cache_on_arguments(condition=cache_on)
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1012 def _tree_items(_repo_id, _tree_id):
1157 def _tree_items(_repo_id, _tree_id):
1013
1158
@@ -1016,13 +1161,13 b' class GitRemote(RemoteBase):'
1016 try:
1161 try:
1017 tree = repo[tree_id]
1162 tree = repo[tree_id]
1018 except KeyError:
1163 except KeyError:
1019 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1164 raise ObjectMissing(f'No tree with id: {tree_id}')
1020
1165
1021 result = []
1166 result = []
1022 for item in tree:
1167 for item in tree:
1023 item_sha = item.hex
1168 item_sha = item.hex
1024 item_mode = item.filemode
1169 item_mode = item.filemode
1025 item_type = item.type
1170 item_type = item.type_str
1026
1171
1027 if item_type == 'commit':
1172 if item_type == 'commit':
1028 # NOTE(marcink): submodules we translate to 'link' for backward compat
1173 # NOTE(marcink): submodules we translate to 'link' for backward compat
@@ -1066,7 +1211,7 b' class GitRemote(RemoteBase):'
1066 lines = diff.splitlines()
1211 lines = diff.splitlines()
1067 x = 0
1212 x = 0
1068 for line in lines:
1213 for line in lines:
1069 if line.startswith('diff'):
1214 if line.startswith(b'diff'):
1070 break
1215 break
1071 x += 1
1216 x += 1
1072 # Append new line just like 'diff' command do
1217 # Append new line just like 'diff' command do
@@ -1076,6 +1221,7 b' class GitRemote(RemoteBase):'
1076 @reraise_safe_exceptions
1221 @reraise_safe_exceptions
1077 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1222 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1078 repo_init = self._factory.repo_libgit2(wire)
1223 repo_init = self._factory.repo_libgit2(wire)
1224
1079 with repo_init as repo:
1225 with repo_init as repo:
1080 swap = True
1226 swap = True
1081 flags = 0
1227 flags = 0
@@ -1101,15 +1247,17 b' class GitRemote(RemoteBase):'
1101 if file_filter:
1247 if file_filter:
1102 for p in diff_obj:
1248 for p in diff_obj:
1103 if p.delta.old_file.path == file_filter:
1249 if p.delta.old_file.path == file_filter:
1104 return p.patch or ''
1250 return BytesEnvelope(p.data) or BytesEnvelope(b'')
1105 # fo matching path == no diff
1251 # fo matching path == no diff
1106 return ''
1252 return BytesEnvelope(b'')
1107 return diff_obj.patch or ''
1253
1254 return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1108
1255
1109 @reraise_safe_exceptions
1256 @reraise_safe_exceptions
1110 def node_history(self, wire, commit_id, path, limit):
1257 def node_history(self, wire, commit_id, path, limit):
1111 cache_on, context_uid, repo_id = self._cache_on(wire)
1258 cache_on, context_uid, repo_id = self._cache_on(wire)
1112 region = self._region(wire)
1259 region = self._region(wire)
1260
1113 @region.conditional_cache_on_arguments(condition=cache_on)
1261 @region.conditional_cache_on_arguments(condition=cache_on)
1114 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1262 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1115 # optimize for n==1, rev-list is much faster for that use-case
1263 # optimize for n==1, rev-list is much faster for that use-case
@@ -1122,14 +1270,14 b' class GitRemote(RemoteBase):'
1122 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1270 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1123
1271
1124 output, __ = self.run_git_command(wire, cmd)
1272 output, __ = self.run_git_command(wire, cmd)
1125 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1273 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1126
1274
1127 return [x for x in commit_ids]
1275 return [x for x in commit_ids]
1128 return _node_history(context_uid, repo_id, commit_id, path, limit)
1276 return _node_history(context_uid, repo_id, commit_id, path, limit)
1129
1277
1130 @reraise_safe_exceptions
1278 @reraise_safe_exceptions
1131 def node_annotate(self, wire, commit_id, path):
1279 def node_annotate_legacy(self, wire, commit_id, path):
1132
1280 # note: replaced by pygit2 implementation
1133 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1281 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1134 # -l ==> outputs long shas (and we need all 40 characters)
1282 # -l ==> outputs long shas (and we need all 40 characters)
1135 # --root ==> doesn't put '^' character for boundaries
1283 # --root ==> doesn't put '^' character for boundaries
@@ -1137,13 +1285,31 b' class GitRemote(RemoteBase):'
1137 output, __ = self.run_git_command(wire, cmd)
1285 output, __ = self.run_git_command(wire, cmd)
1138
1286
1139 result = []
1287 result = []
1140 for i, blame_line in enumerate(output.split('\n')[:-1]):
1288 for i, blame_line in enumerate(output.splitlines()[:-1]):
1141 line_no = i + 1
1289 line_no = i + 1
1142 commit_id, line = re.split(r' ', blame_line, 1)
1290 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1143 result.append((line_no, commit_id, line))
1291 result.append((line_no, blame_commit_id, line))
1292
1144 return result
1293 return result
1145
1294
1146 @reraise_safe_exceptions
1295 @reraise_safe_exceptions
1296 def node_annotate(self, wire, commit_id, path):
1297
1298 result_libgit = []
1299 repo_init = self._factory.repo_libgit2(wire)
1300 with repo_init as repo:
1301 commit = repo[commit_id]
1302 blame_obj = repo.blame(path, newest_commit=commit_id)
1303 for i, line in enumerate(commit.tree[path].data.splitlines()):
1304 line_no = i + 1
1305 hunk = blame_obj.for_line(line_no)
1306 blame_commit_id = hunk.final_commit_id.hex
1307
1308 result_libgit.append((line_no, blame_commit_id, line))
1309
1310 return result_libgit
1311
1312 @reraise_safe_exceptions
1147 def update_server_info(self, wire):
1313 def update_server_info(self, wire):
1148 repo = self._factory.repo(wire)
1314 repo = self._factory.repo(wire)
1149 update_server_info(repo)
1315 update_server_info(repo)
@@ -1153,6 +1319,7 b' class GitRemote(RemoteBase):'
1153
1319
1154 cache_on, context_uid, repo_id = self._cache_on(wire)
1320 cache_on, context_uid, repo_id = self._cache_on(wire)
1155 region = self._region(wire)
1321 region = self._region(wire)
1322
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1323 @region.conditional_cache_on_arguments(condition=cache_on)
1157 def _get_all_commit_ids(_context_uid, _repo_id):
1324 def _get_all_commit_ids(_context_uid, _repo_id):
1158
1325
@@ -1163,6 +1330,16 b' class GitRemote(RemoteBase):'
1163 except Exception:
1330 except Exception:
1164 # Can be raised for empty repositories
1331 # Can be raised for empty repositories
1165 return []
1332 return []
1333
1334 @region.conditional_cache_on_arguments(condition=cache_on)
1335 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1336 repo_init = self._factory.repo_libgit2(wire)
1337 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1338 results = []
1339 with repo_init as repo:
1340 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1341 results.append(commit.id.hex)
1342
1166 return _get_all_commit_ids(context_uid, repo_id)
1343 return _get_all_commit_ids(context_uid, repo_id)
1167
1344
1168 @reraise_safe_exceptions
1345 @reraise_safe_exceptions
@@ -1203,9 +1380,9 b' class GitRemote(RemoteBase):'
1203 _opts.update(opts)
1380 _opts.update(opts)
1204 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1381 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1205
1382
1206 return ''.join(proc), ''.join(proc.error)
1383 return b''.join(proc), b''.join(proc.stderr)
1207 except (EnvironmentError, OSError) as err:
1384 except OSError as err:
1208 cmd = ' '.join(cmd) # human friendly CMD
1385 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1209 tb_err = ("Couldn't run git command (%s).\n"
1386 tb_err = ("Couldn't run git command (%s).\n"
1210 "Original error was:%s\n"
1387 "Original error was:%s\n"
1211 "Call options:%s\n"
1388 "Call options:%s\n"
@@ -1224,6 +1401,9 b' class GitRemote(RemoteBase):'
1224 from vcsserver.hook_utils import install_git_hooks
1401 from vcsserver.hook_utils import install_git_hooks
1225 bare = self.bare(wire)
1402 bare = self.bare(wire)
1226 path = wire['path']
1403 path = wire['path']
1404 binary_dir = settings.BINARY_DIR
1405 if binary_dir:
1406 os.path.join(binary_dir, 'python3')
1227 return install_git_hooks(path, bare, force_create=force)
1407 return install_git_hooks(path, bare, force_create=force)
1228
1408
1229 @reraise_safe_exceptions
1409 @reraise_safe_exceptions
@@ -1240,13 +1420,15 b' class GitRemote(RemoteBase):'
1240 @reraise_safe_exceptions
1420 @reraise_safe_exceptions
1241 def set_head_ref(self, wire, head_name):
1421 def set_head_ref(self, wire, head_name):
1242 log.debug('Setting refs/head to `%s`', head_name)
1422 log.debug('Setting refs/head to `%s`', head_name)
1243 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1423 repo_init = self._factory.repo_libgit2(wire)
1244 output, __ = self.run_git_command(wire, cmd)
1424 with repo_init as repo:
1245 return [head_name] + output.splitlines()
1425 repo.set_head(f'refs/heads/{head_name}')
1426
1427 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1246
1428
1247 @reraise_safe_exceptions
1429 @reraise_safe_exceptions
1248 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1430 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1249 archive_dir_name, commit_id):
1431 archive_dir_name, commit_id, cache_config):
1250
1432
1251 def file_walker(_commit_id, path):
1433 def file_walker(_commit_id, path):
1252 repo_init = self._factory.repo_libgit2(wire)
1434 repo_init = self._factory.repo_libgit2(wire)
@@ -1262,20 +1444,20 b' class GitRemote(RemoteBase):'
1262 try:
1444 try:
1263 tree = repo[tree_id]
1445 tree = repo[tree_id]
1264 except KeyError:
1446 except KeyError:
1265 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1447 raise ObjectMissing(f'No tree with id: {tree_id}')
1266
1448
1267 index = LibGit2Index.Index()
1449 index = LibGit2Index.Index()
1268 index.read_tree(tree)
1450 index.read_tree(tree)
1269 file_iter = index
1451 file_iter = index
1270
1452
1271 for fn in file_iter:
1453 for file_node in file_iter:
1272 file_path = fn.path
1454 file_path = file_node.path
1273 mode = fn.mode
1455 mode = file_node.mode
1274 is_link = stat.S_ISLNK(mode)
1456 is_link = stat.S_ISLNK(mode)
1275 if mode == pygit2.GIT_FILEMODE_COMMIT:
1457 if mode == pygit2.GIT_FILEMODE_COMMIT:
1276 log.debug('Skipping path %s as a commit node', file_path)
1458 log.debug('Skipping path %s as a commit node', file_path)
1277 continue
1459 continue
1278 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1460 yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)
1279
1461
1280 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1462 return store_archive_in_cache(
1281 archive_dir_name, commit_id)
1463 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -14,17 +14,17 b''
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import functools
17 import binascii
18 import io
18 import io
19 import logging
19 import logging
20 import os
21 import stat
20 import stat
22 import urllib
21 import urllib.request
23 import urllib2
22 import urllib.parse
24 import traceback
23 import traceback
24 import hashlib
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27 from hgext.strip import strip as hgext_strip
27
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
@@ -32,15 +32,19 b' from mercurial import repair'
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
36 BinaryEnvelope
36 from vcsserver.hgcompat import (
37 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
43 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
44 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 from vcsserver.vcs_base import RemoteBase
45 from vcsserver.vcs_base import RemoteBase
46 from vcsserver.config import hooks as hooks_config
47
44
48
45 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
46
50
@@ -48,25 +52,31 b' log = logging.getLogger(__name__)'
48 def make_ui_from_config(repo_config):
52 def make_ui_from_config(repo_config):
49
53
50 class LoggingUI(ui.ui):
54 class LoggingUI(ui.ui):
55
51 def status(self, *msg, **opts):
56 def status(self, *msg, **opts):
52 log.info(' '.join(msg).rstrip('\n'))
57 str_msg = map(safe_str, msg)
53 super(LoggingUI, self).status(*msg, **opts)
58 log.info(' '.join(str_msg).rstrip('\n'))
59 #super(LoggingUI, self).status(*msg, **opts)
54
60
55 def warn(self, *msg, **opts):
61 def warn(self, *msg, **opts):
56 log.warn(' '.join(msg).rstrip('\n'))
62 str_msg = map(safe_str, msg)
57 super(LoggingUI, self).warn(*msg, **opts)
63 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
64 #super(LoggingUI, self).warn(*msg, **opts)
58
65
59 def error(self, *msg, **opts):
66 def error(self, *msg, **opts):
60 log.error(' '.join(msg).rstrip('\n'))
67 str_msg = map(safe_str, msg)
61 super(LoggingUI, self).error(*msg, **opts)
68 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
69 #super(LoggingUI, self).error(*msg, **opts)
62
70
63 def note(self, *msg, **opts):
71 def note(self, *msg, **opts):
64 log.info(' '.join(msg).rstrip('\n'))
72 str_msg = map(safe_str, msg)
65 super(LoggingUI, self).note(*msg, **opts)
73 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
74 #super(LoggingUI, self).note(*msg, **opts)
66
75
67 def debug(self, *msg, **opts):
76 def debug(self, *msg, **opts):
68 log.debug(' '.join(msg).rstrip('\n'))
77 str_msg = map(safe_str, msg)
69 super(LoggingUI, self).debug(*msg, **opts)
78 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
79 #super(LoggingUI, self).debug(*msg, **opts)
70
80
71 baseui = LoggingUI()
81 baseui = LoggingUI()
72
82
@@ -76,26 +86,26 b' def make_ui_from_config(repo_config):'
76 baseui._tcfg = hgconfig.config()
86 baseui._tcfg = hgconfig.config()
77
87
78 for section, option, value in repo_config:
88 for section, option, value in repo_config:
79 baseui.setconfig(section, option, value)
89 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
80
90
81 # make our hgweb quiet so it doesn't print output
91 # make our hgweb quiet so it doesn't print output
82 baseui.setconfig('ui', 'quiet', 'true')
92 baseui.setconfig(b'ui', b'quiet', b'true')
83
93
84 baseui.setconfig('ui', 'paginate', 'never')
94 baseui.setconfig(b'ui', b'paginate', b'never')
85 # for better Error reporting of Mercurial
95 # for better Error reporting of Mercurial
86 baseui.setconfig('ui', 'message-output', 'stderr')
96 baseui.setconfig(b'ui', b'message-output', b'stderr')
87
97
88 # force mercurial to only use 1 thread, otherwise it may try to set a
98 # force mercurial to only use 1 thread, otherwise it may try to set a
89 # signal in a non-main thread, thus generating a ValueError.
99 # signal in a non-main thread, thus generating a ValueError.
90 baseui.setconfig('worker', 'numcpus', 1)
100 baseui.setconfig(b'worker', b'numcpus', 1)
91
101
92 # If there is no config for the largefiles extension, we explicitly disable
102 # If there is no config for the largefiles extension, we explicitly disable
93 # it here. This overrides settings from repositories hgrc file. Recent
103 # it here. This overrides settings from repositories hgrc file. Recent
94 # mercurial versions enable largefiles in hgrc on clone from largefile
104 # mercurial versions enable largefiles in hgrc on clone from largefile
95 # repo.
105 # repo.
96 if not baseui.hasconfig('extensions', 'largefiles'):
106 if not baseui.hasconfig(b'extensions', b'largefiles'):
97 log.debug('Explicitly disable largefiles extension for repo.')
107 log.debug('Explicitly disable largefiles extension for repo.')
98 baseui.setconfig('extensions', 'largefiles', '!')
108 baseui.setconfig(b'extensions', b'largefiles', b'!')
99
109
100 return baseui
110 return baseui
101
111
@@ -107,19 +117,19 b' def reraise_safe_exceptions(func):'
107 try:
117 try:
108 return func(*args, **kwargs)
118 return func(*args, **kwargs)
109 except (Abort, InterventionRequired) as e:
119 except (Abort, InterventionRequired) as e:
110 raise_from_original(exceptions.AbortException(e))
120 raise_from_original(exceptions.AbortException(e), e)
111 except RepoLookupError as e:
121 except RepoLookupError as e:
112 raise_from_original(exceptions.LookupException(e))
122 raise_from_original(exceptions.LookupException(e), e)
113 except RequirementError as e:
123 except RequirementError as e:
114 raise_from_original(exceptions.RequirementException(e))
124 raise_from_original(exceptions.RequirementException(e), e)
115 except RepoError as e:
125 except RepoError as e:
116 raise_from_original(exceptions.VcsException(e))
126 raise_from_original(exceptions.VcsException(e), e)
117 except LookupError as e:
127 except LookupError as e:
118 raise_from_original(exceptions.LookupException(e))
128 raise_from_original(exceptions.LookupException(e), e)
119 except Exception as e:
129 except Exception as e:
120 if not hasattr(e, '_vcs_kind'):
130 if not hasattr(e, '_vcs_kind'):
121 log.exception("Unhandled exception in hg remote call")
131 log.exception("Unhandled exception in hg remote call")
122 raise_from_original(exceptions.UnhandledException(e))
132 raise_from_original(exceptions.UnhandledException(e), e)
123
133
124 raise
134 raise
125 return wrapper
135 return wrapper
@@ -130,9 +140,18 b' class MercurialFactory(RepoFactory):'
130
140
131 def _create_config(self, config, hooks=True):
141 def _create_config(self, config, hooks=True):
132 if not hooks:
142 if not hooks:
133 hooks_to_clean = frozenset((
143
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
144 hooks_to_clean = {
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
145
146 hooks_config.HOOK_REPO_SIZE,
147 hooks_config.HOOK_PRE_PULL,
148 hooks_config.HOOK_PULL,
149
150 hooks_config.HOOK_PRE_PUSH,
151 # TODO: what about PRETXT, this was disabled in pre 5.0.0
152 hooks_config.HOOK_PRETX_PUSH,
153
154 }
136 new_config = []
155 new_config = []
137 for section, option, value in config:
156 for section, option, value in config:
138 if section == 'hooks' and option in hooks_to_clean:
157 if section == 'hooks' and option in hooks_to_clean:
@@ -145,7 +164,9 b' class MercurialFactory(RepoFactory):'
145
164
146 def _create_repo(self, wire, create):
165 def _create_repo(self, wire, create):
147 baseui = self._create_config(wire["config"])
166 baseui = self._create_config(wire["config"])
148 return instance(baseui, wire["path"], create)
167 repo = instance(baseui, safe_bytes(wire["path"]), create)
168 log.debug('repository created: got HG object: %s', repo)
169 return repo
149
170
150 def repo(self, wire, create=False):
171 def repo(self, wire, create=False):
151 """
172 """
@@ -155,7 +176,7 b' class MercurialFactory(RepoFactory):'
155
176
156
177
157 def patch_ui_message_output(baseui):
178 def patch_ui_message_output(baseui):
158 baseui.setconfig('ui', 'quiet', 'false')
179 baseui.setconfig(b'ui', b'quiet', b'false')
159 output = io.BytesIO()
180 output = io.BytesIO()
160
181
161 def write(data, **unused_kwargs):
182 def write(data, **unused_kwargs):
@@ -169,6 +190,22 b' def patch_ui_message_output(baseui):'
169 return baseui, output
190 return baseui, output
170
191
171
192
193 def get_obfuscated_url(url_obj):
194 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
195 url_obj.query = obfuscate_qs(url_obj.query)
196 obfuscated_uri = str(url_obj)
197 return obfuscated_uri
198
199
200 def normalize_url_for_hg(url: str):
201 _proto = None
202
203 if '+' in url[:url.find('://')]:
204 _proto = url[0:url.find('+')]
205 url = url[url.find('+') + 1:]
206 return url, _proto
207
208
172 class HgRemote(RemoteBase):
209 class HgRemote(RemoteBase):
173
210
174 def __init__(self, factory):
211 def __init__(self, factory):
@@ -187,6 +224,13 b' class HgRemote(RemoteBase):'
187 "hidden": self.ctx_hidden,
224 "hidden": self.ctx_hidden,
188 "_file_paths": self.ctx_list,
225 "_file_paths": self.ctx_list,
189 }
226 }
227 self._bulk_file_methods = {
228 "size": self.fctx_size,
229 "data": self.fctx_node_data,
230 "flags": self.fctx_flags,
231 "is_binary": self.is_binary,
232 "md5": self.md5_hash,
233 }
190
234
191 def _get_ctx(self, repo, ref):
235 def _get_ctx(self, repo, ref):
192 return get_ctx(repo, ref)
236 return get_ctx(repo, ref)
@@ -194,7 +238,7 b' class HgRemote(RemoteBase):'
194 @reraise_safe_exceptions
238 @reraise_safe_exceptions
195 def discover_hg_version(self):
239 def discover_hg_version(self):
196 from mercurial import util
240 from mercurial import util
197 return util.version()
241 return safe_str(util.version())
198
242
199 @reraise_safe_exceptions
243 @reraise_safe_exceptions
200 def is_empty(self, wire):
244 def is_empty(self, wire):
@@ -210,10 +254,11 b' class HgRemote(RemoteBase):'
210 def bookmarks(self, wire):
254 def bookmarks(self, wire):
211 cache_on, context_uid, repo_id = self._cache_on(wire)
255 cache_on, context_uid, repo_id = self._cache_on(wire)
212 region = self._region(wire)
256 region = self._region(wire)
257
213 @region.conditional_cache_on_arguments(condition=cache_on)
258 @region.conditional_cache_on_arguments(condition=cache_on)
214 def _bookmarks(_context_uid, _repo_id):
259 def _bookmarks(_context_uid, _repo_id):
215 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
216 return dict(repo._bookmarks)
261 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
217
262
218 return _bookmarks(context_uid, repo_id)
263 return _bookmarks(context_uid, repo_id)
219
264
@@ -221,16 +266,17 b' class HgRemote(RemoteBase):'
221 def branches(self, wire, normal, closed):
266 def branches(self, wire, normal, closed):
222 cache_on, context_uid, repo_id = self._cache_on(wire)
267 cache_on, context_uid, repo_id = self._cache_on(wire)
223 region = self._region(wire)
268 region = self._region(wire)
269
224 @region.conditional_cache_on_arguments(condition=cache_on)
270 @region.conditional_cache_on_arguments(condition=cache_on)
225 def _branches(_context_uid, _repo_id, _normal, _closed):
271 def _branches(_context_uid, _repo_id, _normal, _closed):
226 repo = self._factory.repo(wire)
272 repo = self._factory.repo(wire)
227 iter_branches = repo.branchmap().iterbranches()
273 iter_branches = repo.branchmap().iterbranches()
228 bt = {}
274 bt = {}
229 for branch_name, _heads, tip, is_closed in iter_branches:
275 for branch_name, _heads, tip_node, is_closed in iter_branches:
230 if normal and not is_closed:
276 if normal and not is_closed:
231 bt[branch_name] = tip
277 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
232 if closed and is_closed:
278 if closed and is_closed:
233 bt[branch_name] = tip
279 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
234
280
235 return bt
281 return bt
236
282
@@ -240,12 +286,14 b' class HgRemote(RemoteBase):'
240 def bulk_request(self, wire, commit_id, pre_load):
286 def bulk_request(self, wire, commit_id, pre_load):
241 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
242 region = self._region(wire)
288 region = self._region(wire)
289
243 @region.conditional_cache_on_arguments(condition=cache_on)
290 @region.conditional_cache_on_arguments(condition=cache_on)
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
291 def _bulk_request(_repo_id, _commit_id, _pre_load):
245 result = {}
292 result = {}
246 for attr in pre_load:
293 for attr in pre_load:
247 try:
294 try:
248 method = self._bulk_methods[attr]
295 method = self._bulk_methods[attr]
296 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
249 result[attr] = method(wire, commit_id)
297 result[attr] = method(wire, commit_id)
250 except KeyError as e:
298 except KeyError as e:
251 raise exceptions.VcsException(e)(
299 raise exceptions.VcsException(e)(
@@ -258,6 +306,7 b' class HgRemote(RemoteBase):'
258 def ctx_branch(self, wire, commit_id):
306 def ctx_branch(self, wire, commit_id):
259 cache_on, context_uid, repo_id = self._cache_on(wire)
307 cache_on, context_uid, repo_id = self._cache_on(wire)
260 region = self._region(wire)
308 region = self._region(wire)
309
261 @region.conditional_cache_on_arguments(condition=cache_on)
310 @region.conditional_cache_on_arguments(condition=cache_on)
262 def _ctx_branch(_repo_id, _commit_id):
311 def _ctx_branch(_repo_id, _commit_id):
263 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
@@ -269,6 +318,7 b' class HgRemote(RemoteBase):'
269 def ctx_date(self, wire, commit_id):
318 def ctx_date(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
271 region = self._region(wire)
320 region = self._region(wire)
321
272 @region.conditional_cache_on_arguments(condition=cache_on)
322 @region.conditional_cache_on_arguments(condition=cache_on)
273 def _ctx_date(_repo_id, _commit_id):
323 def _ctx_date(_repo_id, _commit_id):
274 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
@@ -286,6 +336,7 b' class HgRemote(RemoteBase):'
286 def ctx_files(self, wire, commit_id):
336 def ctx_files(self, wire, commit_id):
287 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
288 region = self._region(wire)
338 region = self._region(wire)
339
289 @region.conditional_cache_on_arguments(condition=cache_on)
340 @region.conditional_cache_on_arguments(condition=cache_on)
290 def _ctx_files(_repo_id, _commit_id):
341 def _ctx_files(_repo_id, _commit_id):
291 repo = self._factory.repo(wire)
342 repo = self._factory.repo(wire)
@@ -304,6 +355,7 b' class HgRemote(RemoteBase):'
304 def ctx_parents(self, wire, commit_id):
355 def ctx_parents(self, wire, commit_id):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
356 cache_on, context_uid, repo_id = self._cache_on(wire)
306 region = self._region(wire)
357 region = self._region(wire)
358
307 @region.conditional_cache_on_arguments(condition=cache_on)
359 @region.conditional_cache_on_arguments(condition=cache_on)
308 def _ctx_parents(_repo_id, _commit_id):
360 def _ctx_parents(_repo_id, _commit_id):
309 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
@@ -317,6 +369,7 b' class HgRemote(RemoteBase):'
317 def ctx_children(self, wire, commit_id):
369 def ctx_children(self, wire, commit_id):
318 cache_on, context_uid, repo_id = self._cache_on(wire)
370 cache_on, context_uid, repo_id = self._cache_on(wire)
319 region = self._region(wire)
371 region = self._region(wire)
372
320 @region.conditional_cache_on_arguments(condition=cache_on)
373 @region.conditional_cache_on_arguments(condition=cache_on)
321 def _ctx_children(_repo_id, _commit_id):
374 def _ctx_children(_repo_id, _commit_id):
322 repo = self._factory.repo(wire)
375 repo = self._factory.repo(wire)
@@ -330,6 +383,7 b' class HgRemote(RemoteBase):'
330 def ctx_phase(self, wire, commit_id):
383 def ctx_phase(self, wire, commit_id):
331 cache_on, context_uid, repo_id = self._cache_on(wire)
384 cache_on, context_uid, repo_id = self._cache_on(wire)
332 region = self._region(wire)
385 region = self._region(wire)
386
333 @region.conditional_cache_on_arguments(condition=cache_on)
387 @region.conditional_cache_on_arguments(condition=cache_on)
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
388 def _ctx_phase(_context_uid, _repo_id, _commit_id):
335 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
@@ -342,6 +396,7 b' class HgRemote(RemoteBase):'
342 def ctx_obsolete(self, wire, commit_id):
396 def ctx_obsolete(self, wire, commit_id):
343 cache_on, context_uid, repo_id = self._cache_on(wire)
397 cache_on, context_uid, repo_id = self._cache_on(wire)
344 region = self._region(wire)
398 region = self._region(wire)
399
345 @region.conditional_cache_on_arguments(condition=cache_on)
400 @region.conditional_cache_on_arguments(condition=cache_on)
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
401 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
347 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
@@ -353,6 +408,7 b' class HgRemote(RemoteBase):'
353 def ctx_hidden(self, wire, commit_id):
408 def ctx_hidden(self, wire, commit_id):
354 cache_on, context_uid, repo_id = self._cache_on(wire)
409 cache_on, context_uid, repo_id = self._cache_on(wire)
355 region = self._region(wire)
410 region = self._region(wire)
411
356 @region.conditional_cache_on_arguments(condition=cache_on)
412 @region.conditional_cache_on_arguments(condition=cache_on)
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
413 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
358 repo = self._factory.repo(wire)
414 repo = self._factory.repo(wire)
@@ -384,46 +440,42 b' class HgRemote(RemoteBase):'
384
440
385 @reraise_safe_exceptions
441 @reraise_safe_exceptions
386 def check_url(self, url, config):
442 def check_url(self, url, config):
387 _proto = None
443 url, _proto = normalize_url_for_hg(url)
388 if '+' in url[:url.find('://')]:
444 url_obj = url_parser(safe_bytes(url))
389 _proto = url[0:url.find('+')]
445
390 url = url[url.find('+') + 1:]
446 test_uri = safe_str(url_obj.authinfo()[0])
447 authinfo = url_obj.authinfo()[1]
448 obfuscated_uri = get_obfuscated_url(url_obj)
449 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
450
391 handlers = []
451 handlers = []
392 url_obj = url_parser(url)
393 test_uri, authinfo = url_obj.authinfo()
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
395 url_obj.query = obfuscate_qs(url_obj.query)
396
397 cleaned_uri = str(url_obj)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
399
400 if authinfo:
452 if authinfo:
401 # create a password manager
453 # create a password manager
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
454 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
403 passmgr.add_password(*authinfo)
455 passmgr.add_password(*authinfo)
404
456
405 handlers.extend((httpbasicauthhandler(passmgr),
457 handlers.extend((httpbasicauthhandler(passmgr),
406 httpdigestauthhandler(passmgr)))
458 httpdigestauthhandler(passmgr)))
407
459
408 o = urllib2.build_opener(*handlers)
460 o = urllib.request.build_opener(*handlers)
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
461 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
410 ('Accept', 'application/mercurial-0.1')]
462 ('Accept', 'application/mercurial-0.1')]
411
463
412 q = {"cmd": 'between'}
464 q = {"cmd": 'between'}
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
465 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
414 qs = '?%s' % urllib.urlencode(q)
466 qs = '?%s' % urllib.parse.urlencode(q)
415 cu = "%s%s" % (test_uri, qs)
467 cu = f"{test_uri}{qs}"
416 req = urllib2.Request(cu, None, {})
468 req = urllib.request.Request(cu, None, {})
417
469
418 try:
470 try:
419 log.debug("Trying to open URL %s", cleaned_uri)
471 log.debug("Trying to open URL %s", obfuscated_uri)
420 resp = o.open(req)
472 resp = o.open(req)
421 if resp.code != 200:
473 if resp.code != 200:
422 raise exceptions.URLError()('Return Code is not 200')
474 raise exceptions.URLError()('Return Code is not 200')
423 except Exception as e:
475 except Exception as e:
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
476 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
425 # means it cannot be cloned
477 # means it cannot be cloned
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
478 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
427
479
428 # now check if it's a proper hg repo, but don't do it for svn
480 # now check if it's a proper hg repo, but don't do it for svn
429 try:
481 try:
@@ -432,19 +484,18 b' class HgRemote(RemoteBase):'
432 else:
484 else:
433 # check for pure hg repos
485 # check for pure hg repos
434 log.debug(
486 log.debug(
435 "Verifying if URL is a Mercurial repository: %s",
487 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
436 cleaned_uri)
437 ui = make_ui_from_config(config)
488 ui = make_ui_from_config(config)
438 peer_checker = makepeer(ui, url)
489 peer_checker = makepeer(ui, safe_bytes(url))
439 peer_checker.lookup('tip')
490 peer_checker.lookup(b'tip')
440 except Exception as e:
491 except Exception as e:
441 log.warning("URL is not a valid Mercurial repository: %s",
492 log.warning("URL is not a valid Mercurial repository: %s",
442 cleaned_uri)
493 obfuscated_uri)
443 raise exceptions.URLError(e)(
494 raise exceptions.URLError(e)(
444 "url [%s] does not look like an hg repo org_exc: %s"
495 "url [%s] does not look like an hg repo org_exc: %s"
445 % (cleaned_uri, e))
496 % (obfuscated_uri, e))
446
497
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
498 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
448 return True
499 return True
449
500
450 @reraise_safe_exceptions
501 @reraise_safe_exceptions
@@ -452,14 +503,17 b' class HgRemote(RemoteBase):'
452 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
453
504
454 if file_filter:
505 if file_filter:
455 match_filter = match(file_filter[0], '', [file_filter[1]])
506 # unpack the file-filter
507 repo_path, node_path = file_filter
508 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
456 else:
509 else:
457 match_filter = file_filter
510 match_filter = file_filter
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
511 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
459
512
460 try:
513 try:
461 return "".join(patch.diff(
514 diff_iter = patch.diff(
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
515 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
516 return BytesEnvelope(b"".join(diff_iter))
463 except RepoLookupError as e:
517 except RepoLookupError as e:
464 raise exceptions.LookupException(e)()
518 raise exceptions.LookupException(e)()
465
519
@@ -467,12 +521,13 b' class HgRemote(RemoteBase):'
467 def node_history(self, wire, revision, path, limit):
521 def node_history(self, wire, revision, path, limit):
468 cache_on, context_uid, repo_id = self._cache_on(wire)
522 cache_on, context_uid, repo_id = self._cache_on(wire)
469 region = self._region(wire)
523 region = self._region(wire)
524
470 @region.conditional_cache_on_arguments(condition=cache_on)
525 @region.conditional_cache_on_arguments(condition=cache_on)
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
526 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
472 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
473
528
474 ctx = self._get_ctx(repo, revision)
529 ctx = self._get_ctx(repo, revision)
475 fctx = ctx.filectx(path)
530 fctx = ctx.filectx(safe_bytes(path))
476
531
477 def history_iter():
532 def history_iter():
478 limit_rev = fctx.rev()
533 limit_rev = fctx.rev()
@@ -498,11 +553,12 b' class HgRemote(RemoteBase):'
498 def node_history_untill(self, wire, revision, path, limit):
553 def node_history_untill(self, wire, revision, path, limit):
499 cache_on, context_uid, repo_id = self._cache_on(wire)
554 cache_on, context_uid, repo_id = self._cache_on(wire)
500 region = self._region(wire)
555 region = self._region(wire)
556
501 @region.conditional_cache_on_arguments(condition=cache_on)
557 @region.conditional_cache_on_arguments(condition=cache_on)
502 def _node_history_until(_context_uid, _repo_id):
558 def _node_history_until(_context_uid, _repo_id):
503 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
504 ctx = self._get_ctx(repo, revision)
560 ctx = self._get_ctx(repo, revision)
505 fctx = ctx.filectx(path)
561 fctx = ctx.filectx(safe_bytes(path))
506
562
507 file_log = list(fctx.filelog())
563 file_log = list(fctx.filelog())
508 if limit:
564 if limit:
@@ -513,10 +569,29 b' class HgRemote(RemoteBase):'
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
569 return _node_history_until(context_uid, repo_id, revision, path, limit)
514
570
515 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def bulk_file_request(self, wire, commit_id, path, pre_load):
573 cache_on, context_uid, repo_id = self._cache_on(wire)
574 region = self._region(wire)
575
576 @region.conditional_cache_on_arguments(condition=cache_on)
577 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
578 result = {}
579 for attr in pre_load:
580 try:
581 method = self._bulk_file_methods[attr]
582 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
583 result[attr] = method(wire, _commit_id, _path)
584 except KeyError as e:
585 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
586 return BinaryEnvelope(result)
587
588 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
589
590 @reraise_safe_exceptions
516 def fctx_annotate(self, wire, revision, path):
591 def fctx_annotate(self, wire, revision, path):
517 repo = self._factory.repo(wire)
592 repo = self._factory.repo(wire)
518 ctx = self._get_ctx(repo, revision)
593 ctx = self._get_ctx(repo, revision)
519 fctx = ctx.filectx(path)
594 fctx = ctx.filectx(safe_bytes(path))
520
595
521 result = []
596 result = []
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
597 for i, annotate_obj in enumerate(fctx.annotate(), 1):
@@ -530,18 +605,19 b' class HgRemote(RemoteBase):'
530 def fctx_node_data(self, wire, revision, path):
605 def fctx_node_data(self, wire, revision, path):
531 repo = self._factory.repo(wire)
606 repo = self._factory.repo(wire)
532 ctx = self._get_ctx(repo, revision)
607 ctx = self._get_ctx(repo, revision)
533 fctx = ctx.filectx(path)
608 fctx = ctx.filectx(safe_bytes(path))
534 return fctx.data()
609 return BytesEnvelope(fctx.data())
535
610
536 @reraise_safe_exceptions
611 @reraise_safe_exceptions
537 def fctx_flags(self, wire, commit_id, path):
612 def fctx_flags(self, wire, commit_id, path):
538 cache_on, context_uid, repo_id = self._cache_on(wire)
613 cache_on, context_uid, repo_id = self._cache_on(wire)
539 region = self._region(wire)
614 region = self._region(wire)
615
540 @region.conditional_cache_on_arguments(condition=cache_on)
616 @region.conditional_cache_on_arguments(condition=cache_on)
541 def _fctx_flags(_repo_id, _commit_id, _path):
617 def _fctx_flags(_repo_id, _commit_id, _path):
542 repo = self._factory.repo(wire)
618 repo = self._factory.repo(wire)
543 ctx = self._get_ctx(repo, commit_id)
619 ctx = self._get_ctx(repo, commit_id)
544 fctx = ctx.filectx(path)
620 fctx = ctx.filectx(safe_bytes(path))
545 return fctx.flags()
621 return fctx.flags()
546
622
547 return _fctx_flags(repo_id, commit_id, path)
623 return _fctx_flags(repo_id, commit_id, path)
@@ -550,11 +626,12 b' class HgRemote(RemoteBase):'
550 def fctx_size(self, wire, commit_id, path):
626 def fctx_size(self, wire, commit_id, path):
551 cache_on, context_uid, repo_id = self._cache_on(wire)
627 cache_on, context_uid, repo_id = self._cache_on(wire)
552 region = self._region(wire)
628 region = self._region(wire)
629
553 @region.conditional_cache_on_arguments(condition=cache_on)
630 @region.conditional_cache_on_arguments(condition=cache_on)
554 def _fctx_size(_repo_id, _revision, _path):
631 def _fctx_size(_repo_id, _revision, _path):
555 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
556 ctx = self._get_ctx(repo, commit_id)
633 ctx = self._get_ctx(repo, commit_id)
557 fctx = ctx.filectx(path)
634 fctx = ctx.filectx(safe_bytes(path))
558 return fctx.size()
635 return fctx.size()
559 return _fctx_size(repo_id, commit_id, path)
636 return _fctx_size(repo_id, commit_id, path)
560
637
@@ -562,44 +639,59 b' class HgRemote(RemoteBase):'
562 def get_all_commit_ids(self, wire, name):
639 def get_all_commit_ids(self, wire, name):
563 cache_on, context_uid, repo_id = self._cache_on(wire)
640 cache_on, context_uid, repo_id = self._cache_on(wire)
564 region = self._region(wire)
641 region = self._region(wire)
642
565 @region.conditional_cache_on_arguments(condition=cache_on)
643 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
644 def _get_all_commit_ids(_context_uid, _repo_id, _name):
567 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
568 repo = repo.filtered(name)
646 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
570 return revs
647 return revs
571 return _get_all_commit_ids(context_uid, repo_id, name)
648 return _get_all_commit_ids(context_uid, repo_id, name)
572
649
573 @reraise_safe_exceptions
650 @reraise_safe_exceptions
574 def get_config_value(self, wire, section, name, untrusted=False):
651 def get_config_value(self, wire, section, name, untrusted=False):
575 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
576 return repo.ui.config(section, name, untrusted=untrusted)
653 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
577
654
578 @reraise_safe_exceptions
655 @reraise_safe_exceptions
579 def is_large_file(self, wire, commit_id, path):
656 def is_large_file(self, wire, commit_id, path):
580 cache_on, context_uid, repo_id = self._cache_on(wire)
657 cache_on, context_uid, repo_id = self._cache_on(wire)
581 region = self._region(wire)
658 region = self._region(wire)
659
582 @region.conditional_cache_on_arguments(condition=cache_on)
660 @region.conditional_cache_on_arguments(condition=cache_on)
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
661 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
584 return largefiles.lfutil.isstandin(path)
662 return largefiles.lfutil.isstandin(safe_bytes(path))
585
663
586 return _is_large_file(context_uid, repo_id, commit_id, path)
664 return _is_large_file(context_uid, repo_id, commit_id, path)
587
665
588 @reraise_safe_exceptions
666 @reraise_safe_exceptions
589 def is_binary(self, wire, revision, path):
667 def is_binary(self, wire, revision, path):
590 cache_on, context_uid, repo_id = self._cache_on(wire)
668 cache_on, context_uid, repo_id = self._cache_on(wire)
669 region = self._region(wire)
591
670
592 region = self._region(wire)
593 @region.conditional_cache_on_arguments(condition=cache_on)
671 @region.conditional_cache_on_arguments(condition=cache_on)
594 def _is_binary(_repo_id, _sha, _path):
672 def _is_binary(_repo_id, _sha, _path):
595 repo = self._factory.repo(wire)
673 repo = self._factory.repo(wire)
596 ctx = self._get_ctx(repo, revision)
674 ctx = self._get_ctx(repo, revision)
597 fctx = ctx.filectx(path)
675 fctx = ctx.filectx(safe_bytes(path))
598 return fctx.isbinary()
676 return fctx.isbinary()
599
677
600 return _is_binary(repo_id, revision, path)
678 return _is_binary(repo_id, revision, path)
601
679
602 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def md5_hash(self, wire, revision, path):
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683 region = self._region(wire)
684
685 @region.conditional_cache_on_arguments(condition=cache_on)
686 def _md5_hash(_repo_id, _sha, _path):
687 repo = self._factory.repo(wire)
688 ctx = self._get_ctx(repo, revision)
689 fctx = ctx.filectx(safe_bytes(path))
690 return hashlib.md5(fctx.data()).hexdigest()
691
692 return _md5_hash(repo_id, revision, path)
693
694 @reraise_safe_exceptions
603 def in_largefiles_store(self, wire, sha):
695 def in_largefiles_store(self, wire, sha):
604 repo = self._factory.repo(wire)
696 repo = self._factory.repo(wire)
605 return largefiles.lfutil.instore(repo, sha)
697 return largefiles.lfutil.instore(repo, sha)
@@ -627,11 +719,10 b' class HgRemote(RemoteBase):'
627 @reraise_safe_exceptions
719 @reraise_safe_exceptions
628 def lookup(self, wire, revision, both):
720 def lookup(self, wire, revision, both):
629 cache_on, context_uid, repo_id = self._cache_on(wire)
721 cache_on, context_uid, repo_id = self._cache_on(wire)
722 region = self._region(wire)
630
723
631 region = self._region(wire)
632 @region.conditional_cache_on_arguments(condition=cache_on)
724 @region.conditional_cache_on_arguments(condition=cache_on)
633 def _lookup(_context_uid, _repo_id, _revision, _both):
725 def _lookup(_context_uid, _repo_id, _revision, _both):
634
635 repo = self._factory.repo(wire)
726 repo = self._factory.repo(wire)
636 rev = _revision
727 rev = _revision
637 if isinstance(rev, int):
728 if isinstance(rev, int):
@@ -644,7 +735,7 b' class HgRemote(RemoteBase):'
644 rev = rev + -1
735 rev = rev + -1
645 try:
736 try:
646 ctx = self._get_ctx(repo, rev)
737 ctx = self._get_ctx(repo, rev)
647 except (TypeError, RepoLookupError) as e:
738 except (TypeError, RepoLookupError, binascii.Error) as e:
648 e._org_exc_tb = traceback.format_exc()
739 e._org_exc_tb = traceback.format_exc()
649 raise exceptions.LookupException(e)(rev)
740 raise exceptions.LookupException(e)(rev)
650 except LookupError as e:
741 except LookupError as e:
@@ -667,12 +758,12 b' class HgRemote(RemoteBase):'
667 repo = self._factory.repo(wire)
758 repo = self._factory.repo(wire)
668
759
669 # Disable any prompts for this repo
760 # Disable any prompts for this repo
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
761 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
671
762
672 bookmarks = dict(repo._bookmarks).keys()
763 bookmarks = list(dict(repo._bookmarks).keys())
673 remote = peer(repo, {}, url)
764 remote = peer(repo, {}, safe_bytes(url))
674 # Disable any prompts for this remote
765 # Disable any prompts for this remote
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
766 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
676
767
677 return exchange.push(
768 return exchange.push(
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
769 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
@@ -686,12 +777,15 b' class HgRemote(RemoteBase):'
686 @reraise_safe_exceptions
777 @reraise_safe_exceptions
687 def rev_range(self, wire, commit_filter):
778 def rev_range(self, wire, commit_filter):
688 cache_on, context_uid, repo_id = self._cache_on(wire)
779 cache_on, context_uid, repo_id = self._cache_on(wire)
780 region = self._region(wire)
689
781
690 region = self._region(wire)
691 @region.conditional_cache_on_arguments(condition=cache_on)
782 @region.conditional_cache_on_arguments(condition=cache_on)
692 def _rev_range(_context_uid, _repo_id, _filter):
783 def _rev_range(_context_uid, _repo_id, _filter):
693 repo = self._factory.repo(wire)
784 repo = self._factory.repo(wire)
694 revisions = [rev for rev in revrange(repo, commit_filter)]
785 revisions = [
786 ascii_str(repo[rev].hex())
787 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
788 ]
695 return revisions
789 return revisions
696
790
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
791 return _rev_range(context_uid, repo_id, sorted(commit_filter))
@@ -710,17 +804,18 b' class HgRemote(RemoteBase):'
710 return len(repo) - 1, 0
804 return len(repo) - 1, 0
711
805
712 stop, start = get_revs(repo, [node + ':'])
806 stop, start = get_revs(repo, [node + ':'])
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
807 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
714 return revs
808 return revs
715
809
716 @reraise_safe_exceptions
810 @reraise_safe_exceptions
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
811 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
718 other_path = kwargs.pop('other_path', None)
812 org_path = safe_bytes(wire["path"])
813 other_path = safe_bytes(kwargs.pop('other_path', ''))
719
814
720 # case when we want to compare two independent repositories
815 # case when we want to compare two independent repositories
721 if other_path and other_path != wire["path"]:
816 if other_path and other_path != wire["path"]:
722 baseui = self._factory._create_config(wire["config"])
817 baseui = self._factory._create_config(wire["config"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
818 repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
724 else:
819 else:
725 repo = self._factory.repo(wire)
820 repo = self._factory.repo(wire)
726 return list(repo.revs(rev_spec, *args))
821 return list(repo.revs(rev_spec, *args))
@@ -764,17 +859,20 b' class HgRemote(RemoteBase):'
764 def tags(self, wire):
859 def tags(self, wire):
765 cache_on, context_uid, repo_id = self._cache_on(wire)
860 cache_on, context_uid, repo_id = self._cache_on(wire)
766 region = self._region(wire)
861 region = self._region(wire)
862
767 @region.conditional_cache_on_arguments(condition=cache_on)
863 @region.conditional_cache_on_arguments(condition=cache_on)
768 def _tags(_context_uid, _repo_id):
864 def _tags(_context_uid, _repo_id):
769 repo = self._factory.repo(wire)
865 repo = self._factory.repo(wire)
770 return repo.tags()
866 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
771
867
772 return _tags(context_uid, repo_id)
868 return _tags(context_uid, repo_id)
773
869
774 @reraise_safe_exceptions
870 @reraise_safe_exceptions
775 def update(self, wire, node=None, clean=False):
871 def update(self, wire, node='', clean=False):
776 repo = self._factory.repo(wire)
872 repo = self._factory.repo(wire)
777 baseui = self._factory._create_config(wire['config'])
873 baseui = self._factory._create_config(wire['config'])
874 node = safe_bytes(node)
875
778 commands.update(baseui, repo, node=node, clean=clean)
876 commands.update(baseui, repo, node=node, clean=clean)
779
877
780 @reraise_safe_exceptions
878 @reraise_safe_exceptions
@@ -800,10 +898,10 b' class HgRemote(RemoteBase):'
800
898
801 baseui.write = write
899 baseui.write = write
802 if branch:
900 if branch:
803 args = [branch]
901 args = [safe_bytes(branch)]
804 else:
902 else:
805 args = []
903 args = []
806 commands.heads(baseui, repo, template='{node} ', *args)
904 commands.heads(baseui, repo, template=b'{node} ', *args)
807
905
808 return output.getvalue()
906 return output.getvalue()
809
907
@@ -812,63 +910,61 b' class HgRemote(RemoteBase):'
812 repo = self._factory.repo(wire)
910 repo = self._factory.repo(wire)
813 changelog = repo.changelog
911 changelog = repo.changelog
814 lookup = repo.lookup
912 lookup = repo.lookup
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
913 a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
816 return hex(a)
914 return hex(a)
817
915
818 @reraise_safe_exceptions
916 @reraise_safe_exceptions
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
917 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
918 baseui = self._factory._create_config(wire["config"], hooks=hooks)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
919 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
822
920
823 @reraise_safe_exceptions
921 @reraise_safe_exceptions
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
922 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
825
923
826 repo = self._factory.repo(wire)
924 repo = self._factory.repo(wire)
827 baseui = self._factory._create_config(wire['config'])
925 baseui = self._factory._create_config(wire['config'])
828 publishing = baseui.configbool('phases', 'publish')
926 publishing = baseui.configbool(b'phases', b'publish')
829 if publishing:
830 new_commit = 'public'
831 else:
832 new_commit = 'draft'
833
927
834 def _filectxfn(_repo, ctx, path):
928 def _filectxfn(_repo, ctx, path: bytes):
835 """
929 """
836 Marks given path as added/changed/removed in a given _repo. This is
930 Marks given path as added/changed/removed in a given _repo. This is
837 for internal mercurial commit function.
931 for internal mercurial commit function.
838 """
932 """
839
933
840 # check if this path is removed
934 # check if this path is removed
841 if path in removed:
935 if safe_str(path) in removed:
842 # returning None is a way to mark node for removal
936 # returning None is a way to mark node for removal
843 return None
937 return None
844
938
845 # check if this path is added
939 # check if this path is added
846 for node in updated:
940 for node in updated:
847 if node['path'] == path:
941 if safe_bytes(node['path']) == path:
848 return memfilectx(
942 return memfilectx(
849 _repo,
943 _repo,
850 changectx=ctx,
944 changectx=ctx,
851 path=node['path'],
945 path=safe_bytes(node['path']),
852 data=node['content'],
946 data=safe_bytes(node['content']),
853 islink=False,
947 islink=False,
854 isexec=bool(node['mode'] & stat.S_IXUSR),
948 isexec=bool(node['mode'] & stat.S_IXUSR),
855 copysource=False)
949 copysource=False)
950 abort_exc = exceptions.AbortException()
951 raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")
856
952
857 raise exceptions.AbortException()(
953 if publishing:
858 "Given path haven't been marked as added, "
954 new_commit_phase = b'public'
859 "changed or removed (%s)" % path)
955 else:
860
956 new_commit_phase = b'draft'
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
957 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
862
958 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
863 commit_ctx = memctx(
959 commit_ctx = memctx(
864 repo=repo,
960 repo=repo,
865 parents=parents,
961 parents=parents,
866 text=message,
962 text=safe_bytes(message),
867 files=files,
963 files=[safe_bytes(x) for x in files],
868 filectxfn=_filectxfn,
964 filectxfn=_filectxfn,
869 user=user,
965 user=safe_bytes(user),
870 date=(commit_time, commit_timezone),
966 date=(commit_time, commit_timezone),
871 extra=extra)
967 extra=kwargs)
872
968
873 n = repo.commitctx(commit_ctx)
969 n = repo.commitctx(commit_ctx)
874 new_id = hex(n)
970 new_id = hex(n)
@@ -879,11 +975,11 b' class HgRemote(RemoteBase):'
879 def pull(self, wire, url, commit_ids=None):
975 def pull(self, wire, url, commit_ids=None):
880 repo = self._factory.repo(wire)
976 repo = self._factory.repo(wire)
881 # Disable any prompts for this repo
977 # Disable any prompts for this repo
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
978 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
883
979
884 remote = peer(repo, {}, url)
980 remote = peer(repo, {}, safe_bytes(url))
885 # Disable any prompts for this remote
981 # Disable any prompts for this remote
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
982 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
887
983
888 if commit_ids:
984 if commit_ids:
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
985 commit_ids = [bin(commit_id) for commit_id in commit_ids]
@@ -892,34 +988,47 b' class HgRemote(RemoteBase):'
892 repo, remote, heads=commit_ids, force=None).cgresult
988 repo, remote, heads=commit_ids, force=None).cgresult
893
989
894 @reraise_safe_exceptions
990 @reraise_safe_exceptions
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
991 def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
896 repo = self._factory.repo(wire)
992 repo = self._factory.repo(wire)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
993 baseui = self._factory._create_config(wire['config'], hooks=hooks)
898
994
995 source = safe_bytes(source)
996
899 # Mercurial internally has a lot of logic that checks ONLY if
997 # Mercurial internally has a lot of logic that checks ONLY if
900 # option is defined, we just pass those if they are defined then
998 # option is defined, we just pass those if they are defined then
901 opts = {}
999 opts = {}
1000
902 if bookmark:
1001 if bookmark:
903 opts['bookmark'] = bookmark
1002 opts['bookmark'] = [safe_bytes(x) for x in bookmark] \
1003 if isinstance(bookmark, list) else safe_bytes(bookmark)
1004
904 if branch:
1005 if branch:
905 opts['branch'] = branch
1006 opts['branch'] = [safe_bytes(x) for x in branch] \
1007 if isinstance(branch, list) else safe_bytes(branch)
1008
906 if revision:
1009 if revision:
907 opts['rev'] = revision
1010 opts['rev'] = [safe_bytes(x) for x in revision] \
1011 if isinstance(revision, list) else safe_bytes(revision)
908
1012
909 commands.pull(baseui, repo, source, **opts)
1013 commands.pull(baseui, repo, source, **opts)
910
1014
911 @reraise_safe_exceptions
1015 @reraise_safe_exceptions
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
1016 def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
913 repo = self._factory.repo(wire)
1017 repo = self._factory.repo(wire)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
1018 baseui = self._factory._create_config(wire['config'], hooks=hooks)
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
1019
1020 revisions = [safe_bytes(x) for x in revisions] \
1021 if isinstance(revisions, list) else safe_bytes(revisions)
1022
1023 commands.push(baseui, repo, safe_bytes(dest_path),
1024 rev=revisions,
916 new_branch=push_branches)
1025 new_branch=push_branches)
917
1026
918 @reraise_safe_exceptions
1027 @reraise_safe_exceptions
919 def strip(self, wire, revision, update, backup):
1028 def strip(self, wire, revision, update, backup):
920 repo = self._factory.repo(wire)
1029 repo = self._factory.repo(wire)
921 ctx = self._get_ctx(repo, revision)
1030 ctx = self._get_ctx(repo, revision)
922 hgext_strip(
1031 hgext_strip.strip(
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
1032 repo.baseui, repo, ctx.node(), update=update, backup=backup)
924
1033
925 @reraise_safe_exceptions
1034 @reraise_safe_exceptions
@@ -943,25 +1052,25 b' class HgRemote(RemoteBase):'
943 def merge(self, wire, revision):
1052 def merge(self, wire, revision):
944 repo = self._factory.repo(wire)
1053 repo = self._factory.repo(wire)
945 baseui = self._factory._create_config(wire['config'])
1054 baseui = self._factory._create_config(wire['config'])
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
1055 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
947
1056
948 # In case of sub repositories are used mercurial prompts the user in
1057 # In case of sub repositories are used mercurial prompts the user in
949 # case of merge conflicts or different sub repository sources. By
1058 # case of merge conflicts or different sub repository sources. By
950 # setting the interactive flag to `False` mercurial doesn't prompt the
1059 # setting the interactive flag to `False` mercurial doesn't prompt the
951 # used but instead uses a default value.
1060 # used but instead uses a default value.
952 repo.ui.setconfig('ui', 'interactive', False)
1061 repo.ui.setconfig(b'ui', b'interactive', False)
953 commands.merge(baseui, repo, rev=revision)
1062 commands.merge(baseui, repo, rev=safe_bytes(revision))
954
1063
955 @reraise_safe_exceptions
1064 @reraise_safe_exceptions
956 def merge_state(self, wire):
1065 def merge_state(self, wire):
957 repo = self._factory.repo(wire)
1066 repo = self._factory.repo(wire)
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
1067 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
959
1068
960 # In case of sub repositories are used mercurial prompts the user in
1069 # In case of sub repositories are used mercurial prompts the user in
961 # case of merge conflicts or different sub repository sources. By
1070 # case of merge conflicts or different sub repository sources. By
962 # setting the interactive flag to `False` mercurial doesn't prompt the
1071 # setting the interactive flag to `False` mercurial doesn't prompt the
963 # used but instead uses a default value.
1072 # used but instead uses a default value.
964 repo.ui.setconfig('ui', 'interactive', False)
1073 repo.ui.setconfig(b'ui', b'interactive', False)
965 ms = hg_merge.mergestate(repo)
1074 ms = hg_merge.mergestate(repo)
966 return [x for x in ms.unresolved()]
1075 return [x for x in ms.unresolved()]
967
1076
@@ -969,20 +1078,22 b' class HgRemote(RemoteBase):'
969 def commit(self, wire, message, username, close_branch=False):
1078 def commit(self, wire, message, username, close_branch=False):
970 repo = self._factory.repo(wire)
1079 repo = self._factory.repo(wire)
971 baseui = self._factory._create_config(wire['config'])
1080 baseui = self._factory._create_config(wire['config'])
972 repo.ui.setconfig('ui', 'username', username)
1081 repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1082 commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)
974
1083
975 @reraise_safe_exceptions
1084 @reraise_safe_exceptions
976 def rebase(self, wire, source=None, dest=None, abort=False):
1085 def rebase(self, wire, source='', dest='', abort=False):
977 repo = self._factory.repo(wire)
1086 repo = self._factory.repo(wire)
978 baseui = self._factory._create_config(wire['config'])
1087 baseui = self._factory._create_config(wire['config'])
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
1088 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
980 # In case of sub repositories are used mercurial prompts the user in
1089 # In case of sub repositories are used mercurial prompts the user in
981 # case of merge conflicts or different sub repository sources. By
1090 # case of merge conflicts or different sub repository sources. By
982 # setting the interactive flag to `False` mercurial doesn't prompt the
1091 # setting the interactive flag to `False` mercurial doesn't prompt the
983 # used but instead uses a default value.
1092 # used but instead uses a default value.
984 repo.ui.setconfig('ui', 'interactive', False)
1093 repo.ui.setconfig(b'ui', b'interactive', False)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1094
1095 rebase.rebase(baseui, repo, base=safe_bytes(source or ''), dest=safe_bytes(dest or ''),
1096 abort=abort, keep=not abort)
986
1097
987 @reraise_safe_exceptions
1098 @reraise_safe_exceptions
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1099 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
@@ -992,17 +1103,18 b' class HgRemote(RemoteBase):'
992
1103
993 date = (tag_time, tag_timezone)
1104 date = (tag_time, tag_timezone)
994 try:
1105 try:
995 hg_tag.tag(repo, name, node, message, local, user, date)
1106 hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
996 except Abort as e:
1107 except Abort as e:
997 log.exception("Tag operation aborted")
1108 log.exception("Tag operation aborted")
998 # Exception can contain unicode which we convert
1109 # Exception can contain unicode which we convert
999 raise exceptions.AbortException(e)(repr(e))
1110 raise exceptions.AbortException(e)(repr(e))
1000
1111
1001 @reraise_safe_exceptions
1112 @reraise_safe_exceptions
1002 def bookmark(self, wire, bookmark, revision=None):
1113 def bookmark(self, wire, bookmark, revision=''):
1003 repo = self._factory.repo(wire)
1114 repo = self._factory.repo(wire)
1004 baseui = self._factory._create_config(wire['config'])
1115 baseui = self._factory._create_config(wire['config'])
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1116 revision = revision or ''
1117 commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1006
1118
1007 @reraise_safe_exceptions
1119 @reraise_safe_exceptions
1008 def install_hooks(self, wire, force=False):
1120 def install_hooks(self, wire, force=False):
@@ -1021,8 +1133,8 b' class HgRemote(RemoteBase):'
1021 pass
1133 pass
1022
1134
1023 @reraise_safe_exceptions
1135 @reraise_safe_exceptions
1024 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1136 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1025 archive_dir_name, commit_id):
1137 archive_dir_name, commit_id, cache_config):
1026
1138
1027 def file_walker(_commit_id, path):
1139 def file_walker(_commit_id, path):
1028 repo = self._factory.repo(wire)
1140 repo = self._factory.repo(wire)
@@ -1042,6 +1154,6 b' class HgRemote(RemoteBase):'
1042
1154
1043 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1155 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1044
1156
1045 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1157 return store_archive_in_cache(
1046 archive_dir_name, commit_id)
1158 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1047
1159
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -15,32 +15,34 b''
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
19
18
20 import os
19 import os
21 import subprocess
20 import subprocess
22 import time
21 from urllib.error import URLError
23 from urllib2 import URLError
22 import urllib.parse
24 import urlparse
25 import logging
23 import logging
26 import posixpath as vcspath
24 import posixpath as vcspath
27 import StringIO
25 import io
28 import urllib
26 import urllib.request
27 import urllib.parse
28 import urllib.error
29 import traceback
29 import traceback
30
30
31 import svn.client
31
32 import svn.core
32 import svn.client # noqa
33 import svn.delta
33 import svn.core # noqa
34 import svn.diff
34 import svn.delta # noqa
35 import svn.fs
35 import svn.diff # noqa
36 import svn.repos
36 import svn.fs # noqa
37 import svn.repos # noqa
37
38
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
42 from vcsserver.str_utils import safe_str, safe_bytes
43 from vcsserver.type_utils import assert_bytes
42 from vcsserver.vcs_base import RemoteBase
44 from vcsserver.vcs_base import RemoteBase
43
45 from vcsserver.lib.svnremoterepo import svnremoterepo
44 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
45
47
46
48
@@ -52,7 +54,7 b' svn_compatible_versions_map = {'
52 'pre-1.9-compatible': '1.8',
54 'pre-1.9-compatible': '1.8',
53 }
55 }
54
56
55 current_compatible_version = '1.12'
57 current_compatible_version = '1.14'
56
58
57
59
58 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
@@ -63,7 +65,7 b' def reraise_safe_exceptions(func):'
63 except Exception as e:
65 except Exception as e:
64 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
65 log.exception("Unhandled exception in svn remote call")
67 log.exception("Unhandled exception in svn remote call")
66 raise_from_original(exceptions.UnhandledException(e))
68 raise_from_original(exceptions.UnhandledException(e), e)
67 raise
69 raise
68 return wrapper
70 return wrapper
69
71
@@ -82,12 +84,12 b' class SubversionFactory(RepoFactory):'
82 or compatible_version
84 or compatible_version
83 fs_config['compatible-version'] = compatible_version_string
85 fs_config['compatible-version'] = compatible_version_string
84
86
85 log.debug('Create SVN repo with config "%s"', fs_config)
87 log.debug('Create SVN repo with config `%s`', fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
87 else:
89 else:
88 repo = svn.repos.open(path)
90 repo = svn.repos.open(path)
89
91
90 log.debug('Got SVN object: %s', repo)
92 log.debug('repository created: got SVN object: %s', repo)
91 return repo
93 return repo
92
94
93 def repo(self, wire, create=False, compatible_version=None):
95 def repo(self, wire, create=False, compatible_version=None):
@@ -107,9 +109,39 b' class SvnRemote(RemoteBase):'
107
109
108 def __init__(self, factory, hg_factory=None):
110 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
111 self._factory = factory
110 # TODO: Remove once we do not use internal Mercurial objects anymore
112
111 # for subversion
113 self._bulk_methods = {
112 self._hg_factory = hg_factory
114 # NOT supported in SVN ATM...
115 }
116 self._bulk_file_methods = {
117 "size": self.get_file_size,
118 "data": self.get_file_content,
119 "flags": self.get_node_type,
120 "is_binary": self.is_binary,
121 "md5": self.md5_hash
122 }
123
124 @reraise_safe_exceptions
125 def bulk_file_request(self, wire, commit_id, path, pre_load):
126 cache_on, context_uid, repo_id = self._cache_on(wire)
127 region = self._region(wire)
128
129 # since we use unified API, we need to cast from str to in for SVN
130 commit_id = int(commit_id)
131
132 @region.conditional_cache_on_arguments(condition=cache_on)
133 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
134 result = {}
135 for attr in pre_load:
136 try:
137 method = self._bulk_file_methods[attr]
138 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
139 result[attr] = method(wire, _commit_id, _path)
140 except KeyError as e:
141 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
142 return BinaryEnvelope(result)
143
144 return _bulk_file_request(repo_id, commit_id, path, sorted(pre_load))
113
145
114 @reraise_safe_exceptions
146 @reraise_safe_exceptions
115 def discover_svn_version(self):
147 def discover_svn_version(self):
@@ -118,31 +150,27 b' class SvnRemote(RemoteBase):'
118 svn_ver = svn.core.SVN_VERSION
150 svn_ver = svn.core.SVN_VERSION
119 except ImportError:
151 except ImportError:
120 svn_ver = None
152 svn_ver = None
121 return svn_ver
153 return safe_str(svn_ver)
122
154
123 @reraise_safe_exceptions
155 @reraise_safe_exceptions
124 def is_empty(self, wire):
156 def is_empty(self, wire):
125
126 try:
157 try:
127 return self.lookup(wire, -1) == 0
158 return self.lookup(wire, -1) == 0
128 except Exception:
159 except Exception:
129 log.exception("failed to read object_store")
160 log.exception("failed to read object_store")
130 return False
161 return False
131
162
132 def check_url(self, url, config_items):
163 def check_url(self, url, config):
133 # this can throw exception if not installed, but we detect this
134 from hgsubversion import svnrepo
135
164
136 baseui = self._hg_factory._create_config(config_items)
165 # uuid function gets only valid UUID from proper repo, else
137 # uuid function get's only valid UUID from proper repo, else
138 # throws exception
166 # throws exception
167 username, password, src_url = self.get_url_and_credentials(url)
139 try:
168 try:
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
169 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
141 except Exception:
170 except Exception:
142 tb = traceback.format_exc()
171 tb = traceback.format_exc()
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
172 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
144 raise URLError(
173 raise URLError(f'"{url}" is not a valid Subversion source url.')
145 '"%s" is not a valid Subversion source url.' % (url, ))
146 return True
174 return True
147
175
148 def is_path_valid_repository(self, wire, path):
176 def is_path_valid_repository(self, wire, path):
@@ -173,6 +201,7 b' class SvnRemote(RemoteBase):'
173 stdout, stderr = subprocessio.run_command(cmd)
201 stdout, stderr = subprocessio.run_command(cmd)
174 return stdout
202 return stdout
175
203
204 @reraise_safe_exceptions
176 def lookup(self, wire, revision):
205 def lookup(self, wire, revision):
177 if revision not in [-1, None, 'HEAD']:
206 if revision not in [-1, None, 'HEAD']:
178 raise NotImplementedError
207 raise NotImplementedError
@@ -181,6 +210,7 b' class SvnRemote(RemoteBase):'
181 head = svn.fs.youngest_rev(fs_ptr)
210 head = svn.fs.youngest_rev(fs_ptr)
182 return head
211 return head
183
212
213 @reraise_safe_exceptions
184 def lookup_interval(self, wire, start_ts, end_ts):
214 def lookup_interval(self, wire, start_ts, end_ts):
185 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
186 fsobj = svn.repos.fs(repo)
216 fsobj = svn.repos.fs(repo)
@@ -198,10 +228,12 b' class SvnRemote(RemoteBase):'
198 end_rev = svn.fs.youngest_rev(fsobj)
228 end_rev = svn.fs.youngest_rev(fsobj)
199 return start_rev, end_rev
229 return start_rev, end_rev
200
230
231 @reraise_safe_exceptions
201 def revision_properties(self, wire, revision):
232 def revision_properties(self, wire, revision):
202
233
203 cache_on, context_uid, repo_id = self._cache_on(wire)
234 cache_on, context_uid, repo_id = self._cache_on(wire)
204 region = self._region(wire)
235 region = self._region(wire)
236
205 @region.conditional_cache_on_arguments(condition=cache_on)
237 @region.conditional_cache_on_arguments(condition=cache_on)
206 def _revision_properties(_repo_id, _revision):
238 def _revision_properties(_repo_id, _revision):
207 repo = self._factory.repo(wire)
239 repo = self._factory.repo(wire)
@@ -228,7 +260,7 b' class SvnRemote(RemoteBase):'
228 removed = []
260 removed = []
229
261
230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
262 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
231 for path, change in editor.changes.iteritems():
263 for path, change in editor.changes.items():
232 # TODO: Decide what to do with directory nodes. Subversion can add
264 # TODO: Decide what to do with directory nodes. Subversion can add
233 # empty directories.
265 # empty directories.
234
266
@@ -243,7 +275,7 b' class SvnRemote(RemoteBase):'
243 removed.append(path)
275 removed.append(path)
244 else:
276 else:
245 raise NotImplementedError(
277 raise NotImplementedError(
246 "Action %s not supported on path %s" % (
278 "Action {} not supported on path {}".format(
247 change.action, path))
279 change.action, path))
248
280
249 changes = {
281 changes = {
@@ -257,6 +289,7 b' class SvnRemote(RemoteBase):'
257 def node_history(self, wire, path, revision, limit):
289 def node_history(self, wire, path, revision, limit):
258 cache_on, context_uid, repo_id = self._cache_on(wire)
290 cache_on, context_uid, repo_id = self._cache_on(wire)
259 region = self._region(wire)
291 region = self._region(wire)
292
260 @region.conditional_cache_on_arguments(condition=cache_on)
293 @region.conditional_cache_on_arguments(condition=cache_on)
261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
294 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
262 cross_copies = False
295 cross_copies = False
@@ -276,9 +309,11 b' class SvnRemote(RemoteBase):'
276 return history_revisions
309 return history_revisions
277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
310 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
278
311
312 @reraise_safe_exceptions
279 def node_properties(self, wire, path, revision):
313 def node_properties(self, wire, path, revision):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
314 cache_on, context_uid, repo_id = self._cache_on(wire)
281 region = self._region(wire)
315 region = self._region(wire)
316
282 @region.conditional_cache_on_arguments(condition=cache_on)
317 @region.conditional_cache_on_arguments(condition=cache_on)
283 def _node_properties(_repo_id, _path, _revision):
318 def _node_properties(_repo_id, _path, _revision):
284 repo = self._factory.repo(wire)
319 repo = self._factory.repo(wire)
@@ -288,7 +323,7 b' class SvnRemote(RemoteBase):'
288 return _node_properties(repo_id, path, revision)
323 return _node_properties(repo_id, path, revision)
289
324
290 def file_annotate(self, wire, path, revision):
325 def file_annotate(self, wire, path, revision):
291 abs_path = 'file://' + urllib.pathname2url(
326 abs_path = 'file://' + urllib.request.pathname2url(
292 vcspath.join(wire['path'], path))
327 vcspath.join(wire['path'], path))
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
328 file_uri = svn.core.svn_path_canonicalize(abs_path)
294
329
@@ -314,12 +349,14 b' class SvnRemote(RemoteBase):'
314
349
315 return annotations
350 return annotations
316
351
317 def get_node_type(self, wire, path, revision=None):
352 @reraise_safe_exceptions
353 def get_node_type(self, wire, revision=None, path=''):
318
354
319 cache_on, context_uid, repo_id = self._cache_on(wire)
355 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self._region(wire)
356 region = self._region(wire)
357
321 @region.conditional_cache_on_arguments(condition=cache_on)
358 @region.conditional_cache_on_arguments(condition=cache_on)
322 def _get_node_type(_repo_id, _path, _revision):
359 def _get_node_type(_repo_id, _revision, _path):
323 repo = self._factory.repo(wire)
360 repo = self._factory.repo(wire)
324 fs_ptr = svn.repos.fs(repo)
361 fs_ptr = svn.repos.fs(repo)
325 if _revision is None:
362 if _revision is None:
@@ -327,12 +364,14 b' class SvnRemote(RemoteBase):'
327 root = svn.fs.revision_root(fs_ptr, _revision)
364 root = svn.fs.revision_root(fs_ptr, _revision)
328 node = svn.fs.check_path(root, path)
365 node = svn.fs.check_path(root, path)
329 return NODE_TYPE_MAPPING.get(node, None)
366 return NODE_TYPE_MAPPING.get(node, None)
330 return _get_node_type(repo_id, path, revision)
367 return _get_node_type(repo_id, revision, path)
331
368
332 def get_nodes(self, wire, path, revision=None):
369 @reraise_safe_exceptions
370 def get_nodes(self, wire, revision=None, path=''):
333
371
334 cache_on, context_uid, repo_id = self._cache_on(wire)
372 cache_on, context_uid, repo_id = self._cache_on(wire)
335 region = self._region(wire)
373 region = self._region(wire)
374
336 @region.conditional_cache_on_arguments(condition=cache_on)
375 @region.conditional_cache_on_arguments(condition=cache_on)
337 def _get_nodes(_repo_id, _path, _revision):
376 def _get_nodes(_repo_id, _path, _revision):
338 repo = self._factory.repo(wire)
377 repo = self._factory.repo(wire)
@@ -342,27 +381,32 b' class SvnRemote(RemoteBase):'
342 root = svn.fs.revision_root(fsobj, _revision)
381 root = svn.fs.revision_root(fsobj, _revision)
343 entries = svn.fs.dir_entries(root, path)
382 entries = svn.fs.dir_entries(root, path)
344 result = []
383 result = []
345 for entry_path, entry_info in entries.iteritems():
384 for entry_path, entry_info in entries.items():
346 result.append(
385 result.append(
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
386 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 return result
387 return result
349 return _get_nodes(repo_id, path, revision)
388 return _get_nodes(repo_id, path, revision)
350
389
351 def get_file_content(self, wire, path, rev=None):
390 @reraise_safe_exceptions
391 def get_file_content(self, wire, rev=None, path=''):
352 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
353 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
394
354 if rev is None:
395 if rev is None:
355 rev = svn.fs.youngest_revision(fsobj)
396 rev = svn.fs.youngest_rev(fsobj)
397
356 root = svn.fs.revision_root(fsobj, rev)
398 root = svn.fs.revision_root(fsobj, rev)
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
399 content = svn.core.Stream(svn.fs.file_contents(root, path))
358 return content.read()
400 return BytesEnvelope(content.read())
359
401
360 def get_file_size(self, wire, path, revision=None):
402 @reraise_safe_exceptions
403 def get_file_size(self, wire, revision=None, path=''):
361
404
362 cache_on, context_uid, repo_id = self._cache_on(wire)
405 cache_on, context_uid, repo_id = self._cache_on(wire)
363 region = self._region(wire)
406 region = self._region(wire)
407
364 @region.conditional_cache_on_arguments(condition=cache_on)
408 @region.conditional_cache_on_arguments(condition=cache_on)
365 def _get_file_size(_repo_id, _path, _revision):
409 def _get_file_size(_repo_id, _revision, _path):
366 repo = self._factory.repo(wire)
410 repo = self._factory.repo(wire)
367 fsobj = svn.repos.fs(repo)
411 fsobj = svn.repos.fs(repo)
368 if _revision is None:
412 if _revision is None:
@@ -370,17 +414,17 b' class SvnRemote(RemoteBase):'
370 root = svn.fs.revision_root(fsobj, _revision)
414 root = svn.fs.revision_root(fsobj, _revision)
371 size = svn.fs.file_length(root, path)
415 size = svn.fs.file_length(root, path)
372 return size
416 return size
373 return _get_file_size(repo_id, path, revision)
417 return _get_file_size(repo_id, revision, path)
374
418
375 def create_repository(self, wire, compatible_version=None):
419 def create_repository(self, wire, compatible_version=None):
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
420 log.info('Creating Subversion repository in path "%s"', wire['path'])
377 self._factory.repo(wire, create=True,
421 self._factory.repo(wire, create=True,
378 compatible_version=compatible_version)
422 compatible_version=compatible_version)
379
423
380 def get_url_and_credentials(self, src_url):
424 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
381 obj = urlparse.urlparse(src_url)
425 obj = urllib.parse.urlparse(src_url)
382 username = obj.username or None
426 username = obj.username or ''
383 password = obj.password or None
427 password = obj.password or ''
384 return username, password, src_url
428 return username, password, src_url
385
429
386 def import_remote_repository(self, wire, src_url):
430 def import_remote_repository(self, wire, src_url):
@@ -411,16 +455,17 b' class SvnRemote(RemoteBase):'
411 log.debug('Return process ended with code: %s', rdump.returncode)
455 log.debug('Return process ended with code: %s', rdump.returncode)
412 if rdump.returncode != 0:
456 if rdump.returncode != 0:
413 errors = rdump.stderr.read()
457 errors = rdump.stderr.read()
414 log.error('svnrdump dump failed: statuscode %s: message: %s',
458 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
415 rdump.returncode, errors)
459
416 reason = 'UNKNOWN'
460 reason = 'UNKNOWN'
417 if 'svnrdump: E230001:' in errors:
461 if b'svnrdump: E230001:' in errors:
418 reason = 'INVALID_CERTIFICATE'
462 reason = 'INVALID_CERTIFICATE'
419
463
420 if reason == 'UNKNOWN':
464 if reason == 'UNKNOWN':
421 reason = 'UNKNOWN:{}'.format(errors)
465 reason = f'UNKNOWN:{safe_str(errors)}'
466
422 raise Exception(
467 raise Exception(
423 'Failed to dump the remote repository from %s. Reason:%s' % (
468 'Failed to dump the remote repository from {}. Reason:{}'.format(
424 src_url, reason))
469 src_url, reason))
425 if load.returncode != 0:
470 if load.returncode != 0:
426 raise Exception(
471 raise Exception(
@@ -428,8 +473,9 b' class SvnRemote(RemoteBase):'
428 (src_url, ))
473 (src_url, ))
429
474
430 def commit(self, wire, message, author, timestamp, updated, removed):
475 def commit(self, wire, message, author, timestamp, updated, removed):
431 assert isinstance(message, str)
476
432 assert isinstance(author, str)
477 message = safe_bytes(message)
478 author = safe_bytes(author)
433
479
434 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
435 fsobj = svn.repos.fs(repo)
481 fsobj = svn.repos.fs(repo)
@@ -453,6 +499,7 b' class SvnRemote(RemoteBase):'
453 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
499 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
454 return commit_id
500 return commit_id
455
501
502 @reraise_safe_exceptions
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
503 def diff(self, wire, rev1, rev2, path1=None, path2=None,
457 ignore_whitespace=False, context=3):
504 ignore_whitespace=False, context=3):
458
505
@@ -461,12 +508,12 b' class SvnRemote(RemoteBase):'
461 diff_creator = SvnDiffer(
508 diff_creator = SvnDiffer(
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
509 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
463 try:
510 try:
464 return diff_creator.generate_diff()
511 return BytesEnvelope(diff_creator.generate_diff())
465 except svn.core.SubversionException as e:
512 except svn.core.SubversionException as e:
466 log.exception(
513 log.exception(
467 "Error during diff operation operation. "
514 "Error during diff operation operation. "
468 "Path might not exist %s, %s" % (path1, path2))
515 "Path might not exist %s, %s", path1, path2)
469 return ""
516 return BytesEnvelope(b'')
470
517
471 @reraise_safe_exceptions
518 @reraise_safe_exceptions
472 def is_large_file(self, wire, path):
519 def is_large_file(self, wire, path):
@@ -475,16 +522,29 b' class SvnRemote(RemoteBase):'
475 @reraise_safe_exceptions
522 @reraise_safe_exceptions
476 def is_binary(self, wire, rev, path):
523 def is_binary(self, wire, rev, path):
477 cache_on, context_uid, repo_id = self._cache_on(wire)
524 cache_on, context_uid, repo_id = self._cache_on(wire)
525 region = self._region(wire)
478
526
479 region = self._region(wire)
480 @region.conditional_cache_on_arguments(condition=cache_on)
527 @region.conditional_cache_on_arguments(condition=cache_on)
481 def _is_binary(_repo_id, _rev, _path):
528 def _is_binary(_repo_id, _rev, _path):
482 raw_bytes = self.get_file_content(wire, path, rev)
529 raw_bytes = self.get_file_content(wire, rev, path)
483 return raw_bytes and '\0' in raw_bytes
530 if not raw_bytes:
531 return False
532 return b'\0' in raw_bytes
484
533
485 return _is_binary(repo_id, rev, path)
534 return _is_binary(repo_id, rev, path)
486
535
487 @reraise_safe_exceptions
536 @reraise_safe_exceptions
537 def md5_hash(self, wire, rev, path):
538 cache_on, context_uid, repo_id = self._cache_on(wire)
539 region = self._region(wire)
540
541 @region.conditional_cache_on_arguments(condition=cache_on)
542 def _md5_hash(_repo_id, _rev, _path):
543 return ''
544
545 return _md5_hash(repo_id, rev, path)
546
547 @reraise_safe_exceptions
488 def run_svn_command(self, wire, cmd, **opts):
548 def run_svn_command(self, wire, cmd, **opts):
489 path = wire.get('path', None)
549 path = wire.get('path', None)
490
550
@@ -500,14 +560,14 b' class SvnRemote(RemoteBase):'
500
560
501 try:
561 try:
502 _opts.update(opts)
562 _opts.update(opts)
503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
563 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
504
564
505 return ''.join(p), ''.join(p.error)
565 return b''.join(proc), b''.join(proc.stderr)
506 except (EnvironmentError, OSError) as err:
566 except OSError as err:
507 if safe_call:
567 if safe_call:
508 return '', safe_str(err).strip()
568 return '', safe_str(err).strip()
509 else:
569 else:
510 cmd = ' '.join(cmd) # human friendly CMD
570 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
511 tb_err = ("Couldn't run svn command (%s).\n"
571 tb_err = ("Couldn't run svn command (%s).\n"
512 "Original error was:%s\n"
572 "Original error was:%s\n"
513 "Call options:%s\n"
573 "Call options:%s\n"
@@ -522,9 +582,8 b' class SvnRemote(RemoteBase):'
522 binary_dir = settings.BINARY_DIR
582 binary_dir = settings.BINARY_DIR
523 executable = None
583 executable = None
524 if binary_dir:
584 if binary_dir:
525 executable = os.path.join(binary_dir, 'python')
585 executable = os.path.join(binary_dir, 'python3')
526 return install_svn_hooks(
586 return install_svn_hooks(repo_path, force_create=force)
527 repo_path, executable=executable, force_create=force)
528
587
529 @reraise_safe_exceptions
588 @reraise_safe_exceptions
530 def get_hooks_info(self, wire):
589 def get_hooks_info(self, wire):
@@ -541,13 +600,14 b' class SvnRemote(RemoteBase):'
541 pass
600 pass
542
601
543 @reraise_safe_exceptions
602 @reraise_safe_exceptions
544 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
603 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
545 archive_dir_name, commit_id):
604 archive_dir_name, commit_id, cache_config):
546
605
547 def walk_tree(root, root_dir, _commit_id):
606 def walk_tree(root, root_dir, _commit_id):
548 """
607 """
549 Special recursive svn repo walker
608 Special recursive svn repo walker
550 """
609 """
610 root_dir = safe_bytes(root_dir)
551
611
552 filemode_default = 0o100644
612 filemode_default = 0o100644
553 filemode_executable = 0o100755
613 filemode_executable = 0o100755
@@ -560,10 +620,10 b' class SvnRemote(RemoteBase):'
560 # return only DIR, and then all entries in that dir
620 # return only DIR, and then all entries in that dir
561 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
621 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
562 new_root = os.path.join(root_dir, f_name)
622 new_root = os.path.join(root_dir, f_name)
563 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
623 yield from walk_tree(root, new_root, _commit_id)
564 yield _f_name, _f_data, _f_type
565 else:
624 else:
566 f_path = os.path.join(root_dir, f_name).rstrip('/')
625
626 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
567 prop_list = svn.fs.node_proplist(root, f_path)
627 prop_list = svn.fs.node_proplist(root, f_path)
568
628
569 f_mode = filemode_default
629 f_mode = filemode_default
@@ -601,8 +661,8 b' class SvnRemote(RemoteBase):'
601 data_stream = f_data['content_stream']
661 data_stream = f_data['content_stream']
602 yield ArchiveNode(file_path, mode, is_link, data_stream)
662 yield ArchiveNode(file_path, mode, is_link, data_stream)
603
663
604 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
664 return store_archive_in_cache(
605 archive_dir_name, commit_id)
665 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
606
666
607
667
608 class SvnDiffer(object):
668 class SvnDiffer(object):
@@ -643,15 +703,15 b' class SvnDiffer(object):'
643 "Source type: %s, target type: %s" %
703 "Source type: %s, target type: %s" %
644 (self.src_kind, self.tgt_kind))
704 (self.src_kind, self.tgt_kind))
645
705
646 def generate_diff(self):
706 def generate_diff(self) -> bytes:
647 buf = StringIO.StringIO()
707 buf = io.BytesIO()
648 if self.tgt_kind == svn.core.svn_node_dir:
708 if self.tgt_kind == svn.core.svn_node_dir:
649 self._generate_dir_diff(buf)
709 self._generate_dir_diff(buf)
650 else:
710 else:
651 self._generate_file_diff(buf)
711 self._generate_file_diff(buf)
652 return buf.getvalue()
712 return buf.getvalue()
653
713
654 def _generate_dir_diff(self, buf):
714 def _generate_dir_diff(self, buf: io.BytesIO):
655 editor = DiffChangeEditor()
715 editor = DiffChangeEditor()
656 editor_ptr, editor_baton = svn.delta.make_editor(editor)
716 editor_ptr, editor_baton = svn.delta.make_editor(editor)
657 svn.repos.dir_delta2(
717 svn.repos.dir_delta2(
@@ -672,7 +732,7 b' class SvnDiffer(object):'
672 self._generate_node_diff(
732 self._generate_node_diff(
673 buf, change, path, self.tgt_path, path, self.src_path)
733 buf, change, path, self.tgt_path, path, self.src_path)
674
734
675 def _generate_file_diff(self, buf):
735 def _generate_file_diff(self, buf: io.BytesIO):
676 change = None
736 change = None
677 if self.src_kind == svn.core.svn_node_none:
737 if self.src_kind == svn.core.svn_node_none:
678 change = "add"
738 change = "add"
@@ -684,7 +744,13 b' class SvnDiffer(object):'
684 buf, change, tgt_path, tgt_base, src_path, src_base)
744 buf, change, tgt_path, tgt_base, src_path, src_base)
685
745
686 def _generate_node_diff(
746 def _generate_node_diff(
687 self, buf, change, tgt_path, tgt_base, src_path, src_base):
747 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
748
749 tgt_path_bytes = safe_bytes(tgt_path)
750 tgt_path = safe_str(tgt_path)
751
752 src_path_bytes = safe_bytes(src_path)
753 src_path = safe_str(src_path)
688
754
689 if self.src_rev == self.tgt_rev and tgt_base == src_base:
755 if self.src_rev == self.tgt_rev and tgt_base == src_base:
690 # makes consistent behaviour with git/hg to return empty diff if
756 # makes consistent behaviour with git/hg to return empty diff if
@@ -697,55 +763,55 b' class SvnDiffer(object):'
697 self.binary_content = False
763 self.binary_content = False
698 mime_type = self._get_mime_type(tgt_full_path)
764 mime_type = self._get_mime_type(tgt_full_path)
699
765
700 if mime_type and not mime_type.startswith('text'):
766 if mime_type and not mime_type.startswith(b'text'):
701 self.binary_content = True
767 self.binary_content = True
702 buf.write("=" * 67 + '\n')
768 buf.write(b"=" * 67 + b'\n')
703 buf.write("Cannot display: file marked as a binary type.\n")
769 buf.write(b"Cannot display: file marked as a binary type.\n")
704 buf.write("svn:mime-type = %s\n" % mime_type)
770 buf.write(b"svn:mime-type = %s\n" % mime_type)
705 buf.write("Index: %s\n" % (tgt_path, ))
771 buf.write(b"Index: %b\n" % tgt_path_bytes)
706 buf.write("=" * 67 + '\n')
772 buf.write(b"=" * 67 + b'\n')
707 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
773 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
708 'tgt_path': tgt_path})
709
774
710 if change == 'add':
775 if change == 'add':
711 # TODO: johbo: SVN is missing a zero here compared to git
776 # TODO: johbo: SVN is missing a zero here compared to git
712 buf.write("new file mode 10644\n")
777 buf.write(b"new file mode 10644\n")
778
779 # TODO(marcink): intro to binary detection of svn patches
780 # if self.binary_content:
781 # buf.write(b'GIT binary patch\n')
782
783 buf.write(b"--- /dev/null\t(revision 0)\n")
784 src_lines = []
785 else:
786 if change == 'delete':
787 buf.write(b"deleted file mode 10644\n")
713
788
714 #TODO(marcink): intro to binary detection of svn patches
789 # TODO(marcink): intro to binary detection of svn patches
715 # if self.binary_content:
790 # if self.binary_content:
716 # buf.write('GIT binary patch\n')
791 # buf.write('GIT binary patch\n')
717
792
718 buf.write("--- /dev/null\t(revision 0)\n")
793 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
719 src_lines = []
720 else:
721 if change == 'delete':
722 buf.write("deleted file mode 10644\n")
723
724 #TODO(marcink): intro to binary detection of svn patches
725 # if self.binary_content:
726 # buf.write('GIT binary patch\n')
727
728 buf.write("--- a/%s\t(revision %s)\n" % (
729 src_path, self.src_rev))
730 src_lines = self._svn_readlines(self.src_root, src_full_path)
794 src_lines = self._svn_readlines(self.src_root, src_full_path)
731
795
732 if change == 'delete':
796 if change == 'delete':
733 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
797 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
734 tgt_lines = []
798 tgt_lines = []
735 else:
799 else:
736 buf.write("+++ b/%s\t(revision %s)\n" % (
800 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
737 tgt_path, self.tgt_rev))
738 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
801 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
739
802
803 # we made our diff header, time to generate the diff content into our buffer
804
740 if not self.binary_content:
805 if not self.binary_content:
741 udiff = svn_diff.unified_diff(
806 udiff = svn_diff.unified_diff(
742 src_lines, tgt_lines, context=self.context,
807 src_lines, tgt_lines, context=self.context,
743 ignore_blank_lines=self.ignore_whitespace,
808 ignore_blank_lines=self.ignore_whitespace,
744 ignore_case=False,
809 ignore_case=False,
745 ignore_space_changes=self.ignore_whitespace)
810 ignore_space_changes=self.ignore_whitespace)
811
746 buf.writelines(udiff)
812 buf.writelines(udiff)
747
813
748 def _get_mime_type(self, path):
814 def _get_mime_type(self, path) -> bytes:
749 try:
815 try:
750 mime_type = svn.fs.node_prop(
816 mime_type = svn.fs.node_prop(
751 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
817 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
@@ -761,7 +827,9 b' class SvnDiffer(object):'
761 if node_kind not in (
827 if node_kind not in (
762 svn.core.svn_node_file, svn.core.svn_node_symlink):
828 svn.core.svn_node_file, svn.core.svn_node_symlink):
763 return []
829 return []
764 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
830 content = svn.core.Stream(
831 svn.fs.file_contents(fs_root, node_path)).read()
832
765 return content.splitlines(True)
833 return content.splitlines(True)
766
834
767
835
@@ -799,7 +867,7 b' class TxnNodeProcessor(object):'
799 """
867 """
800
868
801 def __init__(self, node, txn_root):
869 def __init__(self, node, txn_root):
802 assert isinstance(node['path'], str)
870 assert_bytes(node['path'])
803
871
804 self.node = node
872 self.node = node
805 self.txn_root = txn_root
873 self.txn_root = txn_root
@@ -835,23 +903,24 b' class TxnNodeProcessor(object):'
835 svn.fs.make_file(self.txn_root, self.node['path'])
903 svn.fs.make_file(self.txn_root, self.node['path'])
836
904
837 def _update_file_content(self):
905 def _update_file_content(self):
838 assert isinstance(self.node['content'], str)
906 assert_bytes(self.node['content'])
907
839 handler, baton = svn.fs.apply_textdelta(
908 handler, baton = svn.fs.apply_textdelta(
840 self.txn_root, self.node['path'], None, None)
909 self.txn_root, self.node['path'], None, None)
841 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
910 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
842
911
843 def _update_file_properties(self):
912 def _update_file_properties(self):
844 properties = self.node.get('properties', {})
913 properties = self.node.get('properties', {})
845 for key, value in properties.iteritems():
914 for key, value in properties.items():
846 svn.fs.change_node_prop(
915 svn.fs.change_node_prop(
847 self.txn_root, self.node['path'], key, value)
916 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
848
917
849
918
850 def apr_time_t(timestamp):
919 def apr_time_t(timestamp):
851 """
920 """
852 Convert a Python timestamp into APR timestamp type apr_time_t
921 Convert a Python timestamp into APR timestamp type apr_time_t
853 """
922 """
854 return timestamp * 1E6
923 return int(timestamp * 1E6)
855
924
856
925
857 def svn_opt_revision_value_t(num):
926 def svn_opt_revision_value_t(num):
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -27,7 +27,7 b' import mercurial.hgweb.hgweb_mod'
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30 from vcsserver.str_utils import ascii_bytes, safe_bytes
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
@@ -81,7 +81,7 b' class HgWeb(mercurial.hgweb.hgweb_mod.hg'
81 first_chunk = None
81 first_chunk = None
82
82
83 try:
83 try:
84 data = gen.next()
84 data = next(gen)
85
85
86 def first_chunk():
86 def first_chunk():
87 yield data
87 yield data
@@ -94,16 +94,17 b' class HgWeb(mercurial.hgweb.hgweb_mod.hg'
94
94
95 def _runwsgi(self, req, res, repo):
95 def _runwsgi(self, req, res, repo):
96
96
97 cmd = req.qsparams.get('cmd', '')
97 cmd = req.qsparams.get(b'cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
100 # internally from HG
101 log.warning('cmd: `%s` is not supported by the mercurial wireprotocol v1', cmd)
101 from mercurial.hgweb.common import statusmessage
102 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
104 res.setbodybytes(b'')
104 return res.sendresponse()
105 return res.sendresponse()
105
106
106 return super(HgWeb, self)._runwsgi(req, res, repo)
107 return super()._runwsgi(req, res, repo)
107
108
108
109
109 def make_hg_ui_from_config(repo_config):
110 def make_hg_ui_from_config(repo_config):
@@ -115,10 +116,13 b' def make_hg_ui_from_config(repo_config):'
115 baseui._tcfg = mercurial.config.config()
116 baseui._tcfg = mercurial.config.config()
116
117
117 for section, option, value in repo_config:
118 for section, option, value in repo_config:
118 baseui.setconfig(section, option, value)
119 baseui.setconfig(
120 ascii_bytes(section, allow_bytes=True),
121 ascii_bytes(option, allow_bytes=True),
122 ascii_bytes(value, allow_bytes=True))
119
123
120 # make our hgweb quiet so it doesn't print output
124 # make our hgweb quiet so it doesn't print output
121 baseui.setconfig('ui', 'quiet', 'true')
125 baseui.setconfig(b'ui', b'quiet', b'true')
122
126
123 return baseui
127 return baseui
124
128
@@ -131,11 +135,14 b' def update_hg_ui_from_hgrc(baseui, repo_'
131 return
135 return
132 log.debug('reading hgrc from %s', path)
136 log.debug('reading hgrc from %s', path)
133 cfg = mercurial.config.config()
137 cfg = mercurial.config.config()
134 cfg.read(path)
138 cfg.read(ascii_bytes(path))
135 for section in HG_UI_SECTIONS:
139 for section in HG_UI_SECTIONS:
136 for k, v in cfg.items(section):
140 for k, v in cfg.items(section):
137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
141 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 baseui.setconfig(section, k, v)
142 baseui.setconfig(
143 ascii_bytes(section, allow_bytes=True),
144 ascii_bytes(k, allow_bytes=True),
145 ascii_bytes(v, allow_bytes=True))
139
146
140
147
141 def create_hg_wsgi_app(repo_path, repo_name, config):
148 def create_hg_wsgi_app(repo_path, repo_name, config):
@@ -151,7 +158,7 b' def create_hg_wsgi_app(repo_path, repo_n'
151 update_hg_ui_from_hgrc(baseui, repo_path)
158 update_hg_ui_from_hgrc(baseui, repo_path)
152
159
153 try:
160 try:
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
161 return HgWeb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
155 except mercurial.error.RequirementError as e:
162 except mercurial.error.RequirementError as e:
156 raise exceptions.RequirementException(e)(e)
163 raise exceptions.RequirementException(e)(e)
157
164
@@ -225,10 +232,10 b' class GitLFSHandler(object):'
225
232
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
233 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 git_path = settings.GIT_EXECUTABLE
234 git_path = settings.GIT_EXECUTABLE
228 update_server_info = config.pop('git_update_server_info')
235 update_server_info = config.pop(b'git_update_server_info')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
236 git_lfs_enabled = config.pop(b'git_lfs_enabled')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
237 git_lfs_store_path = config.pop(b'git_lfs_store_path')
231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
238 git_lfs_http_scheme = config.pop(b'git_lfs_http_scheme', 'http')
232 app = GitLFSHandler(
239 app = GitLFSHandler(
233 repo_path, repo_name, git_path, update_server_info, config)
240 repo_path, repo_name, git_path, update_server_info, config)
234
241
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -23,15 +23,17 b' along with git_http_backend.py Project.'
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import os
25 import os
26 import collections
26 import logging
27 import logging
27 import subprocess32 as subprocess
28 import subprocess
28 from collections import deque
29 import threading
29 from threading import Event, Thread
30
31 from vcsserver.str_utils import safe_str
30
32
31 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
32
34
33
35
34 class StreamFeeder(Thread):
36 class StreamFeeder(threading.Thread):
35 """
37 """
36 Normal writing into pipe-like is blocking once the buffer is filled.
38 Normal writing into pipe-like is blocking once the buffer is filled.
37 This thread allows a thread to seep data from a file-like into a pipe
39 This thread allows a thread to seep data from a file-like into a pipe
@@ -40,24 +42,18 b' class StreamFeeder(Thread):'
40 """
42 """
41
43
42 def __init__(self, source):
44 def __init__(self, source):
43 super(StreamFeeder, self).__init__()
45 super().__init__()
44 self.daemon = True
46 self.daemon = True
45 filelike = False
47 filelike = False
46 self.bytes = bytes()
48 self.bytes = b''
47 if type(source) in (type(''), bytes, bytearray): # string-like
49 if type(source) in (str, bytes, bytearray): # string-like
48 self.bytes = bytes(source)
50 self.bytes = bytes(source)
49 else: # can be either file pointer or file-like
51 else: # can be either file pointer or file-like
50 if type(source) in (int, long): # file pointer it is
52 if isinstance(source, int): # file pointer it is
51 # converting file descriptor (int) stdin into file-like
53 # converting file descriptor (int) stdin into file-like
52 try:
53 source = os.fdopen(source, 'rb', 16384)
54 source = os.fdopen(source, 'rb', 16384)
54 except Exception:
55 pass
56 # let's see if source is file-like by now
55 # let's see if source is file-like by now
57 try:
56 filelike = hasattr(source, 'read')
58 filelike = source.read
59 except Exception:
60 pass
61 if not filelike and not self.bytes:
57 if not filelike and not self.bytes:
62 raise TypeError("StreamFeeder's source object must be a readable "
58 raise TypeError("StreamFeeder's source object must be a readable "
63 "file-like, a file descriptor, or a string-like.")
59 "file-like, a file descriptor, or a string-like.")
@@ -65,28 +61,31 b' class StreamFeeder(Thread):'
65 self.readiface, self.writeiface = os.pipe()
61 self.readiface, self.writeiface = os.pipe()
66
62
67 def run(self):
63 def run(self):
68 t = self.writeiface
64 writer = self.writeiface
69 try:
65 try:
70 if self.bytes:
66 if self.bytes:
71 os.write(t, self.bytes)
67 os.write(writer, self.bytes)
72 else:
68 else:
73 s = self.source
69 s = self.source
74 b = s.read(4096)
70
75 while b:
71 while 1:
76 os.write(t, b)
72 _bytes = s.read(4096)
77 b = s.read(4096)
73 if not _bytes:
74 break
75 os.write(writer, _bytes)
76
78 finally:
77 finally:
79 os.close(t)
78 os.close(writer)
80
79
81 @property
80 @property
82 def output(self):
81 def output(self):
83 return self.readiface
82 return self.readiface
84
83
85
84
86 class InputStreamChunker(Thread):
85 class InputStreamChunker(threading.Thread):
87 def __init__(self, source, target, buffer_size, chunk_size):
86 def __init__(self, source, target, buffer_size, chunk_size):
88
87
89 super(InputStreamChunker, self).__init__()
88 super().__init__()
90
89
91 self.daemon = True # die die die.
90 self.daemon = True # die die die.
92
91
@@ -95,16 +94,16 b' class InputStreamChunker(Thread):'
95 self.chunk_count_max = int(buffer_size / chunk_size) + 1
94 self.chunk_count_max = int(buffer_size / chunk_size) + 1
96 self.chunk_size = chunk_size
95 self.chunk_size = chunk_size
97
96
98 self.data_added = Event()
97 self.data_added = threading.Event()
99 self.data_added.clear()
98 self.data_added.clear()
100
99
101 self.keep_reading = Event()
100 self.keep_reading = threading.Event()
102 self.keep_reading.set()
101 self.keep_reading.set()
103
102
104 self.EOF = Event()
103 self.EOF = threading.Event()
105 self.EOF.clear()
104 self.EOF.clear()
106
105
107 self.go = Event()
106 self.go = threading.Event()
108 self.go.set()
107 self.go.set()
109
108
110 def stop(self):
109 def stop(self):
@@ -115,7 +114,7 b' class InputStreamChunker(Thread):'
115 # go of the input because, if successful, .close() will send EOF
114 # go of the input because, if successful, .close() will send EOF
116 # down the pipe.
115 # down the pipe.
117 self.source.close()
116 self.source.close()
118 except:
117 except Exception:
119 pass
118 pass
120
119
121 def run(self):
120 def run(self):
@@ -146,7 +145,7 b' class InputStreamChunker(Thread):'
146
145
147 try:
146 try:
148 b = s.read(cs)
147 b = s.read(cs)
149 except ValueError:
148 except ValueError: # probably "I/O operation on closed file"
150 b = ''
149 b = ''
151
150
152 self.EOF.set()
151 self.EOF.set()
@@ -166,18 +165,20 b' class BufferedGenerator(object):'
166 StopIteration after the last chunk of data is yielded.
165 StopIteration after the last chunk of data is yielded.
167 """
166 """
168
167
169 def __init__(self, source, buffer_size=65536, chunk_size=4096,
168 def __init__(self, name, source, buffer_size=65536, chunk_size=4096,
170 starting_values=None, bottomless=False):
169 starting_values=None, bottomless=False):
171 starting_values = starting_values or []
170 starting_values = starting_values or []
171 self.name = name
172 self.buffer_size = buffer_size
173 self.chunk_size = chunk_size
172
174
173 if bottomless:
175 if bottomless:
174 maxlen = int(buffer_size / chunk_size)
176 maxlen = int(buffer_size / chunk_size)
175 else:
177 else:
176 maxlen = None
178 maxlen = None
177
179
178 self.data = deque(starting_values, maxlen)
180 self.data_queue = collections.deque(starting_values, maxlen)
179 self.worker = InputStreamChunker(source, self.data, buffer_size,
181 self.worker = InputStreamChunker(source, self.data_queue, buffer_size, chunk_size)
180 chunk_size)
181 if starting_values:
182 if starting_values:
182 self.worker.data_added.set()
183 self.worker.data_added.set()
183 self.worker.start()
184 self.worker.start()
@@ -185,17 +186,21 b' class BufferedGenerator(object):'
185 ####################
186 ####################
186 # Generator's methods
187 # Generator's methods
187 ####################
188 ####################
189 def __str__(self):
190 return f'BufferedGenerator(name={self.name} chunk: {self.chunk_size} on buffer: {self.buffer_size})'
188
191
189 def __iter__(self):
192 def __iter__(self):
190 return self
193 return self
191
194
192 def next(self):
195 def __next__(self):
193 while not len(self.data) and not self.worker.EOF.is_set():
196
197 while not self.length and not self.worker.EOF.is_set():
194 self.worker.data_added.clear()
198 self.worker.data_added.clear()
195 self.worker.data_added.wait(0.2)
199 self.worker.data_added.wait(0.2)
196 if len(self.data):
200
201 if self.length:
197 self.worker.keep_reading.set()
202 self.worker.keep_reading.set()
198 return bytes(self.data.popleft())
203 return bytes(self.data_queue.popleft())
199 elif self.worker.EOF.is_set():
204 elif self.worker.EOF.is_set():
200 raise StopIteration
205 raise StopIteration
201
206
@@ -249,7 +254,7 b' class BufferedGenerator(object):'
249 @property
254 @property
250 def done_reading(self):
255 def done_reading(self):
251 """
256 """
252 Done_reding does not mean that the iterator's buffer is empty.
257 Done_reading does not mean that the iterator's buffer is empty.
253 Iterator might have done reading from underlying source, but the read
258 Iterator might have done reading from underlying source, but the read
254 chunks might still be available for serving through .next() method.
259 chunks might still be available for serving through .next() method.
255
260
@@ -262,31 +267,31 b' class BufferedGenerator(object):'
262 """
267 """
263 returns int.
268 returns int.
264
269
265 This is the lenght of the que of chunks, not the length of
270 This is the length of the queue of chunks, not the length of
266 the combined contents in those chunks.
271 the combined contents in those chunks.
267
272
268 __len__() cannot be meaningfully implemented because this
273 __len__() cannot be meaningfully implemented because this
269 reader is just flying throuh a bottomless pit content and
274 reader is just flying through a bottomless pit content and
270 can only know the lenght of what it already saw.
275 can only know the length of what it already saw.
271
276
272 If __len__() on WSGI server per PEP 3333 returns a value,
277 If __len__() on WSGI server per PEP 3333 returns a value,
273 the responce's length will be set to that. In order not to
278 the response's length will be set to that. In order not to
274 confuse WSGI PEP3333 servers, we will not implement __len__
279 confuse WSGI PEP3333 servers, we will not implement __len__
275 at all.
280 at all.
276 """
281 """
277 return len(self.data)
282 return len(self.data_queue)
278
283
279 def prepend(self, x):
284 def prepend(self, x):
280 self.data.appendleft(x)
285 self.data_queue.appendleft(x)
281
286
282 def append(self, x):
287 def append(self, x):
283 self.data.append(x)
288 self.data_queue.append(x)
284
289
285 def extend(self, o):
290 def extend(self, o):
286 self.data.extend(o)
291 self.data_queue.extend(o)
287
292
288 def __getitem__(self, i):
293 def __getitem__(self, i):
289 return self.data[i]
294 return self.data_queue[i]
290
295
291
296
292 class SubprocessIOChunker(object):
297 class SubprocessIOChunker(object):
@@ -314,7 +319,7 b' class SubprocessIOChunker(object):'
314
319
315 - We are multithreaded. Writing in and reading out, err are all sep threads.
320 - We are multithreaded. Writing in and reading out, err are all sep threads.
316 - We support concurrent (in and out) stream processing.
321 - We support concurrent (in and out) stream processing.
317 - The output is not a stream. It's a queue of read string (bytes, not unicode)
322 - The output is not a stream. It's a queue of read string (bytes, not str)
318 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
323 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
319 - We are non-blocking in more respects than communicate()
324 - We are non-blocking in more respects than communicate()
320 (reading from subprocess out pauses when internal buffer is full, but
325 (reading from subprocess out pauses when internal buffer is full, but
@@ -323,16 +328,16 b' class SubprocessIOChunker(object):'
323 does not block the parallel inpipe reading occurring parallel thread.)
328 does not block the parallel inpipe reading occurring parallel thread.)
324
329
325 The purpose of the object is to allow us to wrap subprocess interactions into
330 The purpose of the object is to allow us to wrap subprocess interactions into
326 and interable that can be passed to a WSGI server as the application's return
331 an iterable that can be passed to a WSGI server as the application's return
327 value. Because of stream-processing-ability, WSGI does not have to read ALL
332 value. Because of stream-processing-ability, WSGI does not have to read ALL
328 of the subprocess's output and buffer it, before handing it to WSGI server for
333 of the subprocess's output and buffer it, before handing it to WSGI server for
329 HTTP response. Instead, the class initializer reads just a bit of the stream
334 HTTP response. Instead, the class initializer reads just a bit of the stream
330 to figure out if error ocurred or likely to occur and if not, just hands the
335 to figure out if error occurred or likely to occur and if not, just hands the
331 further iteration over subprocess output to the server for completion of HTTP
336 further iteration over subprocess output to the server for completion of HTTP
332 response.
337 response.
333
338
334 The real or perceived subprocess error is trapped and raised as one of
339 The real or perceived subprocess error is trapped and raised as one of
335 EnvironmentError family of exceptions
340 OSError family of exceptions
336
341
337 Example usage:
342 Example usage:
338 # try:
343 # try:
@@ -342,7 +347,7 b' class SubprocessIOChunker(object):'
342 # buffer_size = 65536,
347 # buffer_size = 65536,
343 # chunk_size = 4096
348 # chunk_size = 4096
344 # )
349 # )
345 # except (EnvironmentError) as e:
350 # except (OSError) as e:
346 # print str(e)
351 # print str(e)
347 # raise e
352 # raise e
348 #
353 #
@@ -358,15 +363,17 b' class SubprocessIOChunker(object):'
358 _close_input_fd = None
363 _close_input_fd = None
359
364
360 _closed = False
365 _closed = False
366 _stdout = None
367 _stderr = None
361
368
362 def __init__(self, cmd, inputstream=None, buffer_size=65536,
369 def __init__(self, cmd, input_stream=None, buffer_size=65536,
363 chunk_size=4096, starting_values=None, fail_on_stderr=True,
370 chunk_size=4096, starting_values=None, fail_on_stderr=True,
364 fail_on_return_code=True, **kwargs):
371 fail_on_return_code=True, **kwargs):
365 """
372 """
366 Initializes SubprocessIOChunker
373 Initializes SubprocessIOChunker
367
374
368 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
375 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
369 :param inputstream: (Default: None) A file-like, string, or file pointer.
376 :param input_stream: (Default: None) A file-like, string, or file pointer.
370 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
377 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
371 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
378 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
372 :param starting_values: (Default: []) An array of strings to put in front of output que.
379 :param starting_values: (Default: []) An array of strings to put in front of output que.
@@ -376,71 +383,86 b' class SubprocessIOChunker(object):'
376 exception if the return code is not 0.
383 exception if the return code is not 0.
377 """
384 """
378
385
386 kwargs['shell'] = kwargs.get('shell', True)
387
379 starting_values = starting_values or []
388 starting_values = starting_values or []
380 if inputstream:
389 if input_stream:
381 input_streamer = StreamFeeder(inputstream)
390 input_streamer = StreamFeeder(input_stream)
382 input_streamer.start()
391 input_streamer.start()
383 inputstream = input_streamer.output
392 input_stream = input_streamer.output
384 self._close_input_fd = inputstream
393 self._close_input_fd = input_stream
385
394
386 self._fail_on_stderr = fail_on_stderr
395 self._fail_on_stderr = fail_on_stderr
387 self._fail_on_return_code = fail_on_return_code
396 self._fail_on_return_code = fail_on_return_code
388
397 self.cmd = cmd
389 _shell = kwargs.get('shell', True)
390 kwargs['shell'] = _shell
391
398
392 _p = subprocess.Popen(cmd, bufsize=-1,
399 _p = subprocess.Popen(cmd, bufsize=-1, stdin=input_stream, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
393 stdin=inputstream,
394 stdout=subprocess.PIPE,
395 stderr=subprocess.PIPE,
396 **kwargs)
400 **kwargs)
401 self.process = _p
397
402
398 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
403 bg_out = BufferedGenerator('stdout', _p.stdout, buffer_size, chunk_size, starting_values)
399 starting_values)
404 bg_err = BufferedGenerator('stderr', _p.stderr, 10240, 1, bottomless=True)
400 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
401
405
402 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
406 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
403 # doing this until we reach either end of file, or end of buffer.
407 # doing this until we reach either end of file, or end of buffer.
404 bg_out.data_added_event.wait(1)
408 bg_out.data_added_event.wait(0.2)
405 bg_out.data_added_event.clear()
409 bg_out.data_added_event.clear()
406
410
407 # at this point it's still ambiguous if we are done reading or just full buffer.
411 # at this point it's still ambiguous if we are done reading or just full buffer.
408 # Either way, if error (returned by ended process, or implied based on
412 # Either way, if error (returned by ended process, or implied based on
409 # presence of stuff in stderr output) we error out.
413 # presence of stuff in stderr output) we error out.
410 # Else, we are happy.
414 # Else, we are happy.
411 _returncode = _p.poll()
415 return_code = _p.poll()
416 ret_code_ok = return_code in [None, 0]
417 ret_code_fail = return_code is not None and return_code != 0
418 if (
419 (ret_code_fail and fail_on_return_code) or
420 (ret_code_ok and fail_on_stderr and bg_err.length)
421 ):
412
422
413 if ((_returncode and fail_on_return_code) or
414 (fail_on_stderr and _returncode is None and bg_err.length)):
415 try:
423 try:
416 _p.terminate()
424 _p.terminate()
417 except Exception:
425 except Exception:
418 pass
426 pass
427
419 bg_out.stop()
428 bg_out.stop()
429 out = b''.join(bg_out)
430 self._stdout = out
431
420 bg_err.stop()
432 bg_err.stop()
421 if fail_on_stderr:
433 err = b''.join(bg_err)
422 err = ''.join(bg_err)
434 self._stderr = err
423 raise EnvironmentError(
435
424 "Subprocess exited due to an error:\n" + err)
436 # code from https://github.com/schacon/grack/pull/7
425 if _returncode and fail_on_return_code:
437 if err.strip() == b'fatal: The remote end hung up unexpectedly' and out.startswith(b'0034shallow '):
426 err = ''.join(bg_err)
438 bg_out = iter([out])
439 _p = None
440 elif err and fail_on_stderr:
441 text_err = err.decode()
442 raise OSError(
443 f"Subprocess exited due to an error:\n{text_err}")
444
445 if ret_code_fail and fail_on_return_code:
446 text_err = err.decode()
427 if not err:
447 if not err:
428 # maybe get empty stderr, try stdout instead
448 # maybe get empty stderr, try stdout instead
429 # in many cases git reports the errors on stdout too
449 # in many cases git reports the errors on stdout too
430 err = ''.join(bg_out)
450 text_err = out.decode()
431 raise EnvironmentError(
451 raise OSError(
432 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
452 f"Subprocess exited with non 0 ret code:{return_code}: stderr:{text_err}")
433 _returncode, err))
434
453
435 self.process = _p
454 self.stdout = bg_out
436 self.output = bg_out
455 self.stderr = bg_err
437 self.error = bg_err
456 self.inputstream = input_stream
438 self.inputstream = inputstream
457
458 def __str__(self):
459 proc = getattr(self, 'process', 'NO_PROCESS')
460 return f'SubprocessIOChunker: {proc}'
439
461
440 def __iter__(self):
462 def __iter__(self):
441 return self
463 return self
442
464
443 def next(self):
465 def __next__(self):
444 # Note: mikhail: We need to be sure that we are checking the return
466 # Note: mikhail: We need to be sure that we are checking the return
445 # code after the stdout stream is closed. Some processes, e.g. git
467 # code after the stdout stream is closed. Some processes, e.g. git
446 # are doing some magic in between closing stdout and terminating the
468 # are doing some magic in between closing stdout and terminating the
@@ -449,27 +471,31 b' class SubprocessIOChunker(object):'
449 result = None
471 result = None
450 stop_iteration = None
472 stop_iteration = None
451 try:
473 try:
452 result = self.output.next()
474 result = next(self.stdout)
453 except StopIteration as e:
475 except StopIteration as e:
454 stop_iteration = e
476 stop_iteration = e
455
477
456 if self.process.poll() and self._fail_on_return_code:
478 if self.process:
457 err = '%s' % ''.join(self.error)
479 return_code = self.process.poll()
458 raise EnvironmentError(
480 ret_code_fail = return_code is not None and return_code != 0
459 "Subprocess exited due to an error:\n" + err)
481 if ret_code_fail and self._fail_on_return_code:
482 self.stop_streams()
483 err = self.get_stderr()
484 raise OSError(
485 f"Subprocess exited (exit_code:{return_code}) due to an error during iteration:\n{err}")
460
486
461 if stop_iteration:
487 if stop_iteration:
462 raise stop_iteration
488 raise stop_iteration
463 return result
489 return result
464
490
465 def throw(self, type, value=None, traceback=None):
491 def throw(self, exc_type, value=None, traceback=None):
466 if self.output.length or not self.output.done_reading:
492 if self.stdout.length or not self.stdout.done_reading:
467 raise type(value)
493 raise exc_type(value)
468
494
469 def close(self):
495 def close(self):
470 if self._closed:
496 if self._closed:
471 return
497 return
472 self._closed = True
498
473 try:
499 try:
474 self.process.terminate()
500 self.process.terminate()
475 except Exception:
501 except Exception:
@@ -477,11 +503,11 b' class SubprocessIOChunker(object):'
477 if self._close_input_fd:
503 if self._close_input_fd:
478 os.close(self._close_input_fd)
504 os.close(self._close_input_fd)
479 try:
505 try:
480 self.output.close()
506 self.stdout.close()
481 except Exception:
507 except Exception:
482 pass
508 pass
483 try:
509 try:
484 self.error.close()
510 self.stderr.close()
485 except Exception:
511 except Exception:
486 pass
512 pass
487 try:
513 try:
@@ -489,6 +515,24 b' class SubprocessIOChunker(object):'
489 except Exception:
515 except Exception:
490 pass
516 pass
491
517
518 self._closed = True
519
520 def stop_streams(self):
521 getattr(self.stdout, 'stop', lambda: None)()
522 getattr(self.stderr, 'stop', lambda: None)()
523
524 def get_stdout(self):
525 if self._stdout:
526 return self._stdout
527 else:
528 return b''.join(self.stdout)
529
530 def get_stderr(self):
531 if self._stderr:
532 return self._stderr
533 else:
534 return b''.join(self.stderr)
535
492
536
493 def run_command(arguments, env=None):
537 def run_command(arguments, env=None):
494 """
538 """
@@ -506,9 +550,9 b' def run_command(arguments, env=None):'
506 if env:
550 if env:
507 _opts.update({'env': env})
551 _opts.update({'env': env})
508 proc = SubprocessIOChunker(cmd, **_opts)
552 proc = SubprocessIOChunker(cmd, **_opts)
509 return ''.join(proc), ''.join(proc.error)
553 return b''.join(proc), b''.join(proc.stderr)
510 except (EnvironmentError, OSError) as err:
554 except OSError as err:
511 cmd = ' '.join(cmd) # human friendly CMD
555 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
512 tb_err = ("Couldn't run subprocessio command (%s).\n"
556 tb_err = ("Couldn't run subprocessio command (%s).\n"
513 "Original error was:%s\n" % (cmd, err))
557 "Original error was:%s\n" % (cmd, err))
514 log.exception(tb_err)
558 log.exception(tb_err)
@@ -1,7 +1,7 b''
1 # -*- coding: utf-8 -*-
2 #
1 #
3 # Copyright (C) 2004-2009 Edgewall Software
2 # Copyright (C) 2004-2009 Edgewall Software
4 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
3 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
4 # Copyright (C) 2014-2023 RhodeCode GmbH
5 # All rights reserved.
5 # All rights reserved.
6 #
6 #
7 # This software is licensed as described in the file COPYING, which
7 # This software is licensed as described in the file COPYING, which
@@ -17,15 +17,15 b''
17 import difflib
17 import difflib
18
18
19
19
20 def get_filtered_hunks(fromlines, tolines, context=None,
20 def get_filtered_hunks(from_lines, to_lines, context=None,
21 ignore_blank_lines=False, ignore_case=False,
21 ignore_blank_lines: bool = False, ignore_case: bool = False,
22 ignore_space_changes=False):
22 ignore_space_changes: bool = False):
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
24 opcodes, grouped according to the ``context`` and ``ignore_*``
24 opcodes, grouped according to the ``context`` and ``ignore_*``
25 parameters.
25 parameters.
26
26
27 :param fromlines: list of lines corresponding to the old content
27 :param from_lines: list of lines corresponding to the old content
28 :param tolines: list of lines corresponding to the new content
28 :param to_lines: list of lines corresponding to the new content
29 :param ignore_blank_lines: differences about empty lines only are ignored
29 :param ignore_blank_lines: differences about empty lines only are ignored
30 :param ignore_case: upper case / lower case only differences are ignored
30 :param ignore_case: upper case / lower case only differences are ignored
31 :param ignore_space_changes: differences in amount of spaces are ignored
31 :param ignore_space_changes: differences in amount of spaces are ignored
@@ -37,27 +37,27 b' def get_filtered_hunks(fromlines, toline'
37 to filter out the results will come straight from the
37 to filter out the results will come straight from the
38 SequenceMatcher.
38 SequenceMatcher.
39 """
39 """
40 hunks = get_hunks(fromlines, tolines, context)
40 hunks = get_hunks(from_lines, to_lines, context)
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
42 hunks = filter_ignorable_lines(hunks, fromlines, tolines, context,
42 hunks = filter_ignorable_lines(hunks, from_lines, to_lines, context,
43 ignore_blank_lines, ignore_case,
43 ignore_blank_lines, ignore_case,
44 ignore_space_changes)
44 ignore_space_changes)
45 return hunks
45 return hunks
46
46
47
47
48 def get_hunks(fromlines, tolines, context=None):
48 def get_hunks(from_lines, to_lines, context=None):
49 """Generator yielding grouped opcodes describing differences .
49 """Generator yielding grouped opcodes describing differences .
50
50
51 See `get_filtered_hunks` for the parameter descriptions.
51 See `get_filtered_hunks` for the parameter descriptions.
52 """
52 """
53 matcher = difflib.SequenceMatcher(None, fromlines, tolines)
53 matcher = difflib.SequenceMatcher(None, from_lines, to_lines)
54 if context is None:
54 if context is None:
55 return (hunk for hunk in [matcher.get_opcodes()])
55 return (hunk for hunk in [matcher.get_opcodes()])
56 else:
56 else:
57 return matcher.get_grouped_opcodes(context)
57 return matcher.get_grouped_opcodes(context)
58
58
59
59
60 def filter_ignorable_lines(hunks, fromlines, tolines, context,
60 def filter_ignorable_lines(hunks, from_lines, to_lines, context,
61 ignore_blank_lines, ignore_case,
61 ignore_blank_lines, ignore_case,
62 ignore_space_changes):
62 ignore_space_changes):
63 """Detect line changes that should be ignored and emits them as
63 """Detect line changes that should be ignored and emits them as
@@ -67,11 +67,12 b' def filter_ignorable_lines(hunks, fromli'
67 See `get_filtered_hunks` for the parameter descriptions.
67 See `get_filtered_hunks` for the parameter descriptions.
68 """
68 """
69 def is_ignorable(tag, fromlines, tolines):
69 def is_ignorable(tag, fromlines, tolines):
70
70 if tag == 'delete' and ignore_blank_lines:
71 if tag == 'delete' and ignore_blank_lines:
71 if ''.join(fromlines) == '':
72 if b''.join(fromlines) == b'':
72 return True
73 return True
73 elif tag == 'insert' and ignore_blank_lines:
74 elif tag == 'insert' and ignore_blank_lines:
74 if ''.join(tolines) == '':
75 if b''.join(tolines) == b'':
75 return True
76 return True
76 elif tag == 'replace' and (ignore_case or ignore_space_changes):
77 elif tag == 'replace' and (ignore_case or ignore_space_changes):
77 if len(fromlines) != len(tolines):
78 if len(fromlines) != len(tolines):
@@ -81,7 +82,7 b' def filter_ignorable_lines(hunks, fromli'
81 if ignore_case:
82 if ignore_case:
82 input_str = input_str.lower()
83 input_str = input_str.lower()
83 if ignore_space_changes:
84 if ignore_space_changes:
84 input_str = ' '.join(input_str.split())
85 input_str = b' '.join(input_str.split())
85 return input_str
86 return input_str
86
87
87 for i in range(len(fromlines)):
88 for i in range(len(fromlines)):
@@ -101,7 +102,7 b' def filter_ignorable_lines(hunks, fromli'
101 else:
102 else:
102 prev = (tag, i1, i2, j1, j2)
103 prev = (tag, i1, i2, j1, j2)
103 else:
104 else:
104 if is_ignorable(tag, fromlines[i1:i2], tolines[j1:j2]):
105 if is_ignorable(tag, from_lines[i1:i2], to_lines[j1:j2]):
105 ignored_lines = True
106 ignored_lines = True
106 if prev:
107 if prev:
107 prev = 'equal', prev[1], i2, prev[3], j2
108 prev = 'equal', prev[1], i2, prev[3], j2
@@ -125,6 +126,7 b' def filter_ignorable_lines(hunks, fromli'
125 nn = n + n
126 nn = n + n
126
127
127 group = []
128 group = []
129
128 def all_equal():
130 def all_equal():
129 all(op[0] == 'equal' for op in group)
131 all(op[0] == 'equal' for op in group)
130 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
132 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
@@ -150,22 +152,30 b' def filter_ignorable_lines(hunks, fromli'
150 yield hunk
152 yield hunk
151
153
152
154
153 NO_NEWLINE_AT_END = '\\ No newline at end of file'
155 NO_NEWLINE_AT_END = b'\\ No newline at end of file'
156 LINE_TERM = b'\n'
154
157
155
158
156 def unified_diff(fromlines, tolines, context=None, ignore_blank_lines=0,
159 def unified_diff(from_lines, to_lines, context=None, ignore_blank_lines: bool = False,
157 ignore_case=0, ignore_space_changes=0, lineterm='\n'):
160 ignore_case: bool = False, ignore_space_changes: bool = False, lineterm=LINE_TERM) -> bytes:
158 """
161 """
159 Generator producing lines corresponding to a textual diff.
162 Generator producing lines corresponding to a textual diff.
160
163
161 See `get_filtered_hunks` for the parameter descriptions.
164 See `get_filtered_hunks` for the parameter descriptions.
162 """
165 """
163 # TODO: johbo: Check if this can be nicely integrated into the matching
166 # TODO: johbo: Check if this can be nicely integrated into the matching
167
164 if ignore_space_changes:
168 if ignore_space_changes:
165 fromlines = [l.strip() for l in fromlines]
169 from_lines = [l.strip() for l in from_lines]
166 tolines = [l.strip() for l in tolines]
170 to_lines = [l.strip() for l in to_lines]
167
171
168 for group in get_filtered_hunks(fromlines, tolines, context,
172 def _hunk_range(start, length) -> bytes:
173 if length != 1:
174 return b'%d,%d' % (start, length)
175 else:
176 return b'%d' % (start,)
177
178 for group in get_filtered_hunks(from_lines, to_lines, context,
169 ignore_blank_lines, ignore_case,
179 ignore_blank_lines, ignore_case,
170 ignore_space_changes):
180 ignore_space_changes):
171 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
181 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
@@ -173,37 +183,30 b' def unified_diff(fromlines, tolines, con'
173 i1, i2 = -1, -1 # support for Add changes
183 i1, i2 = -1, -1 # support for Add changes
174 if j1 == 0 and j2 == 0:
184 if j1 == 0 and j2 == 0:
175 j1, j2 = -1, -1 # support for Delete changes
185 j1, j2 = -1, -1 # support for Delete changes
176 yield '@@ -%s +%s @@%s' % (
186 yield b'@@ -%b +%b @@%b' % (
177 _hunk_range(i1 + 1, i2 - i1),
187 _hunk_range(i1 + 1, i2 - i1),
178 _hunk_range(j1 + 1, j2 - j1),
188 _hunk_range(j1 + 1, j2 - j1),
179 lineterm)
189 lineterm)
180 for tag, i1, i2, j1, j2 in group:
190 for tag, i1, i2, j1, j2 in group:
181 if tag == 'equal':
191 if tag == 'equal':
182 for line in fromlines[i1:i2]:
192 for line in from_lines[i1:i2]:
183 if not line.endswith(lineterm):
193 if not line.endswith(lineterm):
184 yield ' ' + line + lineterm
194 yield b' ' + line + lineterm
185 yield NO_NEWLINE_AT_END + lineterm
186 else:
187 yield ' ' + line
188 else:
189 if tag in ('replace', 'delete'):
190 for line in fromlines[i1:i2]:
191 if not line.endswith(lineterm):
192 yield '-' + line + lineterm
193 yield NO_NEWLINE_AT_END + lineterm
195 yield NO_NEWLINE_AT_END + lineterm
194 else:
196 else:
195 yield '-' + line
197 yield b' ' + line
196 if tag in ('replace', 'insert'):
198 else:
197 for line in tolines[j1:j2]:
199 if tag in ('replace', 'delete'):
200 for line in from_lines[i1:i2]:
198 if not line.endswith(lineterm):
201 if not line.endswith(lineterm):
199 yield '+' + line + lineterm
202 yield b'-' + line + lineterm
200 yield NO_NEWLINE_AT_END + lineterm
203 yield NO_NEWLINE_AT_END + lineterm
201 else:
204 else:
202 yield '+' + line
205 yield b'-' + line
203
206 if tag in ('replace', 'insert'):
204
207 for line in to_lines[j1:j2]:
205 def _hunk_range(start, length):
208 if not line.endswith(lineterm):
206 if length != 1:
209 yield b'+' + line + lineterm
207 return '%d,%d' % (start, length)
210 yield NO_NEWLINE_AT_END + lineterm
208 else:
211 else:
209 return '%d' % (start, )
212 yield b'+' + line
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -18,8 +18,7 b''
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21 import configparser
22 import configobj
23
22
24
23
25 class ContextINI(object):
24 class ContextINI(object):
@@ -53,17 +52,17 b' class ContextINI(object):'
53 with open(self.new_path, 'wb'):
52 with open(self.new_path, 'wb'):
54 pass
53 pass
55
54
56 config = configobj.ConfigObj(
55 parser = configparser.ConfigParser()
57 self.new_path, file_error=True, write_empty_values=True)
56 parser.read(self.ini_file_path)
58
57
59 for data in self.ini_params:
58 for data in self.ini_params:
60 section, ini_params = data.items()[0]
59 section, ini_params = list(data.items())[0]
61 key, val = ini_params.items()[0]
60 key, val = list(ini_params.items())[0]
62 if section not in config:
61 if section not in parser:
63 config[section] = {}
62 parser[section] = {}
64 config[section][key] = val
63 parser[section][key] = val
65
64 with open(self.ini_file_path, 'w') as f:
66 config.write()
65 parser.write(f)
67 return self.new_path
66 return self.new_path
68
67
69 def __exit__(self, exc_type, exc_val, exc_tb):
68 def __exit__(self, exc_type, exc_val, exc_tb):
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -21,8 +21,7 b' import pytest'
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver.remote import git
25
26
25
27 SAMPLE_REFS = {
26 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
@@ -49,7 +48,7 b' def test_discover_git_version(git_remote'
49
48
50
49
51 class TestGitFetch(object):
50 class TestGitFetch(object):
52 def setup(self):
51 def setup_method(self):
53 self.mock_repo = Mock()
52 self.mock_repo = Mock()
54 factory = Mock()
53 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
54 factory.repo = Mock(return_value=self.mock_repo)
@@ -67,8 +66,8 b' class TestGitFetch(object):'
67
66
68 def test_fetches_specified_commits(self):
67 def test_fetches_specified_commits(self):
69 selected_refs = {
68 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
71 }
73
72
74 def side_effect(determine_wants, *args, **kwargs):
73 def side_effect(determine_wants, *args, **kwargs):
@@ -80,7 +79,7 b' class TestGitFetch(object):'
80 mock_fetch.side_effect = side_effect
79 mock_fetch.side_effect = side_effect
81 self.remote_git.pull(
80 self.remote_git.pull(
82 wire={}, url='/tmp/', apply_refs=False,
81 wire={}, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
82 refs=list(selected_refs.keys()))
84 determine_wants = self.mock_repo.object_store.determine_wants_all
83 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
84 assert determine_wants.call_count == 0
86
85
@@ -93,7 +92,7 b' class TestGitFetch(object):'
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
93 }
95
94
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
96 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 mock_repo().get_refs.assert_called_once_with()
98 mock_repo().get_refs.assert_called_once_with()
@@ -114,7 +113,7 b' class TestReraiseSafeExceptions(object):'
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 for method_name, method in methods:
114 for method_name, method in methods:
116 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
117 assert method.im_func.__code__ == decorator.__code__
116 assert method.__func__.__code__ == decorator.__code__
118
117
119 @pytest.mark.parametrize('side_effect, expected_type', [
118 @pytest.mark.parametrize('side_effect, expected_type', [
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
@@ -138,10 +137,13 b' class TestReraiseSafeExceptions(object):'
138 class TestDulwichRepoWrapper(object):
137 class TestDulwichRepoWrapper(object):
139 def test_calls_close_on_delete(self):
138 def test_calls_close_on_delete(self):
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 assert repo is not None
144 del repo
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
145 close_mock.assert_called_once_with()
147 close_mock.assert_called_once_with()
146
148
147
149
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -21,9 +21,10 b' import traceback'
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hgcompat
27 from vcsserver.remote import hg
27
28
28
29
29 class TestDiff(object):
30 class TestDiff(object):
@@ -32,8 +33,8 b' class TestDiff(object):'
32 factory = Mock()
33 factory = Mock()
33 hg_remote = hg.HgRemote(factory)
34 hg_remote = hg.HgRemote(factory)
34 with patch('mercurial.patch.diff') as diff_mock:
35 with patch('mercurial.patch.diff') as diff_mock:
35 diff_mock.side_effect = LookupError(
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
36 'deadbeef', 'index', 'message')
37
37 with pytest.raises(Exception) as exc_info:
38 with pytest.raises(Exception) as exc_info:
38 hg_remote.diff(
39 hg_remote.diff(
39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
@@ -51,13 +52,13 b' class TestReraiseSafeExceptions(object):'
51 decorator = hg.reraise_safe_exceptions(None)
52 decorator = hg.reraise_safe_exceptions(None)
52 for method_name, method in methods:
53 for method_name, method in methods:
53 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
54 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
54 assert method.im_func.__code__ == decorator.__code__
55 assert method.__func__.__code__ == decorator.__code__
55
56
56 @pytest.mark.parametrize('side_effect, expected_type', [
57 @pytest.mark.parametrize('side_effect, expected_type', [
57 (hgcompat.Abort(), 'abort'),
58 (hgcompat.Abort('failed-abort'), 'abort'),
58 (hgcompat.InterventionRequired(), 'abort'),
59 (hgcompat.InterventionRequired('intervention-required'), 'abort'),
59 (hgcompat.RepoLookupError(), 'lookup'),
60 (hgcompat.RepoLookupError(), 'lookup'),
60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
61 (hgcompat.RepoError(), 'error'),
62 (hgcompat.RepoError(), 'error'),
62 (hgcompat.RequirementError(), 'requirement'),
63 (hgcompat.RequirementError(), 'requirement'),
63 ])
64 ])
@@ -75,10 +76,9 b' class TestReraiseSafeExceptions(object):'
75 @hg.reraise_safe_exceptions
76 @hg.reraise_safe_exceptions
76 def fake_method():
77 def fake_method():
77 try:
78 try:
78 raise hgcompat.Abort()
79 raise hgcompat.Abort('test-abort')
79 except:
80 except:
80 self.original_traceback = traceback.format_tb(
81 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
81 sys.exc_info()[2])
82 raise
82 raise
83
83
84 try:
84 try:
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -21,14 +21,14 b' import pytest'
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
24 LARGEFILES_CAPABILITY = b'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto._capabilities.__name__ == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
@@ -72,11 +72,6 b' def test_dynamic_capabilities_uses_large'
72 assert LARGEFILES_CAPABILITY in caps
72 assert LARGEFILES_CAPABILITY in caps
73
73
74
74
75 def test_hgsubversion_import():
76 from hgsubversion import svnrepo
77 assert svnrepo
78
79
80 @pytest.fixture
75 @pytest.fixture
81 def patched_capabilities(request):
76 def patched_capabilities(request):
82 """
77 """
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -15,17 +15,18 b''
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
19 import io
20 import threading
18 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
19 import msgpack
22 from SocketServer import TCPServer
20
21 from http.server import BaseHTTPRequestHandler
22 from socketserver import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
28
27
28 from vcsserver.hooks import HooksHttpClient
29 from vcsserver.lib.rc_json import json
29 from vcsserver import hooks
30 from vcsserver import hooks
30
31
31
32
@@ -44,7 +45,7 b' def get_hg_ui(extras=None):'
44 }
45 }
45 required_extras.update(extras)
46 required_extras.update(extras)
46 hg_ui = mercurial.ui.ui()
47 hg_ui = mercurial.ui.ui()
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
48
49
49 return hg_ui
50 return hg_ui
50
51
@@ -67,6 +68,7 b' def test_git_post_receive_is_disabled():'
67
68
68 def test_git_post_receive_calls_repo_size():
69 def test_git_post_receive_calls_repo_size():
69 extras = {'hooks': ['push', 'repo_size']}
70 extras = {'hooks': ['push', 'repo_size']}
71
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
72 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 hooks.git_post_receive(
73 hooks.git_post_receive(
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
74 None, '', {'RC_SCM_DATA': json.dumps(extras)})
@@ -81,6 +83,7 b' def test_git_post_receive_calls_repo_siz'
81
83
82 def test_git_post_receive_does_not_call_disabled_repo_size():
84 def test_git_post_receive_does_not_call_disabled_repo_size():
83 extras = {'hooks': ['push']}
85 extras = {'hooks': ['push']}
86
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
87 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 hooks.git_post_receive(
88 hooks.git_post_receive(
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
89 None, '', {'RC_SCM_DATA': json.dumps(extras)})
@@ -149,18 +152,19 b' class TestHooksHttpClient(object):'
149 client = hooks.HooksHttpClient(uri)
152 client = hooks.HooksHttpClient(uri)
150 assert client.hooks_uri == uri
153 assert client.hooks_uri == uri
151
154
152 def test_serialize_returns_json_string(self):
155 def test_serialize_returns_serialized_string(self):
153 client = hooks.HooksHttpClient('localhost:3000')
156 client = hooks.HooksHttpClient('localhost:3000')
154 hook_name = 'test'
157 hook_name = 'test'
155 extras = {
158 extras = {
156 'first': 1,
159 'first': 1,
157 'second': 'two'
160 'second': 'two'
158 }
161 }
159 result = client._serialize(hook_name, extras)
162 hooks_proto, result = client._serialize(hook_name, extras)
160 expected_result = json.dumps({
163 expected_result = msgpack.packb({
161 'method': hook_name,
164 'method': hook_name,
162 'extras': extras
165 'extras': extras,
163 })
166 })
167 assert hooks_proto == {'rc-hooks-protocol': 'msgpack.v1', 'Connection': 'keep-alive'}
164 assert result == expected_result
168 assert result == expected_result
165
169
166 def test_call_queries_http_server(self, http_mirror):
170 def test_call_queries_http_server(self, http_mirror):
@@ -171,10 +175,10 b' class TestHooksHttpClient(object):'
171 'second': 'two'
175 'second': 'two'
172 }
176 }
173 result = client(hook_name, extras)
177 result = client(hook_name, extras)
174 expected_result = {
178 expected_result = msgpack.unpackb(msgpack.packb({
175 'method': hook_name,
179 'method': hook_name,
176 'extras': extras
180 'extras': extras
177 }
181 }), raw=False)
178 assert result == expected_result
182 assert result == expected_result
179
183
180
184
@@ -211,9 +215,10 b' def http_mirror(request):'
211
215
212
216
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
217 class MirrorHttpHandler(BaseHTTPRequestHandler):
218
214 def do_POST(self):
219 def do_POST(self):
215 length = int(self.headers['Content-Length'])
220 length = int(self.headers['Content-Length'])
216 body = self.rfile.read(length).decode('utf-8')
221 body = self.rfile.read(length)
217 self.send_response(200)
222 self.send_response(200)
218 self.end_headers()
223 self.end_headers()
219 self.wfile.write(body)
224 self.wfile.write(body)
@@ -239,3 +244,43 b' class MirrorHttpServer(object):'
239 @property
244 @property
240 def uri(self):
245 def uri(self):
241 return '{}:{}'.format(self.ip_address, self.port)
246 return '{}:{}'.format(self.ip_address, self.port)
247
248
249 def test_hooks_http_client_init():
250 hooks_uri = 'http://localhost:8000'
251 client = HooksHttpClient(hooks_uri)
252 assert client.hooks_uri == hooks_uri
253
254
255 def test_hooks_http_client_call():
256 hooks_uri = 'http://localhost:8000'
257
258 method = 'test_method'
259 extras = {'key': 'value'}
260
261 with \
262 mock.patch('http.client.HTTPConnection') as mock_connection,\
263 mock.patch('msgpack.load') as mock_load:
264
265 client = HooksHttpClient(hooks_uri)
266
267 mock_load.return_value = {'result': 'success'}
268 response = mock.MagicMock()
269 response.status = 200
270 mock_connection.request.side_effect = None
271 mock_connection.getresponse.return_value = response
272
273 result = client(method, extras)
274
275 mock_connection.assert_called_with(hooks_uri)
276 mock_connection.return_value.request.assert_called_once()
277 assert result == {'result': 'success'}
278
279
280 def test_hooks_http_client_serialize():
281 method = 'test_method'
282 extras = {'key': 'value'}
283 headers, body = HooksHttpClient._serialize(method, extras)
284
285 assert headers == {'rc-hooks-protocol': HooksHttpClient.proto, 'Connection': 'keep-alive'}
286 assert msgpack.unpackb(body) == {'method': method, 'extras': extras}
@@ -30,13 +30,13 b' def data():'
30
30
31 def test_http_app_streaming_with_data(data, repeat, vcs_app):
31 def test_http_app_streaming_with_data(data, repeat, vcs_app):
32 app = vcs_app
32 app = vcs_app
33 for x in xrange(repeat / 10):
33 for x in range(repeat // 10):
34 response = app.post('/stream/git/', params=data)
34 response = app.post('/stream/git/', params=data)
35 assert response.status_code == 200
35 assert response.status_code == 200
36
36
37
37
38 def test_http_app_streaming_no_data(repeat, vcs_app):
38 def test_http_app_streaming_no_data(repeat, vcs_app):
39 app = vcs_app
39 app = vcs_app
40 for x in xrange(repeat / 10):
40 for x in range(repeat // 10):
41 response = app.post('/stream/git/')
41 response = app.post('/stream/git/')
42 assert response.status_code == 200
42 assert response.status_code == 200
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -23,6 +23,7 b' import vcsserver'
23 import tempfile
23 import tempfile
24 from vcsserver import hook_utils
24 from vcsserver import hook_utils
25 from vcsserver.tests.fixture import no_newline_id_generator
25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.str_utils import safe_bytes, safe_str
26 from vcsserver.utils import AttributeDict
27 from vcsserver.utils import AttributeDict
27
28
28
29
@@ -31,7 +32,7 b' class TestCheckRhodecodeHook(object):'
31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 with open(hook, 'wb') as f:
34 with open(hook, 'wb') as f:
34 f.write('dummy test')
35 f.write(b'dummy test')
35 result = hook_utils.check_rhodecode_hook(hook)
36 result = hook_utils.check_rhodecode_hook(hook)
36 assert result is False
37 assert result is False
37
38
@@ -47,7 +48,7 b' class TestCheckRhodecodeHook(object):'
47 def test_signatures(self, file_content, expected_result, tmpdir):
48 def test_signatures(self, file_content, expected_result, tmpdir):
48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 with open(hook, 'wb') as f:
50 with open(hook, 'wb') as f:
50 f.write(file_content)
51 f.write(safe_bytes(file_content))
51
52
52 result = hook_utils.check_rhodecode_hook(hook)
53 result = hook_utils.check_rhodecode_hook(hook)
53
54
@@ -71,8 +72,7 b' class BaseInstallHooks(object):'
71 content = hook_file.read()
72 content = hook_file.read()
72
73
73 expected_env = '#!{}'.format(executable)
74 expected_env = '#!{}'.format(executable)
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(vcsserver.__version__)
75 vcsserver.__version__)
76 assert content.strip().startswith(expected_env)
76 assert content.strip().startswith(expected_env)
77 assert expected_rc_version in content
77 assert expected_rc_version in content
78
78
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -42,8 +42,7 b' def test_applies_largefiles_patch_only_i'
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
45 ('a;b;c;query&foo=bar&auth_token=secret', 'a;b;c;query&foo=bar&auth_token=*****'),
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
46 ('', ''),
48 (None, None),
47 (None, None),
49 ('foo=bar', 'foo=bar'),
48 ('foo=bar', 'foo=bar'),
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -16,6 +16,7 b''
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import more_itertools
19
20
20 import dulwich.protocol
21 import dulwich.protocol
21 import mock
22 import mock
@@ -25,7 +26,7 b' import webtest'
25
26
26 from vcsserver import hooks, pygrack
27 from vcsserver import hooks, pygrack
27
28
28 # pylint: disable=redefined-outer-name,protected-access
29 from vcsserver.str_utils import ascii_bytes
29
30
30
31
31 @pytest.fixture()
32 @pytest.fixture()
@@ -75,8 +76,7 b' def test_pre_pull_hook_fails_with_sideba'
75 '0000',
76 '0000',
76 '0009done\n',
77 '0009done\n',
77 ])
78 ])
78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 with mock.patch('vcsserver.hooks.git_pre_pull', return_value=hooks.HookResponse(1, 'foo')):
79 return_value=hooks.HookResponse(1, 'foo')):
80 response = pygrack_app.post(
80 response = pygrack_app.post(
81 '/git-upload-pack', params=request,
81 '/git-upload-pack', params=request,
82 content_type='application/x-git-upload-pack')
82 content_type='application/x-git-upload-pack')
@@ -86,8 +86,8 b' def test_pre_pull_hook_fails_with_sideba'
86 packets = list(proto.read_pkt_seq())
86 packets = list(proto.read_pkt_seq())
87
87
88 expected_packets = [
88 expected_packets = [
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
89 b'NAK\n', b'\x02foo', b'\x02Pre pull hook failed: aborting\n',
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
90 b'\x01' + pygrack.GitRepository.EMPTY_PACK,
91 ]
91 ]
92 assert packets == expected_packets
92 assert packets == expected_packets
93
93
@@ -115,12 +115,16 b' def test_pull_has_hook_messages(pygrack_'
115 '0000',
115 '0000',
116 '0009done\n',
116 '0009done\n',
117 ])
117 ])
118
119 pre_pull = 'pre_pull_output'
120 post_pull = 'post_pull_output'
121
118 with mock.patch('vcsserver.hooks.git_pre_pull',
122 with mock.patch('vcsserver.hooks.git_pre_pull',
119 return_value=hooks.HookResponse(0, 'foo')):
123 return_value=hooks.HookResponse(0, pre_pull)):
120 with mock.patch('vcsserver.hooks.git_post_pull',
124 with mock.patch('vcsserver.hooks.git_post_pull',
121 return_value=hooks.HookResponse(1, 'bar')):
125 return_value=hooks.HookResponse(1, post_pull)):
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
126 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 return_value=['0008NAK\n0009subp\n0000']):
127 return_value=more_itertools.always_iterable([b'0008NAK\n0009subp\n0000'])):
124 response = pygrack_app.post(
128 response = pygrack_app.post(
125 '/git-upload-pack', params=request,
129 '/git-upload-pack', params=request,
126 content_type='application/x-git-upload-pack')
130 content_type='application/x-git-upload-pack')
@@ -129,13 +133,17 b' def test_pull_has_hook_messages(pygrack_'
129 proto = dulwich.protocol.Protocol(data.read, None)
133 proto = dulwich.protocol.Protocol(data.read, None)
130 packets = list(proto.read_pkt_seq())
134 packets = list(proto.read_pkt_seq())
131
135
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
136 assert packets == [b'NAK\n',
137 # pre-pull only outputs if IT FAILS as in != 0 ret code
138 #b'\x02pre_pull_output',
139 b'subp\n',
140 b'\x02post_pull_output']
133
141
134
142
135 def test_get_want_capabilities(pygrack_instance):
143 def test_get_want_capabilities(pygrack_instance):
136 data = io.BytesIO(
144 data = io.BytesIO(
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
145 b'0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
146 b'multi_ack side-band-64k ofs-delta\n00000009done\n')
139
147
140 request = webob.Request({
148 request = webob.Request({
141 'wsgi.input': data,
149 'wsgi.input': data,
@@ -146,20 +154,20 b' def test_get_want_capabilities(pygrack_i'
146 capabilities = pygrack_instance._get_want_capabilities(request)
154 capabilities = pygrack_instance._get_want_capabilities(request)
147
155
148 assert capabilities == frozenset(
156 assert capabilities == frozenset(
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
157 (b'ofs-delta', b'multi_ack', b'side-band-64k'))
150 assert data.tell() == 0
158 assert data.tell() == 0
151
159
152
160
153 @pytest.mark.parametrize('data,capabilities,expected', [
161 @pytest.mark.parametrize('data,capabilities,expected', [
154 ('foo', [], []),
162 ('foo', [], []),
155 ('', ['side-band-64k'], []),
163 ('', [pygrack.CAPABILITY_SIDE_BAND_64K], []),
156 ('', ['side-band'], []),
164 ('', [pygrack.CAPABILITY_SIDE_BAND], []),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
165 ('foo', [pygrack.CAPABILITY_SIDE_BAND_64K], [b'0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
166 ('foo', [pygrack.CAPABILITY_SIDE_BAND], [b'0008\x02foo']),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
167 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'03ed\x02' + b'f' * 1000]),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
168 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995, b'000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
169 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'fff0\x02' + b'f' * 65515, b'000a\x02fffff']),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
170 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995] * 65 + [b'0352\x02' + b'f' * 845]),
163 ], ids=[
171 ], ids=[
164 'foo-empty',
172 'foo-empty',
165 'empty-64k', 'empty',
173 'empty-64k', 'empty',
@@ -174,54 +182,59 b' def test_get_messages(pygrack_instance, '
174
182
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
183 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 # Unexpected response
184 # Unexpected response
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
185 ([b'unexpected_response[no_initial_header]'], [pygrack.CAPABILITY_SIDE_BAND_64K], 'foo', 'bar'),
178 # No sideband
186 # No sideband
179 ('no-sideband', [], 'foo', 'bar'),
187 ([b'no-sideband'], [], 'foo', 'bar'),
180 # No messages
188 # No messages
181 ('no-messages', ['side-band-64k'], '', ''),
189 ([b'no-messages'], [pygrack.CAPABILITY_SIDE_BAND_64K], '', ''),
182 ])
190 ])
183 def test_inject_messages_to_response_nothing_to_do(
191 def test_inject_messages_to_response_nothing_to_do(
184 pygrack_instance, response, capabilities, pre_pull_messages,
192 pygrack_instance, response, capabilities, pre_pull_messages, post_pull_messages):
185 post_pull_messages):
186 new_response = pygrack_instance._inject_messages_to_response(
187 response, capabilities, pre_pull_messages, post_pull_messages)
188
193
189 assert new_response == response
194 new_response = pygrack_instance._build_post_pull_response(
195 more_itertools.always_iterable(response), capabilities, pre_pull_messages, post_pull_messages)
196
197 assert list(new_response) == response
190
198
191
199
192 @pytest.mark.parametrize('capabilities', [
200 @pytest.mark.parametrize('capabilities', [
193 ['side-band'],
201 [pygrack.CAPABILITY_SIDE_BAND],
194 ['side-band-64k'],
202 [pygrack.CAPABILITY_SIDE_BAND_64K],
195 ])
203 ])
196 def test_inject_messages_to_response_single_element(pygrack_instance,
204 def test_inject_messages_to_response_single_element(pygrack_instance, capabilities):
197 capabilities):
205 response = [b'0008NAK\n0009subp\n0000']
198 response = ['0008NAK\n0009subp\n0000']
206 new_response = pygrack_instance._build_post_pull_response(
199 new_response = pygrack_instance._inject_messages_to_response(
207 more_itertools.always_iterable(response), capabilities, 'foo', 'bar')
200 response, capabilities, 'foo', 'bar')
201
208
202 expected_response = [
209 expected_response = b''.join([
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
210 b'0008NAK\n',
211 b'0008\x02foo',
212 b'0009subp\n',
213 b'0008\x02bar',
214 b'0000'])
204
215
205 assert new_response == expected_response
216 assert b''.join(new_response) == expected_response
206
217
207
218
208 @pytest.mark.parametrize('capabilities', [
219 @pytest.mark.parametrize('capabilities', [
209 ['side-band'],
220 [pygrack.CAPABILITY_SIDE_BAND],
210 ['side-band-64k'],
221 [pygrack.CAPABILITY_SIDE_BAND_64K],
222 ])
223 def test_inject_messages_to_response_multi_element(pygrack_instance, capabilities):
224 response = more_itertools.always_iterable([
225 b'0008NAK\n000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n0000'
211 ])
226 ])
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
227 new_response = pygrack_instance._build_post_pull_response(response, capabilities, 'foo', 'bar')
213 capabilities):
214 response = [
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 new_response = pygrack_instance._inject_messages_to_response(
217 response, capabilities, 'foo', 'bar')
218
228
219 expected_response = [
229 expected_response = b''.join([
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
230 b'0008NAK\n',
221 '000asubp4\n', '0008\x02bar', '0000'
231 b'0008\x02foo',
222 ]
232 b'000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n',
233 b'0008\x02bar',
234 b'0000'
235 ])
223
236
224 assert new_response == expected_response
237 assert b''.join(new_response) == expected_response
225
238
226
239
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
240 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
@@ -231,19 +244,52 b' def test_build_failed_pre_pull_response_'
231
244
232
245
233 @pytest.mark.parametrize('capabilities', [
246 @pytest.mark.parametrize('capabilities', [
234 ['side-band'],
247 [pygrack.CAPABILITY_SIDE_BAND],
235 ['side-band-64k'],
248 [pygrack.CAPABILITY_SIDE_BAND_64K],
236 ['side-band-64k', 'no-progress'],
249 [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'],
237 ])
250 ])
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
251 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 response = pygrack_instance._build_failed_pre_pull_response(
252 response = pygrack_instance._build_failed_pre_pull_response(capabilities, 'foo')
240 capabilities, 'foo')
241
253
242 expected_response = [
254 expected_response = [
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
255 b'0008NAK\n', b'0008\x02foo', b'0024\x02Pre pull hook failed: aborting\n',
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
256 b'%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5, pygrack.GitRepository.EMPTY_PACK),
245 pygrack.GitRepository.EMPTY_PACK),
257 pygrack.GitRepository.FLUSH_PACKET,
246 '0000',
247 ]
258 ]
248
259
249 assert response == expected_response
260 assert response == expected_response
261
262
263 def test_inject_messages_to_response_generator(pygrack_instance):
264
265 def response_generator():
266 response = [
267 # protocol start
268 b'0008NAK\n',
269 ]
270 response += [ascii_bytes(f'000asubp{x}\n') for x in range(1000)]
271 response += [
272 # protocol end
273 pygrack.GitRepository.FLUSH_PACKET
274 ]
275 for elem in response:
276 yield elem
277
278 new_response = pygrack_instance._build_post_pull_response(
279 response_generator(), [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'], 'PRE_PULL_MSG\n', 'POST_PULL_MSG\n')
280
281 assert iter(new_response)
282
283 expected_response = b''.join([
284 # start
285 b'0008NAK\n0012\x02PRE_PULL_MSG\n',
286 ] + [
287 # ... rest
288 ascii_bytes(f'000asubp{x}\n') for x in range(1000)
289 ] + [
290 # final message,
291 b'0013\x02POST_PULL_MSG\n0000',
292
293 ])
294
295 assert b''.join(new_response) == expected_response
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -25,10 +25,11 b' import pytest'
25 import webtest
25 import webtest
26
26
27 from vcsserver import scm_app
27 from vcsserver import scm_app
28 from vcsserver.str_utils import ascii_bytes
28
29
29
30
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
32 app = webtest.TestApp(scm_app.HgWeb(repo))
33 app = webtest.TestApp(scm_app.HgWeb(repo))
33
34
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
@@ -37,7 +38,7 b' def test_hg_does_not_accept_invalid_cmd('
37
38
38
39
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
41 config = (
42 config = (
42 ('paths', 'default', ''),
43 ('paths', 'default', ''),
43 )
44 )
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -22,22 +22,23 b' import sys'
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25 from vcsserver.str_utils import ascii_bytes
25
26
26
27
27 class KindaFilelike(object): # pragma: no cover
28 class FileLikeObj(object): # pragma: no cover
28
29
29 def __init__(self, data, size):
30 def __init__(self, data: bytes, size):
30 chunks = size / len(data)
31 chunks = size // len(data)
31
32
32 self.stream = self._get_stream(data, chunks)
33 self.stream = self._get_stream(data, chunks)
33
34
34 def _get_stream(self, data, chunks):
35 def _get_stream(self, data, chunks):
35 for x in xrange(chunks):
36 for x in range(chunks):
36 yield data
37 yield data
37
38
38 def read(self, n):
39 def read(self, n):
39
40
40 buffer_stream = ''
41 buffer_stream = b''
41 for chunk in self.stream:
42 for chunk in self.stream:
42 buffer_stream += chunk
43 buffer_stream += chunk
43 if len(buffer_stream) >= n:
44 if len(buffer_stream) >= n:
@@ -51,7 +52,7 b' class KindaFilelike(object): # pragma: '
51 def environ():
52 def environ():
52 """Delete coverage variables, as they make the tests fail."""
53 """Delete coverage variables, as they make the tests fail."""
53 env = dict(os.environ)
54 env = dict(os.environ)
54 for key in env.keys():
55 for key in list(env.keys()):
55 if key.startswith('COV_CORE_'):
56 if key.startswith('COV_CORE_'):
56 del env[key]
57 del env[key]
57
58
@@ -63,93 +64,92 b' def _get_python_args(script):'
63
64
64
65
65 def test_raise_exception_on_non_zero_return_code(environ):
66 def test_raise_exception_on_non_zero_return_code(environ):
66 args = _get_python_args('sys.exit(1)')
67 call_args = _get_python_args('raise ValueError("fail")')
67 with pytest.raises(EnvironmentError):
68 with pytest.raises(OSError):
68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
69 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
69
70
70
71
71 def test_does_not_fail_on_non_zero_return_code(environ):
72 def test_does_not_fail_on_non_zero_return_code(environ):
72 args = _get_python_args('sys.exit(1)')
73 call_args = _get_python_args('sys.stdout.write("hello"); sys.exit(1)')
73 output = ''.join(
74 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
74 subprocessio.SubprocessIOChunker(
75 output = b''.join(proc)
75 args, shell=False, fail_on_return_code=False, env=environ
76 )
77 )
78
76
79 assert output == ''
77 assert output == b'hello'
80
78
81
79
82 def test_raise_exception_on_stderr(environ):
80 def test_raise_exception_on_stderr(environ):
83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
81 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); time.sleep(1);')
84 with pytest.raises(EnvironmentError) as excinfo:
85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
86
82
87 assert 'exited due to an error:\nX' in str(excinfo.value)
83 with pytest.raises(OSError) as excinfo:
84 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
85
86 assert 'exited due to an error:\nWRITE_TO_STDERR' in str(excinfo.value)
88
87
89
88
90 def test_does_not_fail_on_stderr(environ):
89 def test_does_not_fail_on_stderr(environ):
91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
90 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); sys.stderr.flush; time.sleep(2);')
92 output = ''.join(
91 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_stderr=False, env=environ)
93 subprocessio.SubprocessIOChunker(
92 output = b''.join(proc)
94 args, shell=False, fail_on_stderr=False, env=environ
95 )
96 )
97
93
98 assert output == ''
94 assert output == b''
99
95
100
96
101 @pytest.mark.parametrize('size', [1, 10 ** 5])
97 @pytest.mark.parametrize('size', [
98 1,
99 10 ** 5
100 ])
102 def test_output_with_no_input(size, environ):
101 def test_output_with_no_input(size, environ):
103 print(type(environ))
102 call_args = _get_python_args(f'sys.stdout.write("X" * {size});')
104 data = 'X'
103 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ)
105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
104 output = b''.join(proc)
106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
107
105
108 assert output == data * size
106 assert output == ascii_bytes("X" * size)
109
107
110
108
111 @pytest.mark.parametrize('size', [1, 10 ** 5])
109 @pytest.mark.parametrize('size', [
110 1,
111 10 ** 5
112 ])
112 def test_output_with_no_input_does_not_fail(size, environ):
113 def test_output_with_no_input_does_not_fail(size, environ):
113 data = 'X'
114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
115 output = ''.join(
116 subprocessio.SubprocessIOChunker(
117 args, shell=False, fail_on_return_code=False, env=environ
118 )
119 )
120
114
121 print("{} {}".format(len(data * size), len(output)))
115 call_args = _get_python_args(f'sys.stdout.write("X" * {size}); sys.exit(1)')
122 assert output == data * size
116 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
117 output = b''.join(proc)
118
119 assert output == ascii_bytes("X" * size)
123
120
124
121
125 @pytest.mark.parametrize('size', [1, 10 ** 5])
122 @pytest.mark.parametrize('size', [
123 1,
124 10 ** 5
125 ])
126 def test_output_with_input(size, environ):
126 def test_output_with_input(size, environ):
127 data_len = size
127 data_len = size
128 inputstream = KindaFilelike('X', size)
128 inputstream = FileLikeObj(b'X', size)
129
129
130 # This acts like the cat command.
130 # This acts like the cat command.
131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
131 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 output = ''.join(
132 # note: in this tests we explicitly don't assign chunker to a variable and let it stream directly
133 subprocessio.SubprocessIOChunker(
133 output = b''.join(
134 args, shell=False, inputstream=inputstream, env=environ
134 subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
135 )
136 )
135 )
137
136
138 assert len(output) == data_len
137 assert len(output) == data_len
139
138
140
139
141 @pytest.mark.parametrize('size', [1, 10 ** 5])
140 @pytest.mark.parametrize('size', [
141 1,
142 10 ** 5
143 ])
142 def test_output_with_input_skipping_iterator(size, environ):
144 def test_output_with_input_skipping_iterator(size, environ):
143 data_len = size
145 data_len = size
144 inputstream = KindaFilelike('X', size)
146 inputstream = FileLikeObj(b'X', size)
145
147
146 # This acts like the cat command.
148 # This acts like the cat command.
147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
149 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
148
150
149 # Note: assigning the chunker makes sure that it is not deleted too early
151 # Note: assigning the chunker makes sure that it is not deleted too early
150 chunker = subprocessio.SubprocessIOChunker(
152 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
151 args, shell=False, inputstream=inputstream, env=environ
153 output = b''.join(proc.stdout)
152 )
153 output = ''.join(chunker.output)
154
154
155 assert len(output) == data_len
155 assert len(output) == data_len
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -20,10 +20,12 b' import mock'
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23 from vcsserver.str_utils import ascii_bytes
24
23
25
24 class MockPopen(object):
26 class MockPopen(object):
25 def __init__(self, stderr):
27 def __init__(self, stderr):
26 self.stdout = io.BytesIO('')
28 self.stdout = io.BytesIO(b'')
27 self.stderr = io.BytesIO(stderr)
29 self.stderr = io.BytesIO(stderr)
28 self.returncode = 1
30 self.returncode = 1
29
31
@@ -44,7 +46,7 b" INVALID_CERTIFICATE_STDERR = '\\n'.join(["
44 @pytest.mark.xfail(sys.platform == "cygwin",
46 @pytest.mark.xfail(sys.platform == "cygwin",
45 reason="SVN not packaged for Cygwin")
47 reason="SVN not packaged for Cygwin")
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 from vcsserver import svn
49 from vcsserver.remote import svn
48 factory = mock.Mock()
50 factory = mock.Mock()
49 factory.repo = mock.Mock(return_value=mock.Mock())
51 factory.repo = mock.Mock(return_value=mock.Mock())
50
52
@@ -52,31 +54,29 b' def test_import_remote_repository_certif'
52 remote.is_path_valid_repository = lambda wire, path: True
54 remote.is_path_valid_repository = lambda wire, path: True
53
55
54 with mock.patch('subprocess.Popen',
56 with mock.patch('subprocess.Popen',
55 return_value=MockPopen(stderr)):
57 return_value=MockPopen(ascii_bytes(stderr))):
56 with pytest.raises(Exception) as excinfo:
58 with pytest.raises(Exception) as excinfo:
57 remote.import_remote_repository({'path': 'path'}, 'url')
59 remote.import_remote_repository({'path': 'path'}, 'url')
58
60
59 expected_error_args = (
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
61
62
62 assert excinfo.value.args == expected_error_args
63 assert excinfo.value.args[0] == expected_error_args
63
64
64
65
65 def test_svn_libraries_can_be_imported():
66 def test_svn_libraries_can_be_imported():
66 import svn
67 import svn.client
67 import svn.client
68 assert svn.client is not None
68 assert svn.client is not None
69
69
70
70
71 @pytest.mark.parametrize('example_url, parts', [
71 @pytest.mark.parametrize('example_url, parts', [
72 ('http://server.com', (None, None, 'http://server.com')),
72 ('http://server.com', ('', '', 'http://server.com')),
73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 ('<script>', (None, None, '<script>')),
75 ('<script>', ('', '', '<script>')),
76 ('http://', (None, None, 'http://')),
76 ('http://', ('', '', 'http://')),
77 ])
77 ])
78 def test_username_password_extraction_from_url(example_url, parts):
78 def test_username_password_extraction_from_url(example_url, parts):
79 from vcsserver import svn
79 from vcsserver.remote import svn
80
80
81 factory = mock.Mock()
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
82 factory.repo = mock.Mock(return_value=mock.Mock())
@@ -85,3 +85,19 b' def test_username_password_extraction_fr'
85 remote.is_path_valid_repository = lambda wire, path: True
85 remote.is_path_valid_repository = lambda wire, path: True
86
86
87 assert remote.get_url_and_credentials(example_url) == parts
87 assert remote.get_url_and_credentials(example_url) == parts
88
89
90 @pytest.mark.parametrize('call_url', [
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
94 ])
95 def test_check_url(call_url):
96 from vcsserver.remote import svn
97 factory = mock.Mock()
98 factory.repo = mock.Mock(return_value=mock.Mock())
99
100 remote = svn.SvnRemote(factory)
101 remote.is_path_valid_repository = lambda wire, path: True
102 assert remote.check_url(call_url, {'dummy': 'config'})
103
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -19,25 +19,26 b' import wsgiref.simple_server'
19 import wsgiref.validate
19 import wsgiref.validate
20
20
21 from vcsserver import wsgi_app_caller
21 from vcsserver import wsgi_app_caller
22
22 from vcsserver.str_utils import ascii_bytes, safe_str
23
24 # pylint: disable=protected-access,too-many-public-methods
25
23
26
24
27 @wsgiref.validate.validator
25 @wsgiref.validate.validator
28 def demo_app(environ, start_response):
26 def demo_app(environ, start_response):
29 """WSGI app used for testing."""
27 """WSGI app used for testing."""
28
29 input_data = safe_str(environ['wsgi.input'].read(1024))
30
30 data = [
31 data = [
31 'Hello World!\n',
32 'Hello World!\n',
32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 f'input_data={input_data}\n',
33 ]
34 ]
34 for key, value in sorted(environ.items()):
35 for key, value in sorted(environ.items()):
35 data.append('%s=%s\n' % (key, value))
36 data.append(f'{key}={value}\n')
36
37
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write('Old school write method\n')
39 write(b'Old school write method\n')
39 write('***********************\n')
40 write(b'***********************\n')
40 return data
41 return list(map(ascii_bytes, data))
41
42
42
43
43 BASE_ENVIRON = {
44 BASE_ENVIRON = {
@@ -53,11 +54,11 b' BASE_ENVIRON = {'
53
54
54 def test_complete_environ():
55 def test_complete_environ():
55 environ = dict(BASE_ENVIRON)
56 environ = dict(BASE_ENVIRON)
56 data = "data"
57 data = b"data"
57 wsgi_app_caller._complete_environ(environ, data)
58 wsgi_app_caller._complete_environ(environ, data)
58 wsgiref.validate.check_environ(environ)
59 wsgiref.validate.check_environ(environ)
59
60
60 assert data == environ['wsgi.input'].read()
61 assert data == environ['wsgi.input'].read(1024)
61
62
62
63
63 def test_start_response():
64 def test_start_response():
@@ -81,16 +82,17 b' def test_start_response_with_error():'
81
82
82
83
83 def test_wsgi_app_caller():
84 def test_wsgi_app_caller():
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 environ = dict(BASE_ENVIRON)
85 environ = dict(BASE_ENVIRON)
86 input_data = 'some text'
86 input_data = 'some text'
87
88 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
87 responses, status, headers = caller.handle(environ, input_data)
89 responses, status, headers = caller.handle(environ, input_data)
88 response = ''.join(responses)
90 response = b''.join(responses)
89
91
90 assert status == '200 OK'
92 assert status == '200 OK'
91 assert headers == [('Content-Type', 'text/plain')]
93 assert headers == [('Content-Type', 'text/plain')]
92 assert response.startswith(
94 assert response.startswith(b'Old school write method\n***********************\n')
93 'Old school write method\n***********************\n')
95 assert b'Hello World!\n' in response
94 assert 'Hello World!\n' in response
96 assert b'foo.var=bla\n' in response
95 assert 'foo.var=bla\n' in response
97
96 assert 'input_data=%s\n' % input_data in response
98 assert ascii_bytes(f'input_data={input_data}\n') in response
@@ -1,6 +1,4 b''
1 # -*- coding: utf-8 -*-
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -14,13 +14,14 b''
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import base64
18 import time
18 import time
19 import logging
19 import logging
20
20
21 import msgpack
22
21 import vcsserver
23 import vcsserver
22 from vcsserver.utils import safe_str
24 from vcsserver.str_utils import safe_str, ascii_str
23
24
25
25 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
26
27
@@ -34,12 +35,24 b' def get_user_agent(environ):'
34 return environ.get('HTTP_USER_AGENT')
35 return environ.get('HTTP_USER_AGENT')
35
36
36
37
37 def get_vcs_method(environ):
38 def get_call_context(request) -> dict:
38 return environ.get('HTTP_X_RC_METHOD')
39 cc = {}
40 registry = request.registry
41 if hasattr(registry, 'vcs_call_context'):
42 cc.update({
43 'X-RC-Method': registry.vcs_call_context.get('method'),
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
45 })
46
47 return cc
39
48
40
49
41 def get_vcs_repo(environ):
50 def get_headers_call_context(environ, strict=True):
42 return environ.get('HTTP_X_RC_REPO_NAME')
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
53 return msgpack.unpackb(packed_cc)
54 elif strict:
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
43
56
44
57
45 class RequestWrapperTween(object):
58 class RequestWrapperTween(object):
@@ -54,23 +67,26 b' class RequestWrapperTween(object):'
54 log.debug('Starting request time measurement')
67 log.debug('Starting request time measurement')
55 response = None
68 response = None
56
69
57 ua = get_user_agent(request.environ)
58 vcs_method = get_vcs_method(request.environ)
59 repo_name = get_vcs_repo(request.environ)
60
61 try:
70 try:
62 response = self.handler(request)
71 response = self.handler(request)
63 finally:
72 finally:
73 ua = get_user_agent(request.environ)
74 call_context = get_call_context(request)
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
77
64 count = request.request_count()
78 count = request.request_count()
65 _ver_ = vcsserver.__version__
79 _ver_ = vcsserver.__version__
66 _path = safe_str(get_access_path(request.environ))
80 _path = safe_str(get_access_path(request.environ))
81
67 ip = '127.0.0.1'
82 ip = '127.0.0.1'
68 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
69 resp_code = getattr(response, 'status_code', 'UNDEFINED')
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
70
85
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
87
71 total = time.time() - start
88 total = time.time() - start
72
89
73 _view_path = "{}/{}@{}".format(_path, vcs_method, repo_name)
74 log.info(
90 log.info(
75 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
91 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
76 count, ip, request.environ.get('REQUEST_METHOD'),
92 count, ip, request.environ.get('REQUEST_METHOD'),
@@ -87,15 +103,15 b' class RequestWrapperTween(object):'
87 statsd.timing(
103 statsd.timing(
88 "vcsserver_req_timing.histogram", elapsed_time_ms,
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
89 tags=[
105 tags=[
90 "view_name:{}".format(match_route),
106 f"view_name:{match_route}",
91 "code:{}".format(resp_code)
107 f"code:{resp_code}"
92 ],
108 ],
93 use_decimals=False
109 use_decimals=False
94 )
110 )
95 statsd.incr(
111 statsd.incr(
96 "vcsserver_req_total", tags=[
112 "vcsserver_req_total", tags=[
97 "view_name:{}".format(match_route),
113 f"view_name:{match_route}",
98 "code:{}".format(resp_code)
114 f"code:{resp_code}"
99 ])
115 ])
100
116
101 return response
117 return response
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -20,91 +20,35 b' import hashlib'
20 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
21
21
22
22
23 def safe_int(val, default=None):
23 class AttributeDictBase(dict):
24 """
24 def __getstate__(self):
25 Returns int() of val if val is not convertable to int use default
25 odict = self.__dict__ # get attribute dictionary
26 instead
26 return odict
27
28 :param val:
29 :param default:
30 """
31
32 try:
33 val = int(val)
34 except (ValueError, TypeError):
35 val = default
36
37 return val
38
39
40 def safe_str(unicode_, to_encoding=None):
41 """
42 safe str function. Does few trick to turn unicode_ into string
43
44 :param unicode_: unicode to encode
45 :param to_encoding: encode to this type UTF8 default
46 :rtype: str
47 :returns: str object
48 """
49 to_encoding = to_encoding or ['utf8']
50 # if it's not basestr cast to str
51 if not isinstance(unicode_, basestring):
52 return str(unicode_)
53
54 if isinstance(unicode_, str):
55 return unicode_
56
57 if not isinstance(to_encoding, (list, tuple)):
58 to_encoding = [to_encoding]
59
27
60 for enc in to_encoding:
28 def __setstate__(self, dict):
61 try:
29 self.__dict__ = dict
62 return unicode_.encode(enc)
63 except UnicodeEncodeError:
64 pass
65
66 return unicode_.encode(to_encoding[0], 'replace')
67
68
69 def safe_unicode(str_, from_encoding=None):
70 """
71 safe unicode function. Does few trick to turn str_ into unicode
72
73 :param str_: string to decode
74 :param from_encoding: encode from this type UTF8 default
75 :rtype: unicode
76 :returns: unicode object
77 """
78 from_encoding = from_encoding or ['utf8']
79
30
80 if isinstance(str_, unicode):
81 return str_
82
83 if not isinstance(from_encoding, (list, tuple)):
84 from_encoding = [from_encoding]
85
86 try:
87 return unicode(str_)
88 except UnicodeDecodeError:
89 pass
90
91 for enc in from_encoding:
92 try:
93 return unicode(str_, enc)
94 except UnicodeDecodeError:
95 pass
96
97 return unicode(str_, from_encoding[0], 'replace')
98
99
100 class AttributeDict(dict):
101 def __getattr__(self, attr):
102 return self.get(attr, None)
103 __setattr__ = dict.__setitem__
31 __setattr__ = dict.__setitem__
104 __delattr__ = dict.__delitem__
32 __delattr__ = dict.__delitem__
105
33
106
34
35 class StrictAttributeDict(AttributeDictBase):
36 """
37 Strict Version of Attribute dict which raises an Attribute error when
38 requested attribute is not set
39 """
40 def __getattr__(self, attr):
41 try:
42 return self[attr]
43 except KeyError:
44 raise AttributeError('{} object has no attribute {}'.format(
45 self.__class__, attr))
46
47
48 class AttributeDict(AttributeDictBase):
49 def __getattr__(self, attr):
50 return self.get(attr, None)
51
52
107 def sha1(val):
53 def sha1(val):
108 return hashlib.sha1(val).hexdigest()
54 return hashlib.sha1(val).hexdigest()
109
110
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -17,17 +17,18 b''
17
17
18 from vcsserver.lib import rc_cache
18 from vcsserver.lib import rc_cache
19
19
20
20 class RemoteBase(object):
21 class RemoteBase(object):
21 EMPTY_COMMIT = '0' * 40
22 EMPTY_COMMIT = '0' * 40
22
23
23 def _region(self, wire):
24 def _region(self, wire):
24 cache_repo_id = wire.get('cache_repo_id', '')
25 cache_repo_id = wire.get('cache_repo_id', '')
25 cache_namespace_uid = 'cache_repo.{}'.format(cache_repo_id)
26 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
26 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
27 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
27
28
28 def _cache_on(self, wire):
29 def _cache_on(self, wire):
29 context = wire.get('context', '')
30 context = wire.get('context', '')
30 context_uid = '{}'.format(context)
31 context_uid = f'{context}'
31 repo_id = wire.get('repo_id', '')
32 repo_id = wire.get('repo_id', '')
32 cache = wire.get('cache', True)
33 cache = wire.get('cache', True)
33 cache_on = context and cache
34 cache_on = context and cache
@@ -37,10 +38,10 b' class RemoteBase(object):'
37 from vcsserver.lib import rc_cache
38 from vcsserver.lib import rc_cache
38 repo_id = wire.get('repo_id', '')
39 repo_id = wire.get('repo_id', '')
39 cache_repo_id = wire.get('cache_repo_id', '')
40 cache_repo_id = wire.get('cache_repo_id', '')
40 cache_namespace_uid = 'cache_repo.{}'.format(cache_repo_id)
41 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
41
42
42 if delete:
43 if delete:
43 rc_cache.clear_cache_namespace(
44 rc_cache.clear_cache_namespace(
44 'repo_object', cache_namespace_uid, invalidate=True)
45 'repo_object', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
45
46
46 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
47 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
@@ -1,5 +1,5 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
@@ -23,19 +23,20 b' import io'
23 import logging
23 import logging
24 import os
24 import os
25
25
26 from vcsserver.str_utils import ascii_bytes
26
27
27 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
28
29
29 DEV_NULL = open(os.devnull)
30 DEV_NULL = open(os.devnull)
30
31
31
32
32 def _complete_environ(environ, input_data):
33 def _complete_environ(environ, input_data: bytes):
33 """Update the missing wsgi.* variables of a WSGI environment.
34 """Update the missing wsgi.* variables of a WSGI environment.
34
35
35 :param environ: WSGI environment to update
36 :param environ: WSGI environment to update
36 :type environ: dict
37 :type environ: dict
37 :param input_data: data to be read by the app
38 :param input_data: data to be read by the app
38 :type input_data: str
39 :type input_data: bytes
39 """
40 """
40 environ.update({
41 environ.update({
41 'wsgi.version': (1, 0),
42 'wsgi.version': (1, 0),
@@ -92,20 +93,19 b' class WSGIAppCaller(object):'
92 :param environ: WSGI environment to update
93 :param environ: WSGI environment to update
93 :type environ: dict
94 :type environ: dict
94 :param input_data: data to be read by the app
95 :param input_data: data to be read by the app
95 :type input_data: str
96 :type input_data: str/bytes
96
97
97 :returns: a tuple with the contents, status and headers
98 :returns: a tuple with the contents, status and headers
98 :rtype: (list<str>, str, list<(str, str)>)
99 :rtype: (list<str>, str, list<(str, str)>)
99 """
100 """
100 _complete_environ(environ, input_data)
101 _complete_environ(environ, ascii_bytes(input_data, allow_bytes=True))
101 start_response = _StartResponse()
102 start_response = _StartResponse()
102 log.debug("Calling wrapped WSGI application")
103 log.debug("Calling wrapped WSGI application")
103 responses = self.app(environ, start_response)
104 responses = self.app(environ, start_response)
104 responses_list = list(responses)
105 responses_list = list(responses)
105 existing_responses = start_response.content
106 existing_responses = start_response.content
106 if existing_responses:
107 if existing_responses:
107 log.debug(
108 log.debug("Adding returned response to response written via write()")
108 "Adding returned response to response written via write()")
109 existing_responses.extend(responses_list)
109 existing_responses.extend(responses_list)
110 responses_list = existing_responses
110 responses_list = existing_responses
111 if hasattr(responses, 'close'):
111 if hasattr(responses, 'close'):
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
This diff has been collapsed as it changes many lines, (1102 lines changed) Show them Hide them
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now