python3: code change for py3 support...
super-admin - r1048:742e21ae python3
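The recurring theme of this commit is the Python 3 bytes/str split: data that goes to sockets, pipes and binary files becomes bytes (safe_bytes, ascii_bytes, b'' literals), while data that is logged or formatted stays str. A minimal sketch of that boundary, where the plain .encode('utf-8') call only stands in for what safe_bytes is assumed to do:

    # str -> bytes at the I/O boundary, bytes -> str for logging and formatting.
    # The encode/decode pair below is an illustration, not the actual helper.
    payload = 'ECHO'
    wire_bytes = payload.encode('utf-8')
    assert wire_bytes == b'ECHO'
    assert wire_bytes.decode('utf-8') == payload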
@@ -0,0 +1,1 b''
1 import simplejson as json
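This new one-line module (presumably vcsserver/lib/rc_json.py, the name is inferred from the imports below) centralizes the JSON implementation; later hunks replace every direct `import simplejson as json` with this shim, so switching the backing library later touches a single file. Illustrative usage, assuming that module path:

    # callers keep the familiar json API regardless of the backing library
    from vcsserver.lib.rc_json import json

    body = json.dumps({'message': 'object not found'})
    assert json.loads(body)['message'] == 'object not found'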
@@ -0,0 +1,53 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import pytest
19 from vcsserver.utils import ascii_bytes, ascii_str
20
21
22 @pytest.mark.parametrize('given, expected', [
23 ('a', b'a'),
24 (u'a', b'a'),
25 ])
26 def test_ascii_bytes(given, expected):
27 assert ascii_bytes(given) == expected
28
29
30 @pytest.mark.parametrize('given', [
31 'å',
32 'å'.encode('utf8')
33 ])
34 def test_ascii_bytes_raises(given):
35 with pytest.raises(ValueError):
36 ascii_bytes(given)
37
38
39 @pytest.mark.parametrize('given, expected', [
40 (b'a', 'a'),
41 ])
42 def test_ascii_str(given, expected):
43 assert ascii_str(given) == expected
44
45
46 @pytest.mark.parametrize('given', [
47 u'a',
48 'å'.encode('utf8'),
49 u'å'
50 ])
51 def test_ascii_str_raises(given):
52 with pytest.raises(ValueError):
53 ascii_str(given)
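The helpers under test are not part of this diff; a minimal sketch consistent with the assertions above (UnicodeEncodeError/UnicodeDecodeError are subclasses of ValueError, so pytest.raises(ValueError) matches them) could look like:

    def ascii_bytes(str_input) -> bytes:
        # only str input is accepted; non-ASCII text raises (UnicodeEncodeError is a ValueError)
        if not isinstance(str_input, str):
            raise ValueError(f'ascii_bytes cannot take {type(str_input)} as input')
        return str_input.encode('ascii')

    def ascii_str(bytes_input) -> str:
        # only bytes input is accepted; non-ASCII bytes raise (UnicodeDecodeError is a ValueError)
        if not isinstance(bytes_input, bytes):
            raise ValueError(f'ascii_str cannot take {type(bytes_input)} as input')
        return bytes_input.decode('ascii')

    assert ascii_bytes('a') == b'a'
    assert ascii_str(b'a') == 'a'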
@@ -66,14 +66,16 b' def obfuscate_qs(query_string):'
66 66 k, '={}'.format(v) if v else '') for k, v in parsed)
67 67
68 68
69 def raise_from_original(new_type):
69 def raise_from_original(new_type, org_exc: Exception):
70 70 """
71 71 Raise a new exception type with original args and traceback.
72 72 """
73
73 74 exc_type, exc_value, exc_traceback = sys.exc_info()
74 75 new_exc = new_type(*exc_value.args)
76
75 77 # store the original traceback into the new exc
76 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
78 new_exc._org_exc_tb = traceback.format_tb(exc_traceback)
77 79
78 80 try:
79 81 raise new_exc.with_traceback(exc_traceback)
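A condensed, self-contained version of the updated helper and its calling convention (wrappers later in this commit pass the original exception as the new second argument, e.g. raise_from_original(exceptions.LookupException(e), e)); the LookupException class below is only a stand-in for the vcsserver exception types:

    import sys
    import traceback

    class LookupException(Exception):
        """stand-in for vcsserver.exceptions types (assumption for this sketch)"""

    def raise_from_original(new_type, org_exc: Exception):
        exc_type, exc_value, exc_traceback = sys.exc_info()
        new_exc = new_type(*exc_value.args)
        # format_tb(tb) returns the formatted frames of the given traceback object
        new_exc._org_exc_tb = traceback.format_tb(exc_traceback)
        raise new_exc.with_traceback(exc_traceback)

    try:
        try:
            {}['missing-wire-key']
        except KeyError as e:
            raise_from_original(LookupException, e)
    except LookupException as new_exc:
        print(new_exc._org_exc_tb)  # original traceback preserved on the new exception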
@@ -81,8 +81,9 b' class SettingsMaker(object):'
81 81
82 82 @classmethod
83 83 def _bool_func(cls, input_val):
84 if isinstance(input_val, unicode):
85 input_val = input_val.encode('utf8')
84 if isinstance(input_val, bytes):
85 # decode to str
86 input_val = input_val.decode('utf8')
86 87 return str2bool(input_val)
87 88
88 89 @classmethod
@@ -23,7 +23,7 b' class EchoApp(object):'
23 23 status = '200 OK'
24 24 headers = [('Content-Type', 'text/plain')]
25 25 start_response(status, headers)
26 return ["ECHO"]
26 return [b"ECHO"]
27 27
28 28
29 29 class EchoAppStream(object):
@@ -42,7 +42,7 b' class EchoAppStream(object):'
42 42
43 43 def generator():
44 44 for _ in range(1000000):
45 yield "ECHO"
45 yield b"ECHO_STREAM"
46 46 return generator()
47 47
48 48
@@ -42,4 +42,4 b' def _assert_valid_config(config):'
42 42 config = config.copy()
43 43
44 44 # This is what git needs from config at this stage
45 config.pop('git_update_server_info')
45 config.pop(b'git_update_server_info')
@@ -19,13 +19,13 b' import re'
19 19 import logging
20 20 from wsgiref.util import FileWrapper
21 21
22 import simplejson as json
23 22 from pyramid.config import Configurator
24 23 from pyramid.response import Response, FileIter
25 24 from pyramid.httpexceptions import (
26 25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 26 HTTPUnprocessableEntity)
28 27
28 from vcsserver.lib.rc_json import json
29 29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 31 from vcsserver.utils import safe_int
@@ -42,7 +42,7 b' def write_response_error(http_exception,'
42 42 _exception = http_exception(content_type=content_type)
43 43 _exception.content_type = content_type
44 44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.text = json.dumps({'message': text})
46 46 log.debug('LFS: writing response of type %s to client with text:%s',
47 47 http_exception, text)
48 48 return _exception
@@ -18,8 +18,9 b''
18 18 import os
19 19 import pytest
20 20 from webtest.app import TestApp as WebObTestApp
21 import simplejson as json
22 21
22 from vcsserver.lib.rc_json import json
23 from vcsserver.utils import safe_bytes
23 24 from vcsserver.git_lfs.app import create_app
24 25
25 26
@@ -121,7 +122,7 b' class TestLFSApplication(object):'
121 122 if not os.path.isdir(os.path.dirname(oid_path)):
122 123 os.makedirs(os.path.dirname(oid_path))
123 124 with open(oid_path, 'wb') as f:
124 f.write('OID_CONTENT')
125 f.write(safe_bytes('OID_CONTENT'))
125 126
126 127 params = {'operation': 'download',
127 128 'objects': [{'oid': oid, 'size': '1024'}]}
@@ -212,7 +213,7 b' class TestLFSApplication(object):'
212 213 if not os.path.isdir(os.path.dirname(oid_path)):
213 214 os.makedirs(os.path.dirname(oid_path))
214 215 with open(oid_path, 'wb') as f:
215 f.write('OID_CONTENT')
216 f.write(safe_bytes('OID_CONTENT'))
216 217
217 218 params = {'oid': oid, 'size': '1024'}
218 219 response = git_lfs_app.post_json(
@@ -228,7 +229,7 b' class TestLFSApplication(object):'
228 229 if not os.path.isdir(os.path.dirname(oid_path)):
229 230 os.makedirs(os.path.dirname(oid_path))
230 231 with open(oid_path, 'wb') as f:
231 f.write('OID_CONTENT')
232 f.write(safe_bytes('OID_CONTENT'))
232 233
233 234 params = {'oid': oid, 'size': 11}
234 235 response = git_lfs_app.post_json(
@@ -252,7 +253,7 b' class TestLFSApplication(object):'
252 253 if not os.path.isdir(os.path.dirname(oid_path)):
253 254 os.makedirs(os.path.dirname(oid_path))
254 255 with open(oid_path, 'wb') as f:
255 f.write('OID_CONTENT')
256 f.write(safe_bytes('OID_CONTENT'))
256 257
257 258 response = git_lfs_app.get(
258 259 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
@@ -17,6 +17,7 b''
17 17
18 18 import os
19 19 import pytest
20 from vcsserver.utils import safe_bytes
20 21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 22
22 23
@@ -70,7 +71,7 b' class TestOidHandler(object):'
70 71 os.makedirs(os.path.dirname(store.oid_path))
71 72
72 73 with open(store.oid_path, 'wb') as f:
73 f.write('CONTENT')
74 f.write(safe_bytes('CONTENT'))
74 75
75 76 response, has_errors = oid_handler.exec_operation('download')
76 77
@@ -86,7 +87,7 b' class TestOidHandler(object):'
86 87 os.makedirs(os.path.dirname(store.oid_path))
87 88
88 89 with open(store.oid_path, 'wb') as f:
89 f.write('CONTENT')
90 f.write(safe_bytes('CONTENT'))
90 91 oid_handler.obj_size = 7
91 92 response, has_errors = oid_handler.exec_operation('upload')
92 93 assert has_errors is None
@@ -98,7 +99,7 b' class TestOidHandler(object):'
98 99 os.makedirs(os.path.dirname(store.oid_path))
99 100
100 101 with open(store.oid_path, 'wb') as f:
101 f.write('CONTENT')
102 f.write(safe_bytes('CONTENT'))
102 103
103 104 oid_handler.obj_size = 10240
104 105 response, has_errors = oid_handler.exec_operation('upload')
@@ -127,7 +128,7 b' class TestLFSStore(object):'
127 128
128 129 engine = lfs_store.get_engine(mode='wb')
129 130 with engine as f:
130 f.write('CONTENT')
131 f.write(safe_bytes('CONTENT'))
131 132
132 133 assert os.path.isfile(oid_location)
133 134
@@ -136,6 +137,6 b' class TestLFSStore(object):'
136 137 assert lfs_store.has_oid() is False
137 138 engine = lfs_store.get_engine(mode='wb')
138 139 with engine as f:
139 f.write('CONTENT')
140 f.write(safe_bytes('CONTENT'))
140 141
141 assert lfs_store.has_oid() is True
\ No newline at end of file
142 assert lfs_store.has_oid() is True
@@ -25,6 +25,7 b' import logging'
25 25 import pkg_resources
26 26
27 27 import vcsserver
28 from vcsserver.utils import safe_bytes
28 29
29 30 log = logging.getLogger(__name__)
30 31
@@ -70,11 +71,10 b' def install_git_hooks(repo_path, bare, e'
70 71 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
71 72 try:
72 73 with open(_hook_file, 'wb') as f:
73 template = template.replace(
74 '_TMPL_', vcsserver.__version__)
75 template = template.replace('_DATE_', timestamp)
76 template = template.replace('_ENV_', executable)
77 template = template.replace('_PATH_', path)
74 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
75 template = template.replace(b'_DATE_', safe_bytes(timestamp))
76 template = template.replace(b'_ENV_', safe_bytes(executable))
77 template = template.replace(b'_PATH_', safe_bytes(path))
78 78 f.write(template)
79 79 os.chmod(_hook_file, 0o755)
80 80 except IOError:
@@ -124,11 +124,10 b' def install_svn_hooks(repo_path, executa'
124 124
125 125 try:
126 126 with open(_hook_file, 'wb') as f:
127 template = template.replace(
128 '_TMPL_', vcsserver.__version__)
129 template = template.replace('_DATE_', timestamp)
130 template = template.replace('_ENV_', executable)
131 template = template.replace('_PATH_', path)
127 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.__version__))
128 template = template.replace(b'_DATE_', safe_bytes(timestamp))
129 template = template.replace(b'_ENV_', safe_bytes(executable))
130 template = template.replace(b'_PATH_', safe_bytes(path))
132 131
133 132 f.write(template)
134 133 os.chmod(_hook_file, 0o755)
@@ -141,16 +140,16 b' def install_svn_hooks(repo_path, executa'
141 140
142 141
143 142 def get_version_from_hook(hook_path):
144 version = ''
143 version = b''
145 144 hook_content = read_hook_content(hook_path)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
145 matches = re.search(rb'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
147 146 if matches:
148 147 try:
149 148 version = matches.groups()[0]
150 149 log.debug('got version %s from hooks.', version)
151 150 except Exception:
152 151 log.exception("Exception while reading the hook version.")
153 return version.replace("'", "")
152 return version.replace(b"'", b"")
154 153
155 154
156 155 def check_rhodecode_hook(hook_path):
@@ -24,20 +24,23 b' import logging'
24 24 import collections
25 25 import importlib
26 26 import base64
27 import msgpack
27 28
28 29 from http.client import HTTPConnection
29 30
30 31
31 32 import mercurial.scmutil
32 33 import mercurial.node
33 import simplejson as json
34 34
35 from vcsserver.lib.rc_json import json
35 36 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.utils import safe_bytes
36 38
37 39 log = logging.getLogger(__name__)
38 40
39 41
40 42 class HooksHttpClient(object):
43 proto = 'msgpack.v1'
41 44 connection = None
42 45
43 46 def __init__(self, hooks_uri):
@@ -45,30 +48,33 b' class HooksHttpClient(object):'
45 48
46 49 def __call__(self, method, extras):
47 50 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
51 # binary msgpack body
52 headers, body = self._serialize(method, extras)
49 53 try:
50 connection.request('POST', '/', body)
51 except Exception:
52 log.error('Hooks calling Connection failed on %s', connection.__dict__)
54 connection.request('POST', '/', body, headers)
55 except Exception as error:
56 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
53 57 raise
54 58 response = connection.getresponse()
55
59 try:
60 return msgpack.load(response, raw=False)
61 except Exception:
56 62 response_data = response.read()
57
58 try:
59 return json.loads(response_data)
60 except Exception:
61 63 log.exception('Failed to decode hook response json data. '
62 64 'response_code:%s, raw_data:%s',
63 65 response.status, response_data)
64 66 raise
65 67
66 def _serialize(self, hook_name, extras):
68 @classmethod
69 def _serialize(cls, hook_name, extras):
67 70 data = {
68 71 'method': hook_name,
69 72 'extras': extras
70 73 }
71 return json.dumps(data)
74 headers = {
75 'rc-hooks-protocol': cls.proto
76 }
77 return headers, msgpack.packb(data)
72 78
73 79
74 80 class HooksDummyClient(object):
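A round trip of the new wire format: the client packs the call with msgpack and tags it with the rc-hooks-protocol header, and the receiving end is expected to decode with raw=False so keys and values come back as str (as the client itself does with msgpack.load on the HTTP response). The extras payload below is a made-up placeholder:

    import io
    import msgpack

    headers = {'rc-hooks-protocol': 'msgpack.v1'}
    body = msgpack.packb({'method': 'pre_pull', 'extras': {'username': 'admin'}})

    # raw=False mirrors the msgpack.load(response, raw=False) decode in __call__
    decoded = msgpack.load(io.BytesIO(body), raw=False)
    assert decoded == {'method': 'pre_pull', 'extras': {'username': 'admin'}}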
@@ -113,7 +119,7 b' class GitMessageWriter(RemoteMessageWrit'
113 119 self.stdout = stdout or sys.stdout
114 120
115 121 def write(self, message):
116 self.stdout.write(message.encode('utf-8'))
122 self.stdout.write(safe_bytes(message))
117 123
118 124
119 125 class SvnMessageWriter(RemoteMessageWriter):
@@ -439,15 +445,18 b' def git_pre_pull(extras):'
439 445 :return: status code of the hook. 0 for success.
440 446 :rtype: int
441 447 """
448
442 449 if 'pull' not in extras['hooks']:
443 450 return HookResponse(0, '')
444 451
445 452 stdout = io.BytesIO()
446 453 try:
447 454 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
455
448 456 except Exception as error:
457 log.exception('Failed to call pre_pull hook')
449 458 status = 128
450 stdout.write('ERROR: %s\n' % str(error))
459 stdout.write(safe_bytes(f'ERROR: {error}\n'))
451 460
452 461 return HookResponse(status, stdout.getvalue())
453 462
@@ -470,7 +479,7 b' def git_post_pull(extras):'
470 479 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 480 except Exception as error:
472 481 status = 128
473 stdout.write('ERROR: %s\n' % error)
482 stdout.write(safe_bytes(f'ERROR: {error}\n'))
474 483
475 484 return HookResponse(status, stdout.getvalue())
476 485
@@ -15,6 +15,7 b''
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 import io
18 19 import os
19 20 import sys
20 21 import base64
@@ -28,9 +29,7 b' import tempfile'
28 29 import psutil
29 30
30 31 from itertools import chain
31 from io import StringIO
32 32
33 import simplejson as json
34 33 import msgpack
35 34 import configparser
36 35
@@ -38,6 +37,7 b' from pyramid.config import Configurator'
38 37 from pyramid.wsgi import wsgiapp
39 38 from pyramid.response import Response
40 39
40 from vcsserver.lib.rc_json import json
41 41 from vcsserver.config.settings_maker import SettingsMaker
42 42 from vcsserver.utils import safe_int
43 43 from vcsserver.lib.statsd_client import StatsdClient
@@ -317,7 +317,8 b' class HTTPApplication(object):'
317 317
318 318 def _vcs_view_params(self, request):
319 319 remote = self._remotes[request.matchdict['backend']]
320 payload = msgpack.unpackb(request.body, use_list=True)
320 payload = msgpack.unpackb(request.body, use_list=True, raw=False)
321
321 322 method = payload.get('method')
322 323 params = payload['params']
323 324 wire = params.get('wire')
@@ -400,7 +401,7 b' class HTTPApplication(object):'
400 401 resp = {
401 402 'id': payload_id,
402 403 'error': {
403 'message': e.message,
404 'message': str(e),
404 405 'traceback': tb_info,
405 406 'org_exc': org_exc_name,
406 407 'org_exc_tb': org_exc_tb,
@@ -432,7 +433,7 b' class HTTPApplication(object):'
432 433 raise
433 434
434 435 def get_chunked_data(method_resp):
435 stream = StringIO(method_resp)
436 stream = io.BytesIO(method_resp)
436 437 while 1:
437 438 chunk = stream.read(chunk_size)
438 439 if not chunk:
@@ -577,7 +578,7 b' class HTTPApplication(object):'
577 578 repo_name = environ['HTTP_X_RC_REPO_NAME']
578 579 packed_config = base64.b64decode(
579 580 environ['HTTP_X_RC_REPO_CONFIG'])
580 config = msgpack.unpackb(packed_config)
581 config = msgpack.unpackb(packed_config, raw=False)
581 582
582 583 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
583 584 self.set_env_from_config(environ, config)
@@ -21,6 +21,7 b' import logging'
21 21
22 22 import msgpack
23 23 import redis
24 import pickle
24 25
25 26 from dogpile.cache.api import CachedValue
26 27 from dogpile.cache.backends import memory as memory_backend
@@ -32,7 +33,7 b' from dogpile.cache.util import memoized_'
32 33 from pyramid.settings import asbool
33 34
34 35 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.utils import safe_str, safe_unicode
36 from vcsserver.utils import safe_str
36 37
37 38
38 39 _default_max_size = 1024
@@ -265,7 +266,7 b' class BaseRedisBackend(redis_backend.Red'
265 266
266 267 def get_mutex(self, key):
267 268 if self.distributed_lock:
268 lock_key = '_lock_{0}'.format(safe_unicode(key))
269 lock_key = '_lock_{0}'.format(safe_str(key))
269 270 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
270 271 auto_renewal=self._lock_auto_renewal)
271 272 else:
@@ -23,7 +23,7 b' import decorator'
23 23
24 24 from dogpile.cache import CacheRegion
25 25
26 from vcsserver.utils import safe_str, sha1
26 from vcsserver.utils import safe_bytes, sha1
27 27 from vcsserver.lib.rc_cache import region_meta
28 28
29 29 log = logging.getLogger(__name__)
@@ -130,7 +130,7 b' def compute_key_from_params(*args):'
130 130 """
131 131 Helper to compute key from given params to be used in cache manager
132 132 """
133 return sha1("_".join(map(safe_str, args)))
133 return sha1(safe_bytes("_".join(map(str, args))))
134 134
135 135
136 136 def backend_key_generator(backend):
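The cache-key change works because hashlib in Python 3 only accepts bytes; a standalone equivalent of the new compute_key_from_params line, with the utf-8 encode and hexdigest standing in for safe_bytes and the sha1 helper (both assumptions here):

    import hashlib

    def compute_key_from_params(*args):
        key_source = "_".join(map(str, args))             # e.g. 'repo_1_abcdef'
        return hashlib.sha1(key_source.encode('utf-8')).hexdigest()

    print(compute_key_from_params('repo_1', 'abcdef'))    # stable 40-char hex digest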
@@ -21,11 +21,13 b' import os'
21 21 import socket
22 22 import logging
23 23
24 import simplejson as json
25 24 import dulwich.protocol
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
26 26 from webob import Request, Response, exc
27 27
28 from vcsserver.lib.rc_json import json
28 29 from vcsserver import hooks, subprocessio
30 from vcsserver.utils import ascii_bytes
29 31
30 32
31 33 log = logging.getLogger(__name__)
@@ -62,21 +64,20 b' class FileWrapper(object):'
62 64 class GitRepository(object):
63 65 """WSGI app for handling Git smart protocol endpoints."""
64 66
65 git_folder_signature = frozenset(
66 ('config', 'head', 'info', 'objects', 'refs'))
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
67 68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
69 c for c in commands))
69 valid_accepts = frozenset(('application/x-{}-result'.format(c) for c in commands))
70 70
71 71 # The last bytes are the SHA1 of the first 12 bytes.
72 72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 FLUSH_PACKET = b"0000"
77 77
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 extras):
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
79
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
80 81 files = frozenset(f.lower() for f in os.listdir(content_path))
81 82 valid_dir_signature = self.git_folder_signature.issubset(files)
82 83
@@ -123,7 +124,7 b' class GitRepository(object):'
123 124 # It reads binary, per number of bytes specified.
124 125 # if you do add '\n' as part of data, count it.
125 126 server_advert = '# service=%s\n' % git_command
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
127 packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
127 128 try:
128 129 gitenv = dict(os.environ)
129 130 # forget all configs
@@ -133,15 +134,15 b' class GitRepository(object):'
133 134 out = subprocessio.SubprocessIOChunker(
134 135 command,
135 136 env=gitenv,
136 starting_values=[packet_len + server_advert + '0000'],
137 starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
137 138 shell=False
138 139 )
139 except EnvironmentError:
140 except OSError:
140 141 log.exception('Error processing command')
141 142 raise exc.HTTPExpectationFailed()
142 143
143 144 resp = Response()
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
145 resp.content_type = f'application/x-{git_command}-advertisement'
145 146 resp.charset = None
146 147 resp.app_iter = out
147 148
@@ -166,34 +167,100 b' class GitRepository(object):'
166 167 We also print in the error output a message explaining why the command
167 168 was aborted.
168 169
169 If aditionally, the user is accepting messages we send them the output
170 If additionally, the user is accepting messages we send them the output
170 171 of the pre-pull hook.
171 172
172 173 Note that for clients not supporting side-band we just send them the
173 174 empty PACK file.
174 175 """
176
175 177 if self.SIDE_BAND_CAPS.intersection(capabilities):
176 178 response = []
177 179 proto = dulwich.protocol.Protocol(None, response.append)
178 proto.write_pkt_line('NAK\n')
179 self._write_sideband_to_proto(pre_pull_messages, proto,
180 capabilities)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
181
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
181 183 # N.B.(skreft): Do not change the sideband channel to 3, as that
182 184 # produces a fatal error in the client:
183 185 # fatal: error in sideband demultiplexer
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
185 proto.write_sideband(1, self.EMPTY_PACK)
186 proto.write_sideband(
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
189 proto.write_sideband(
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
186 192
187 # writes 0000
193 # writes b"0000" as default
188 194 proto.write_pkt_line(None)
189 195
190 196 return response
191 197 else:
192 return [self.EMPTY_PACK]
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
199
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
201 """
202 Given a list response we inject the post-pull messages.
203
204 We only inject the messages if the client supports sideband, and the
205 response has the format:
206 0008NAK\n...0000
207
208 Note that we do not check the no-progress capability as by default, git
209 sends it, which effectively would block all messages.
210 """
211
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
213 return response
214
215 if not start_message and not end_message:
216 return response
217
218 try:
219 iter(response)
220 # iterator probably will work, we continue
221 except TypeError:
222 raise TypeError(f'response must be an iterator: got {type(response)}')
223 if isinstance(response, (list, tuple)):
224 raise TypeError(f'response must be an iterator: got {type(response)}')
225
226 def injected_response():
193 227
194 def _write_sideband_to_proto(self, data, proto, capabilities):
228 do_loop = 1
229 header_injected = 0
230 next_item = None
231 has_item = False
232 while do_loop:
233
234 try:
235 next_item = next(response)
236 except StopIteration:
237 do_loop = 0
238
239 if has_item:
240 # last item ! alter it now
241 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
242 new_response = [item[:-4]]
243 new_response.extend(self._get_messages(end_message, capabilities))
244 new_response.append(self.FLUSH_PACKET)
245 item = b''.join(new_response)
246
247 yield item
248 has_item = True
249 item = next_item
250
251 # alter item if it's the initial chunk
252 if not header_injected and item.startswith(b'0008NAK\n'):
253 new_response = [b'0008NAK\n']
254 new_response.extend(self._get_messages(start_message, capabilities))
255 new_response.append(item[8:])
256 item = b''.join(new_response)
257 header_injected = 1
258
259 return injected_response()
260
261 def _write_sideband_to_proto(self, proto, data, capabilities):
195 262 """
196 Write the data to the proto's sideband number 2.
263 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
197 264
198 265 We do not use dulwich's write_sideband directly as it only supports
199 266 side-band-64k.
@@ -204,68 +271,27 b' class GitRepository(object):'
204 271 # N.B.(skreft): The values below are explained in the pack protocol
205 272 # documentation, section Packfile Data.
206 273 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
207 if 'side-band-64k' in capabilities:
274 if CAPABILITY_SIDE_BAND_64K in capabilities:
208 275 chunk_size = 65515
209 elif 'side-band' in capabilities:
276 elif CAPABILITY_SIDE_BAND in capabilities:
210 277 chunk_size = 995
211 278 else:
212 279 return
213 280
214 chunker = (
215 data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
281 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
216 282
217 283 for chunk in chunker:
218 proto.write_sideband(2, chunk)
284 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
219 285
220 286 def _get_messages(self, data, capabilities):
221 287 """Return a list with packets for sending data in sideband number 2."""
222 288 response = []
223 289 proto = dulwich.protocol.Protocol(None, response.append)
224 290
225 self._write_sideband_to_proto(data, proto, capabilities)
291 self._write_sideband_to_proto(proto, data, capabilities)
226 292
227 293 return response
228 294
229 def _inject_messages_to_response(self, response, capabilities,
230 start_messages, end_messages):
231 """
232 Given a list response we inject the pre/post-pull messages.
233
234 We only inject the messages if the client supports sideband, and the
235 response has the format:
236 0008NAK\n...0000
237
238 Note that we do not check the no-progress capability as by default, git
239 sends it, which effectively would block all messages.
240 """
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
242 return response
243
244 if not start_messages and not end_messages:
245 return response
246
247 # make a list out of response if it's an iterator
248 # so we can investigate it for message injection.
249 if hasattr(response, '__iter__'):
250 response = list(response)
251
252 if (not response[0].startswith('0008NAK\n') or
253 not response[-1].endswith('0000')):
254 return response
255
256 new_response = ['0008NAK\n']
257 new_response.extend(self._get_messages(start_messages, capabilities))
258 if len(response) == 1:
259 new_response.append(response[0][8:-4])
260 else:
261 new_response.append(response[0][8:])
262 new_response.extend(response[1:-1])
263 new_response.append(response[-1][:-4])
264 new_response.extend(self._get_messages(end_messages, capabilities))
265 new_response.append('0000')
266
267 return new_response
268
269 295 def backend(self, request, environ):
270 296 """
271 297 WSGI Response producer for HTTP POST Git Smart HTTP requests.
@@ -304,11 +330,11 b' class GitRepository(object):'
304 330 inputstream = request.body_file_seekable
305 331
306 332 resp = Response()
307 resp.content_type = ('application/x-%s-result' %
308 git_command.encode('utf8'))
333 resp.content_type = 'application/x-{}-result'.format(git_command)
309 334 resp.charset = None
310 335
311 336 pre_pull_messages = ''
337 # Upload-pack == clone
312 338 if git_command == 'git-upload-pack':
313 339 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
314 340 if status != 0:
@@ -326,7 +352,7 b' class GitRepository(object):'
326 352
327 353 out = subprocessio.SubprocessIOChunker(
328 354 cmd,
329 inputstream=inputstream,
355 input_stream=inputstream,
330 356 env=gitenv,
331 357 cwd=self.content_path,
332 358 shell=False,
@@ -346,7 +372,7 b' class GitRepository(object):'
346 372 log.debug('handling cmd %s', cmd)
347 373 output = subprocessio.SubprocessIOChunker(
348 374 cmd,
349 inputstream=inputstream,
375 input_stream=inputstream,
350 376 env=gitenv,
351 377 cwd=self.content_path,
352 378 shell=False,
@@ -357,10 +383,11 b' class GitRepository(object):'
357 383 for _ in output:
358 384 pass
359 385
386 # Upload-pack == clone
360 387 if git_command == 'git-upload-pack':
361 388 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
362 resp.app_iter = self._inject_messages_to_response(
363 out, capabilities, pre_pull_messages, post_pull_messages)
389
390 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
364 391 else:
365 392 resp.app_iter = out
366 393
@@ -40,7 +40,7 b' from dulwich.repo import Repo as Dulwich'
40 40 from dulwich.server import update_server_info
41 41
42 42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.utils import safe_str, safe_int
44 44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 45 from vcsserver.hgcompat import (
46 46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
@@ -56,13 +56,6 b" PEELED_REF_MARKER = '^{}'"
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 def str_to_dulwich(value):
60 """
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
62 """
63 return value.decode(settings.WIRE_ENCODING)
64
65
66 59 def reraise_safe_exceptions(func):
67 60 """Converts Dulwich exceptions to something neutral."""
68 61
@@ -116,7 +109,7 b' class GitFactory(RepoFactory):'
116 109 if use_libgit2:
117 110 return Repository(wire['path'])
118 111 else:
119 repo_path = str_to_dulwich(wire['path'])
112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
120 113 return Repo(repo_path)
121 114
122 115 def repo(self, wire, create=False, use_libgit2=False):
@@ -160,7 +153,7 b' class GitRemote(RemoteBase):'
160 153 def discover_git_version(self):
161 154 stdout, _ = self.run_git_command(
162 155 {}, ['--version'], _bare=True, _safe=True)
163 prefix = 'git version'
156 prefix = b'git version'
164 157 if stdout.startswith(prefix):
165 158 stdout = stdout[len(prefix):]
166 159 return stdout.strip()
@@ -186,6 +179,7 b' class GitRemote(RemoteBase):'
186 179 def assert_correct_path(self, wire):
187 180 cache_on, context_uid, repo_id = self._cache_on(wire)
188 181 region = self._region(wire)
182
189 183 @region.conditional_cache_on_arguments(condition=cache_on)
190 184 def _assert_correct_path(_context_uid, _repo_id):
191 185 try:
@@ -219,6 +213,7 b' class GitRemote(RemoteBase):'
219 213 def blob_raw_length(self, wire, sha):
220 214 cache_on, context_uid, repo_id = self._cache_on(wire)
221 215 region = self._region(wire)
216
222 217 @region.conditional_cache_on_arguments(condition=cache_on)
223 218 def _blob_raw_length(_repo_id, _sha):
224 219
@@ -230,10 +225,10 b' class GitRemote(RemoteBase):'
230 225 return _blob_raw_length(repo_id, sha)
231 226
232 227 def _parse_lfs_pointer(self, raw_content):
228 spec_string = b'version https://git-lfs.github.com/spec'
229 if raw_content and raw_content.startswith(spec_string):
233 230
234 spec_string = 'version https://git-lfs.github.com/spec'
235 if raw_content and raw_content.startswith(spec_string):
236 pattern = re.compile(r"""
231 pattern = re.compile(rb"""
237 232 (?:\n)?
238 233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
239 234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
@@ -249,8 +244,8 b' class GitRemote(RemoteBase):'
249 244 @reraise_safe_exceptions
250 245 def is_large_file(self, wire, commit_id):
251 246 cache_on, context_uid, repo_id = self._cache_on(wire)
247 region = self._region(wire)
252 248
253 region = self._region(wire)
254 249 @region.conditional_cache_on_arguments(condition=cache_on)
255 250 def _is_large_file(_repo_id, _sha):
256 251 repo_init = self._factory.repo_libgit2(wire)
@@ -266,8 +261,8 b' class GitRemote(RemoteBase):'
266 261 @reraise_safe_exceptions
267 262 def is_binary(self, wire, tree_id):
268 263 cache_on, context_uid, repo_id = self._cache_on(wire)
264 region = self._region(wire)
269 265
270 region = self._region(wire)
271 266 @region.conditional_cache_on_arguments(condition=cache_on)
272 267 def _is_binary(_repo_id, _tree_id):
273 268 repo_init = self._factory.repo_libgit2(wire)
@@ -311,6 +306,7 b' class GitRemote(RemoteBase):'
311 306 def bulk_request(self, wire, rev, pre_load):
312 307 cache_on, context_uid, repo_id = self._cache_on(wire)
313 308 region = self._region(wire)
309
314 310 @region.conditional_cache_on_arguments(condition=cache_on)
315 311 def _bulk_request(_repo_id, _rev, _pre_load):
316 312 result = {}
@@ -341,12 +337,12 b' class GitRemote(RemoteBase):'
341 337
342 338 return urllib.request.build_opener(*handlers)
343 339
344 def _type_id_to_name(self, type_id):
340 def _type_id_to_name(self, type_id: int):
345 341 return {
346 1: b'commit',
347 2: b'tree',
348 3: b'blob',
349 4: b'tag'
342 1: 'commit',
343 2: 'tree',
344 3: 'blob',
345 4: 'tag'
350 346 }[type_id]
351 347
352 348 @reraise_safe_exceptions
@@ -674,7 +670,7 b' class GitRemote(RemoteBase):'
674 670 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
675 671 fetch_refs_chunks = list(chunk)
676 672 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
677 _out, _err = self.run_git_command(
673 self.run_git_command(
678 674 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
679 675 fail_on_stderr=False,
680 676 _copts=self._remote_conf(config),
@@ -722,6 +718,7 b' class GitRemote(RemoteBase):'
722 718 def get_object(self, wire, sha, maybe_unreachable=False):
723 719 cache_on, context_uid, repo_id = self._cache_on(wire)
724 720 region = self._region(wire)
721
725 722 @region.conditional_cache_on_arguments(condition=cache_on)
726 723 def _get_object(_context_uid, _repo_id, _sha):
727 724 repo_init = self._factory.repo_libgit2(wire)
@@ -766,7 +763,7 b' class GitRemote(RemoteBase):'
766 763 raise exceptions.LookupException(e)(missing_commit_err)
767 764
768 765 commit_id = commit.hex
769 type_id = commit.type_str
766 type_id = commit.type
770 767
771 768 return {
772 769 'id': commit_id,
@@ -781,6 +778,7 b' class GitRemote(RemoteBase):'
781 778 def get_refs(self, wire):
782 779 cache_on, context_uid, repo_id = self._cache_on(wire)
783 780 region = self._region(wire)
781
784 782 @region.conditional_cache_on_arguments(condition=cache_on)
785 783 def _get_refs(_context_uid, _repo_id):
786 784
@@ -796,6 +794,7 b' class GitRemote(RemoteBase):'
796 794 def get_branch_pointers(self, wire):
797 795 cache_on, context_uid, repo_id = self._cache_on(wire)
798 796 region = self._region(wire)
797
799 798 @region.conditional_cache_on_arguments(condition=cache_on)
800 799 def _get_branch_pointers(_context_uid, _repo_id):
801 800
@@ -811,6 +810,7 b' class GitRemote(RemoteBase):'
811 810 def head(self, wire, show_exc=True):
812 811 cache_on, context_uid, repo_id = self._cache_on(wire)
813 812 region = self._region(wire)
813
814 814 @region.conditional_cache_on_arguments(condition=cache_on)
815 815 def _head(_context_uid, _repo_id, _show_exc):
816 816 repo_init = self._factory.repo_libgit2(wire)
@@ -837,6 +837,7 b' class GitRemote(RemoteBase):'
837 837
838 838 cache_on, context_uid, repo_id = self._cache_on(wire)
839 839 region = self._region(wire)
840
840 841 @region.conditional_cache_on_arguments(condition=cache_on)
841 842 def _revision(_context_uid, _repo_id, _rev):
842 843 repo_init = self._factory.repo_libgit2(wire)
@@ -856,6 +857,7 b' class GitRemote(RemoteBase):'
856 857 def date(self, wire, commit_id):
857 858 cache_on, context_uid, repo_id = self._cache_on(wire)
858 859 region = self._region(wire)
860
859 861 @region.conditional_cache_on_arguments(condition=cache_on)
860 862 def _date(_repo_id, _commit_id):
861 863 repo_init = self._factory.repo_libgit2(wire)
@@ -876,6 +878,7 b' class GitRemote(RemoteBase):'
876 878 def author(self, wire, commit_id):
877 879 cache_on, context_uid, repo_id = self._cache_on(wire)
878 880 region = self._region(wire)
881
879 882 @region.conditional_cache_on_arguments(condition=cache_on)
880 883 def _author(_repo_id, _commit_id):
881 884 repo_init = self._factory.repo_libgit2(wire)
@@ -893,7 +896,7 b' class GitRemote(RemoteBase):'
893 896 try:
894 897 return "{}".format(author.name)
895 898 except Exception:
896 return "{}".format(safe_unicode(author.raw_name))
899 return "{}".format(safe_str(author.raw_name))
897 900
898 901 return _author(repo_id, commit_id)
899 902
@@ -930,6 +933,7 b' class GitRemote(RemoteBase):'
930 933 def children(self, wire, commit_id):
931 934 cache_on, context_uid, repo_id = self._cache_on(wire)
932 935 region = self._region(wire)
936
933 937 @region.conditional_cache_on_arguments(condition=cache_on)
934 938 def _children(_repo_id, _commit_id):
935 939 output, __ = self.run_git_command(
@@ -990,6 +994,7 b' class GitRemote(RemoteBase):'
990 994
991 995 cache_on, context_uid, repo_id = self._cache_on(wire)
992 996 region = self._region(wire)
997
993 998 @region.conditional_cache_on_arguments(condition=cache_on)
994 999 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
995 1000 repo_init = self._factory.repo_libgit2(wire)
@@ -1008,6 +1013,7 b' class GitRemote(RemoteBase):'
1008 1013 def tree_items(self, wire, tree_id):
1009 1014 cache_on, context_uid, repo_id = self._cache_on(wire)
1010 1015 region = self._region(wire)
1016
1011 1017 @region.conditional_cache_on_arguments(condition=cache_on)
1012 1018 def _tree_items(_repo_id, _tree_id):
1013 1019
@@ -1066,7 +1072,7 b' class GitRemote(RemoteBase):'
1066 1072 lines = diff.splitlines()
1067 1073 x = 0
1068 1074 for line in lines:
1069 if line.startswith('diff'):
1075 if line.startswith(b'diff'):
1070 1076 break
1071 1077 x += 1
1072 1078 # Append new line just like 'diff' command do
@@ -1110,6 +1116,7 b' class GitRemote(RemoteBase):'
1110 1116 def node_history(self, wire, commit_id, path, limit):
1111 1117 cache_on, context_uid, repo_id = self._cache_on(wire)
1112 1118 region = self._region(wire)
1119
1113 1120 @region.conditional_cache_on_arguments(condition=cache_on)
1114 1121 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1115 1122 # optimize for n==1, rev-list is much faster for that use-case
@@ -1122,14 +1129,14 b' class GitRemote(RemoteBase):'
1122 1129 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1123 1130
1124 1131 output, __ = self.run_git_command(wire, cmd)
1125 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1132 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1126 1133
1127 1134 return [x for x in commit_ids]
1128 1135 return _node_history(context_uid, repo_id, commit_id, path, limit)
1129 1136
1130 1137 @reraise_safe_exceptions
1131 def node_annotate(self, wire, commit_id, path):
1132
1138 def node_annotate_legacy(self, wire, commit_id, path):
1139 # note: replaced by pygit2 implementation
1133 1140 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1134 1141 # -l ==> outputs long shas (and we need all 40 characters)
1135 1142 # --root ==> doesn't put '^' character for boundaries
@@ -1137,13 +1144,31 b' class GitRemote(RemoteBase):'
1137 1144 output, __ = self.run_git_command(wire, cmd)
1138 1145
1139 1146 result = []
1140 for i, blame_line in enumerate(output.split('\n')[:-1]):
1147 for i, blame_line in enumerate(output.splitlines()[:-1]):
1141 1148 line_no = i + 1
1142 commit_id, line = re.split(r' ', blame_line, 1)
1143 result.append((line_no, commit_id, line))
1149 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1150 result.append((line_no, blame_commit_id, line))
1151
1144 1152 return result
1145 1153
1146 1154 @reraise_safe_exceptions
1155 def node_annotate(self, wire, commit_id, path):
1156
1157 result_libgit = []
1158 repo_init = self._factory.repo_libgit2(wire)
1159 with repo_init as repo:
1160 commit = repo[commit_id]
1161 blame_obj = repo.blame(path, newest_commit=commit_id)
1162 for i, line in enumerate(commit.tree[path].data.splitlines()):
1163 line_no = i + 1
1164 hunk = blame_obj.for_line(line_no)
1165 blame_commit_id = hunk.final_commit_id.hex
1166
1167 result_libgit.append((line_no, blame_commit_id, line))
1168
1169 return result_libgit
1170
1171 @reraise_safe_exceptions
1147 1172 def update_server_info(self, wire):
1148 1173 repo = self._factory.repo(wire)
1149 1174 update_server_info(repo)
@@ -1153,6 +1178,7 b' class GitRemote(RemoteBase):'
1153 1178
1154 1179 cache_on, context_uid, repo_id = self._cache_on(wire)
1155 1180 region = self._region(wire)
1181
1156 1182 @region.conditional_cache_on_arguments(condition=cache_on)
1157 1183 def _get_all_commit_ids(_context_uid, _repo_id):
1158 1184
@@ -1163,6 +1189,16 b' class GitRemote(RemoteBase):'
1163 1189 except Exception:
1164 1190 # Can be raised for empty repositories
1165 1191 return []
1192
1193 @region.conditional_cache_on_arguments(condition=cache_on)
1194 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1195 repo_init = self._factory.repo_libgit2(wire)
1196 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1197 results = []
1198 with repo_init as repo:
1199 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1200 results.append(commit.id.hex)
1201
1166 1202 return _get_all_commit_ids(context_uid, repo_id)
1167 1203
1168 1204 @reraise_safe_exceptions
@@ -1203,8 +1239,8 b' class GitRemote(RemoteBase):'
1203 1239 _opts.update(opts)
1204 1240 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1205 1241
1206 return ''.join(proc), ''.join(proc.error)
1207 except (EnvironmentError, OSError) as err:
1242 return b''.join(proc), b''.join(proc.stderr)
1243 except OSError as err:
1208 1244 cmd = ' '.join(cmd) # human friendly CMD
1209 1245 tb_err = ("Couldn't run git command (%s).\n"
1210 1246 "Original error was:%s\n"
@@ -39,6 +39,7 b' from vcsserver.hgcompat import ('
39 39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 40 RepoLookupError, InterventionRequired, RequirementError,
41 41 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 from vcsserver.utils import ascii_bytes, ascii_str, safe_str
42 43 from vcsserver.vcs_base import RemoteBase
43 44
44 45 log = logging.getLogger(__name__)
@@ -47,25 +48,31 b' log = logging.getLogger(__name__)'
47 48 def make_ui_from_config(repo_config):
48 49
49 50 class LoggingUI(ui.ui):
51
50 52 def status(self, *msg, **opts):
51 log.info(' '.join(msg).rstrip('\n'))
52 super(LoggingUI, self).status(*msg, **opts)
53 str_msg = map(safe_str, msg)
54 log.info(' '.join(str_msg).rstrip('\n'))
55 #super(LoggingUI, self).status(*msg, **opts)
53 56
54 57 def warn(self, *msg, **opts):
55 log.warn(' '.join(msg).rstrip('\n'))
56 super(LoggingUI, self).warn(*msg, **opts)
58 str_msg = map(safe_str, msg)
59 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
60 #super(LoggingUI, self).warn(*msg, **opts)
57 61
58 62 def error(self, *msg, **opts):
59 log.error(' '.join(msg).rstrip('\n'))
60 super(LoggingUI, self).error(*msg, **opts)
63 str_msg = map(safe_str, msg)
64 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
65 #super(LoggingUI, self).error(*msg, **opts)
61 66
62 67 def note(self, *msg, **opts):
63 log.info(' '.join(msg).rstrip('\n'))
64 super(LoggingUI, self).note(*msg, **opts)
68 str_msg = map(safe_str, msg)
69 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
70 #super(LoggingUI, self).note(*msg, **opts)
65 71
66 72 def debug(self, *msg, **opts):
67 log.debug(' '.join(msg).rstrip('\n'))
68 super(LoggingUI, self).debug(*msg, **opts)
73 str_msg = map(safe_str, msg)
74 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
75 #super(LoggingUI, self).debug(*msg, **opts)
69 76
70 77 baseui = LoggingUI()
71 78
@@ -75,26 +82,26 b' def make_ui_from_config(repo_config):'
75 82 baseui._tcfg = hgconfig.config()
76 83
77 84 for section, option, value in repo_config:
78 baseui.setconfig(section, option, value)
85 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
79 86
80 87 # make our hgweb quiet so it doesn't print output
81 baseui.setconfig('ui', 'quiet', 'true')
88 baseui.setconfig(b'ui', b'quiet', b'true')
82 89
83 baseui.setconfig('ui', 'paginate', 'never')
90 baseui.setconfig(b'ui', b'paginate', b'never')
84 91 # for better Error reporting of Mercurial
85 baseui.setconfig('ui', 'message-output', 'stderr')
92 baseui.setconfig(b'ui', b'message-output', b'stderr')
86 93
87 94 # force mercurial to only use 1 thread, otherwise it may try to set a
88 95 # signal in a non-main thread, thus generating a ValueError.
89 baseui.setconfig('worker', 'numcpus', 1)
96 baseui.setconfig(b'worker', b'numcpus', 1)
90 97
91 98 # If there is no config for the largefiles extension, we explicitly disable
92 99 # it here. This overrides settings from repositories hgrc file. Recent
93 100 # mercurial versions enable largefiles in hgrc on clone from largefile
94 101 # repo.
95 if not baseui.hasconfig('extensions', 'largefiles'):
102 if not baseui.hasconfig(b'extensions', b'largefiles'):
96 103 log.debug('Explicitly disable largefiles extension for repo.')
97 baseui.setconfig('extensions', 'largefiles', '!')
104 baseui.setconfig(b'extensions', b'largefiles', b'!')
98 105
99 106 return baseui
100 107
@@ -106,19 +113,19 b' def reraise_safe_exceptions(func):'
106 113 try:
107 114 return func(*args, **kwargs)
108 115 except (Abort, InterventionRequired) as e:
109 raise_from_original(exceptions.AbortException(e))
116 raise_from_original(exceptions.AbortException(e), e)
110 117 except RepoLookupError as e:
111 raise_from_original(exceptions.LookupException(e))
118 raise_from_original(exceptions.LookupException(e), e)
112 119 except RequirementError as e:
113 raise_from_original(exceptions.RequirementException(e))
120 raise_from_original(exceptions.RequirementException(e), e)
114 121 except RepoError as e:
115 raise_from_original(exceptions.VcsException(e))
122 raise_from_original(exceptions.VcsException(e), e)
116 123 except LookupError as e:
117 raise_from_original(exceptions.LookupException(e))
124 raise_from_original(exceptions.LookupException(e), e)
118 125 except Exception as e:
119 126 if not hasattr(e, '_vcs_kind'):
120 127 log.exception("Unhandled exception in hg remote call")
121 raise_from_original(exceptions.UnhandledException(e))
128 raise_from_original(exceptions.UnhandledException(e), e)
122 129
123 130 raise
124 131 return wrapper
@@ -144,7 +151,7 b' class MercurialFactory(RepoFactory):'
144 151
145 152 def _create_repo(self, wire, create):
146 153 baseui = self._create_config(wire["config"])
147 return instance(baseui, wire["path"], create)
154 return instance(baseui, ascii_bytes(wire["path"]), create)
148 155
149 156 def repo(self, wire, create=False):
150 157 """
@@ -154,7 +161,7 b' class MercurialFactory(RepoFactory):'
154 161
155 162
156 163 def patch_ui_message_output(baseui):
157 baseui.setconfig('ui', 'quiet', 'false')
164 baseui.setconfig(b'ui', b'quiet', b'false')
158 165 output = io.BytesIO()
159 166
160 167 def write(data, **unused_kwargs):
@@ -466,6 +473,7 b' class HgRemote(RemoteBase):'
466 473 def node_history(self, wire, revision, path, limit):
467 474 cache_on, context_uid, repo_id = self._cache_on(wire)
468 475 region = self._region(wire)
476
469 477 @region.conditional_cache_on_arguments(condition=cache_on)
470 478 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
471 479 repo = self._factory.repo(wire)
@@ -497,6 +505,7 b' class HgRemote(RemoteBase):'
497 505 def node_history_untill(self, wire, revision, path, limit):
498 506 cache_on, context_uid, repo_id = self._cache_on(wire)
499 507 region = self._region(wire)
508
500 509 @region.conditional_cache_on_arguments(condition=cache_on)
501 510 def _node_history_until(_context_uid, _repo_id):
502 511 repo = self._factory.repo(wire)
@@ -530,7 +539,7 b' class HgRemote(RemoteBase):'
530 539 repo = self._factory.repo(wire)
531 540 ctx = self._get_ctx(repo, revision)
532 541 fctx = ctx.filectx(path)
533 return fctx.data()
542 return fctx.data_queue()
534 543
535 544 @reraise_safe_exceptions
536 545 def fctx_flags(self, wire, commit_id, path):
@@ -561,11 +570,12 b' class HgRemote(RemoteBase):'
561 570 def get_all_commit_ids(self, wire, name):
562 571 cache_on, context_uid, repo_id = self._cache_on(wire)
563 572 region = self._region(wire)
573
564 574 @region.conditional_cache_on_arguments(condition=cache_on)
565 575 def _get_all_commit_ids(_context_uid, _repo_id, _name):
566 576 repo = self._factory.repo(wire)
567 577 repo = repo.filtered(name)
568 revs = [hex(x[7]) for x in repo.changelog.index]
578 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
569 579 return revs
570 580 return _get_all_commit_ids(context_uid, repo_id, name)
571 581
@@ -578,6 +588,7 b' class HgRemote(RemoteBase):'
578 588 def is_large_file(self, wire, commit_id, path):
579 589 cache_on, context_uid, repo_id = self._cache_on(wire)
580 590 region = self._region(wire)
591
581 592 @region.conditional_cache_on_arguments(condition=cache_on)
582 593 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
583 594 return largefiles.lfutil.isstandin(path)
@@ -587,8 +598,8 b' class HgRemote(RemoteBase):'
587 598 @reraise_safe_exceptions
588 599 def is_binary(self, wire, revision, path):
589 600 cache_on, context_uid, repo_id = self._cache_on(wire)
601 region = self._region(wire)
590 602
591 region = self._region(wire)
592 603 @region.conditional_cache_on_arguments(condition=cache_on)
593 604 def _is_binary(_repo_id, _sha, _path):
594 605 repo = self._factory.repo(wire)
@@ -666,12 +677,12 b' class HgRemote(RemoteBase):'
666 677 repo = self._factory.repo(wire)
667 678
668 679 # Disable any prompts for this repo
669 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
680 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
670 681
671 682 bookmarks = list(dict(repo._bookmarks).keys())
672 683 remote = peer(repo, {}, url)
673 684 # Disable any prompts for this remote
674 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
685 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
675 686
676 687 return exchange.push(
677 688 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
@@ -878,11 +889,11 b' class HgRemote(RemoteBase):'
878 889 def pull(self, wire, url, commit_ids=None):
879 890 repo = self._factory.repo(wire)
880 891 # Disable any prompts for this repo
881 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
892 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
882 893
883 894 remote = peer(repo, {}, url)
884 895 # Disable any prompts for this remote
885 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
896 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
886 897
887 898 if commit_ids:
888 899 commit_ids = [bin(commit_id) for commit_id in commit_ids]
@@ -942,25 +953,25 b' class HgRemote(RemoteBase):'
942 953 def merge(self, wire, revision):
943 954 repo = self._factory.repo(wire)
944 955 baseui = self._factory._create_config(wire['config'])
945 repo.ui.setconfig('ui', 'merge', 'internal:dump')
956 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
946 957
947 958 # In case of sub repositories are used mercurial prompts the user in
948 959 # case of merge conflicts or different sub repository sources. By
949 960 # setting the interactive flag to `False` mercurial doesn't prompt the
950 961 # used but instead uses a default value.
951 repo.ui.setconfig('ui', 'interactive', False)
962 repo.ui.setconfig(b'ui', b'interactive', False)
952 963 commands.merge(baseui, repo, rev=revision)
953 964
954 965 @reraise_safe_exceptions
955 966 def merge_state(self, wire):
956 967 repo = self._factory.repo(wire)
957 repo.ui.setconfig('ui', 'merge', 'internal:dump')
968 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
958 969
959 970 # In case of sub repositories are used mercurial prompts the user in
960 971 # case of merge conflicts or different sub repository sources. By
961 972 # setting the interactive flag to `False` mercurial doesn't prompt the
962 973 # used but instead uses a default value.
963 repo.ui.setconfig('ui', 'interactive', False)
974 repo.ui.setconfig(b'ui', b'interactive', False)
964 975 ms = hg_merge.mergestate(repo)
965 976 return [x for x in ms.unresolved()]
966 977
@@ -968,19 +979,19 b' class HgRemote(RemoteBase):'
968 979 def commit(self, wire, message, username, close_branch=False):
969 980 repo = self._factory.repo(wire)
970 981 baseui = self._factory._create_config(wire['config'])
971 repo.ui.setconfig('ui', 'username', username)
982 repo.ui.setconfig(b'ui', b'username', username)
972 983 commands.commit(baseui, repo, message=message, close_branch=close_branch)
973 984
974 985 @reraise_safe_exceptions
975 986 def rebase(self, wire, source=None, dest=None, abort=False):
976 987 repo = self._factory.repo(wire)
977 988 baseui = self._factory._create_config(wire['config'])
978 repo.ui.setconfig('ui', 'merge', 'internal:dump')
989 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
979 990 # In case of sub repositories are used mercurial prompts the user in
980 991 # case of merge conflicts or different sub repository sources. By
981 992 # setting the interactive flag to `False` mercurial doesn't prompt the
982 993 # used but instead uses a default value.
983 repo.ui.setconfig('ui', 'interactive', False)
994 repo.ui.setconfig(b'ui', b'interactive', False)
984 995 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
985 996
986 997 @reraise_safe_exceptions
@@ -1039,7 +1050,7 b' class HgRemote(RemoteBase):'
1039 1050 mode = b'x' in flags and 0o755 or 0o644
1040 1051 is_link = b'l' in flags
1041 1052
1042 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1053 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data_queue)
1043 1054
1044 1055 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1045 1056 archive_dir_name, commit_id)
@@ -407,14 +407,15 b' class SvnRemote(RemoteBase):'
407 407 log.debug('Return process ended with code: %s', rdump.returncode)
408 408 if rdump.returncode != 0:
409 409 errors = rdump.stderr.read()
410 log.error('svnrdump dump failed: statuscode %s: message: %s',
411 rdump.returncode, errors)
410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
411
412 412 reason = 'UNKNOWN'
413 if 'svnrdump: E230001:' in errors:
413 if b'svnrdump: E230001:' in errors:
414 414 reason = 'INVALID_CERTIFICATE'
415 415
416 416 if reason == 'UNKNOWN':
417 reason = 'UNKNOWN:{}'.format(errors)
417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
418
418 419 raise Exception(
419 420 'Failed to dump the remote repository from %s. Reason:%s' % (
420 421 src_url, reason))
@@ -496,10 +497,10 b' class SvnRemote(RemoteBase):'
496 497
497 498 try:
498 499 _opts.update(opts)
499 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
500 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
500 501
501 return ''.join(p), ''.join(p.error)
502 except (EnvironmentError, OSError) as err:
502 return b''.join(proc), b''.join(proc.stderr)
503 except OSError as err:
503 504 if safe_call:
504 505 return '', safe_str(err).strip()
505 506 else:
@@ -27,7 +27,7 b' import mercurial.hgweb.hgweb_mod'
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30 from vcsserver.utils import ascii_bytes
31 31
32 32 log = logging.getLogger(__name__)
33 33
@@ -115,10 +115,10 b' def make_hg_ui_from_config(repo_config):'
115 115 baseui._tcfg = mercurial.config.config()
116 116
117 117 for section, option, value in repo_config:
118 baseui.setconfig(section, option, value)
118 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
119 119
120 120 # make our hgweb quiet so it doesn't print output
121 baseui.setconfig('ui', 'quiet', 'true')
121 baseui.setconfig(b'ui', b'quiet', b'true')
122 122
123 123 return baseui
124 124
@@ -135,7 +135,7 b' def update_hg_ui_from_hgrc(baseui, repo_'
135 135 for section in HG_UI_SECTIONS:
136 136 for k, v in cfg.items(section):
137 137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 baseui.setconfig(section, k, v)
138 baseui.setconfig(ascii_bytes(section), ascii_bytes(k), ascii_bytes(v))
139 139
140 140
141 141 def create_hg_wsgi_app(repo_path, repo_name, config):
@@ -225,10 +225,10 b' class GitLFSHandler(object):'
225 225
226 226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 227 git_path = settings.GIT_EXECUTABLE
228 update_server_info = config.pop('git_update_server_info')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
228 update_server_info = config.pop(b'git_update_server_info')
229 git_lfs_enabled = config.pop(b'git_lfs_enabled')
230 git_lfs_store_path = config.pop(b'git_lfs_store_path')
231 git_lfs_http_scheme = config.pop(b'git_lfs_http_scheme', 'http')
232 232 app = GitLFSHandler(
233 233 repo_path, repo_name, git_path, update_server_info, config)
234 234
@@ -23,15 +23,15 b' along with git_http_backend.py Project.'
23 23 If not, see <http://www.gnu.org/licenses/>.
24 24 """
25 25 import os
26 import collections
26 27 import logging
27 28 import subprocess
28 from collections import deque
29 from threading import Event, Thread
29 import threading
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 class StreamFeeder(Thread):
34 class StreamFeeder(threading.Thread):
35 35 """
36 36 Normal writing into pipe-like is blocking once the buffer is filled.
37 37 This thread allows a thread to seep data from a file-like into a pipe
@@ -47,17 +47,11 b' class StreamFeeder(Thread):'
47 47 if type(source) in (type(''), bytes, bytearray): # string-like
48 48 self.bytes = bytes(source)
49 49 else: # can be either file pointer or file-like
50 if type(source) in (int, int): # file pointer it is
50 if isinstance(source, int): # file pointer it is
51 51 # converting file descriptor (int) stdin into file-like
52 try:
53 52 source = os.fdopen(source, 'rb', 16384)
54 except Exception:
55 pass
56 53 # let's see if source is file-like by now
57 try:
58 filelike = source.read
59 except Exception:
60 pass
54 filelike = hasattr(source, 'read')
61 55 if not filelike and not self.bytes:
62 56 raise TypeError("StreamFeeder's source object must be a readable "
63 57 "file-like, a file descriptor, or a string-like.")
@@ -65,25 +59,28 b' class StreamFeeder(Thread):'
65 59 self.readiface, self.writeiface = os.pipe()
66 60
67 61 def run(self):
68 t = self.writeiface
62 writer = self.writeiface
69 63 try:
70 64 if self.bytes:
71 os.write(t, self.bytes)
65 os.write(writer, self.bytes)
72 66 else:
73 67 s = self.source
74 b = s.read(4096)
75 while b:
76 os.write(t, b)
77 b = s.read(4096)
68
69 while 1:
70 _bytes = s.read(4096)
71 if not _bytes:
72 break
73 os.write(writer, _bytes)
74
78 75 finally:
79 os.close(t)
76 os.close(writer)
80 77
81 78 @property
82 79 def output(self):
83 80 return self.readiface
84 81
85 82
86 class InputStreamChunker(Thread):
83 class InputStreamChunker(threading.Thread):
87 84 def __init__(self, source, target, buffer_size, chunk_size):
88 85
89 86 super(InputStreamChunker, self).__init__()
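For orientation, a small sketch of the StreamFeeder behaviour changed above, used on its own (SubprocessIOChunker normally drives it): the thread copies a bytes source into an os.pipe() and the consumer reads the other end. The payload is illustrative:

    import os
    from vcsserver.subprocessio import StreamFeeder

    feeder = StreamFeeder(b'hello pipe')   # string-like sources are buffered whole
    feeder.start()

    read_fd = feeder.output                # read end of the pipe created in __init__
    data = b''
    while True:
        chunk = os.read(read_fd, 4096)
        if not chunk:                      # the thread closed the write end
            break
        data += chunk
    feeder.join()
    os.close(read_fd)

    assert data == b'hello pipe'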
@@ -95,16 +92,16 b' class InputStreamChunker(Thread):'
95 92 self.chunk_count_max = int(buffer_size / chunk_size) + 1
96 93 self.chunk_size = chunk_size
97 94
98 self.data_added = Event()
95 self.data_added = threading.Event()
99 96 self.data_added.clear()
100 97
101 self.keep_reading = Event()
98 self.keep_reading = threading.Event()
102 99 self.keep_reading.set()
103 100
104 self.EOF = Event()
101 self.EOF = threading.Event()
105 102 self.EOF.clear()
106 103
107 self.go = Event()
104 self.go = threading.Event()
108 105 self.go.set()
109 106
110 107 def stop(self):
@@ -146,7 +143,7 b' class InputStreamChunker(Thread):'
146 143
147 144 try:
148 145 b = s.read(cs)
149 except ValueError:
146 except ValueError: # probably "I/O operation on closed file"
150 147 b = ''
151 148
152 149 self.EOF.set()
@@ -166,18 +163,20 b' class BufferedGenerator(object):'
166 163 StopIteration after the last chunk of data is yielded.
167 164 """
168 165
169 def __init__(self, source, buffer_size=65536, chunk_size=4096,
166 def __init__(self, name, source, buffer_size=65536, chunk_size=4096,
170 167 starting_values=None, bottomless=False):
171 168 starting_values = starting_values or []
169 self.name = name
170 self.buffer_size = buffer_size
171 self.chunk_size = chunk_size
172 172
173 173 if bottomless:
174 174 maxlen = int(buffer_size / chunk_size)
175 175 else:
176 176 maxlen = None
177 177
178 self.data = deque(starting_values, maxlen)
179 self.worker = InputStreamChunker(source, self.data, buffer_size,
180 chunk_size)
178 self.data_queue = collections.deque(starting_values, maxlen)
179 self.worker = InputStreamChunker(source, self.data_queue, buffer_size, chunk_size)
181 180 if starting_values:
182 181 self.worker.data_added.set()
183 182 self.worker.start()
@@ -185,17 +184,21 b' class BufferedGenerator(object):'
185 184 ####################
186 185 # Generator's methods
187 186 ####################
187 def __str__(self):
188 return f'BufferedGenerator(name={self.name} chunk: {self.chunk_size} on buffer: {self.buffer_size})'
188 189
189 190 def __iter__(self):
190 191 return self
191 192
192 193 def __next__(self):
193 while not len(self.data) and not self.worker.EOF.is_set():
194
195 while not self.length and not self.worker.EOF.is_set():
194 196 self.worker.data_added.clear()
195 197 self.worker.data_added.wait(0.2)
196 if len(self.data):
198
199 if self.length:
197 200 self.worker.keep_reading.set()
198 return bytes(self.data.popleft())
201 return bytes(self.data_queue.popleft())
199 202 elif self.worker.EOF.is_set():
200 203 raise StopIteration
201 204
@@ -249,7 +252,7 b' class BufferedGenerator(object):'
249 252 @property
250 253 def done_reading(self):
251 254 """
252 Done_reding does not mean that the iterator's buffer is empty.
255 Done_reading does not mean that the iterator's buffer is empty.
253 256 Iterator might have done reading from underlying source, but the read
254 257 chunks might still be available for serving through .next() method.
255 258
@@ -262,31 +265,31 b' class BufferedGenerator(object):'
262 265 """
263 266 returns int.
264 267
265 This is the lenght of the que of chunks, not the length of
268 This is the length of the queue of chunks, not the length of
266 269 the combined contents in those chunks.
267 270
268 271 __len__() cannot be meaningfully implemented because this
269 reader is just flying throuh a bottomless pit content and
270 can only know the lenght of what it already saw.
272     reader is just flying through a bottomless pit of content and
273 can only know the length of what it already saw.
271 274
272 275 If __len__() on WSGI server per PEP 3333 returns a value,
273 the responce's length will be set to that. In order not to
276 the response's length will be set to that. In order not to
274 277 confuse WSGI PEP3333 servers, we will not implement __len__
275 278 at all.
276 279 """
277 return len(self.data)
280 return len(self.data_queue)
278 281
279 282 def prepend(self, x):
280 self.data.appendleft(x)
283 self.data_queue.appendleft(x)
281 284
282 285 def append(self, x):
283 self.data.append(x)
286 self.data_queue.append(x)
284 287
285 288 def extend(self, o):
286 self.data.extend(o)
289 self.data_queue.extend(o)
287 290
288 291 def __getitem__(self, i):
289 return self.data[i]
292 return self.data_queue[i]
290 293
291 294
292 295 class SubprocessIOChunker(object):
@@ -314,7 +317,7 b' class SubprocessIOChunker(object):'
314 317
315 318 - We are multithreaded. Writing in and reading out, err are all sep threads.
316 319 - We support concurrent (in and out) stream processing.
317 - The output is not a stream. It's a queue of read string (bytes, not unicode)
320 - The output is not a stream. It's a queue of read string (bytes, not str)
318 321 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
319 322 - We are non-blocking in more respects than communicate()
320 323 (reading from subprocess out pauses when internal buffer is full, but
@@ -323,16 +326,16 b' class SubprocessIOChunker(object):'
323 326 does not block the parallel inpipe reading occurring parallel thread.)
324 327
325 328 The purpose of the object is to allow us to wrap subprocess interactions into
326 and interable that can be passed to a WSGI server as the application's return
329 an iterable that can be passed to a WSGI server as the application's return
327 330 value. Because of stream-processing-ability, WSGI does not have to read ALL
328 331 of the subprocess's output and buffer it, before handing it to WSGI server for
329 332 HTTP response. Instead, the class initializer reads just a bit of the stream
330 to figure out if error ocurred or likely to occur and if not, just hands the
333 to figure out if error occurred or likely to occur and if not, just hands the
331 334 further iteration over subprocess output to the server for completion of HTTP
332 335 response.
333 336
334 337 The real or perceived subprocess error is trapped and raised as one of
335 EnvironmentError family of exceptions
338 OSError family of exceptions
336 339
337 340 Example usage:
338 341 # try:
@@ -342,7 +345,7 b' class SubprocessIOChunker(object):'
342 345 # buffer_size = 65536,
343 346 # chunk_size = 4096
344 347 # )
345 # except (EnvironmentError) as e:
348 # except (OSError) as e:
346 349 # print str(e)
347 350 # raise e
348 351 #
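A hedged, runnable variant of the commented example above, assuming a POSIX cat binary on PATH; it exercises the renamed input_stream argument and the bytes-only chunks:

    from vcsserver import subprocessio

    chunker = subprocessio.SubprocessIOChunker(
        ['cat'],                        # command, handed to subprocess.Popen
        input_stream=b'some data',      # fed through StreamFeeder in a background thread
        shell=False,
        fail_on_stderr=True,
        fail_on_return_code=True,
    )
    # iterating drains stdout chunk by chunk, exactly as a WSGI server would
    assert b''.join(chunker) == b'some data'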
@@ -358,15 +361,17 b' class SubprocessIOChunker(object):'
358 361 _close_input_fd = None
359 362
360 363 _closed = False
364 _stdout = None
365 _stderr = None
361 366
362 def __init__(self, cmd, inputstream=None, buffer_size=65536,
367 def __init__(self, cmd, input_stream=None, buffer_size=65536,
363 368 chunk_size=4096, starting_values=None, fail_on_stderr=True,
364 369 fail_on_return_code=True, **kwargs):
365 370 """
366 371 Initializes SubprocessIOChunker
367 372
368 373 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
369 :param inputstream: (Default: None) A file-like, string, or file pointer.
374 :param input_stream: (Default: None) A file-like, string, or file pointer.
370 375 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
371 376 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
372 377 :param starting_values: (Default: []) An array of strings to put in front of output que.
@@ -376,66 +381,81 b' class SubprocessIOChunker(object):'
376 381 exception if the return code is not 0.
377 382 """
378 383
384 kwargs['shell'] = kwargs.get('shell', True)
385
379 386 starting_values = starting_values or []
380 if inputstream:
381 input_streamer = StreamFeeder(inputstream)
387 if input_stream:
388 input_streamer = StreamFeeder(input_stream)
382 389 input_streamer.start()
383 inputstream = input_streamer.output
384 self._close_input_fd = inputstream
390 input_stream = input_streamer.output
391 self._close_input_fd = input_stream
385 392
386 393 self._fail_on_stderr = fail_on_stderr
387 394 self._fail_on_return_code = fail_on_return_code
388
389 _shell = kwargs.get('shell', True)
390 kwargs['shell'] = _shell
395 self.cmd = cmd
391 396
392 _p = subprocess.Popen(cmd, bufsize=-1,
393 stdin=inputstream,
394 stdout=subprocess.PIPE,
395 stderr=subprocess.PIPE,
397 _p = subprocess.Popen(cmd, bufsize=-1, stdin=input_stream, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
396 398 **kwargs)
399 self.process = _p
397 400
398 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
399 starting_values)
400 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
401 bg_out = BufferedGenerator('stdout', _p.stdout, buffer_size, chunk_size, starting_values)
402 bg_err = BufferedGenerator('stderr', _p.stderr, 10240, 1, bottomless=True)
401 403
402 404 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
403 405 # doing this until we reach either end of file, or end of buffer.
404 bg_out.data_added_event.wait(1)
406 bg_out.data_added_event.wait(0.2)
405 407 bg_out.data_added_event.clear()
406 408
407 409 # at this point it's still ambiguous if we are done reading or just full buffer.
408 410 # Either way, if error (returned by ended process, or implied based on
409 411 # presence of stuff in stderr output) we error out.
410 412 # Else, we are happy.
411 _returncode = _p.poll()
413 return_code = _p.poll()
414 ret_code_ok = return_code in [None, 0]
415 ret_code_fail = return_code is not None and return_code != 0
416 if (
417 (ret_code_fail and fail_on_return_code) or
418 (ret_code_ok and fail_on_stderr and bg_err.length)
419 ):
412 420
413 if ((_returncode and fail_on_return_code) or
414 (fail_on_stderr and _returncode is None and bg_err.length)):
415 421 try:
416 422 _p.terminate()
417 423 except Exception:
418 424 pass
425
419 426 bg_out.stop()
427 out = b''.join(bg_out)
428 self._stdout = out
429
420 430 bg_err.stop()
421 if fail_on_stderr:
422 err = ''.join(bg_err)
423 raise EnvironmentError(
424 "Subprocess exited due to an error:\n" + err)
425 if _returncode and fail_on_return_code:
426 err = ''.join(bg_err)
431 err = b''.join(bg_err)
432 self._stderr = err
433
434 # code from https://github.com/schacon/grack/pull/7
435 if err.strip() == b'fatal: The remote end hung up unexpectedly' and out.startswith(b'0034shallow '):
436 bg_out = iter([out])
437 _p = None
438 elif err and fail_on_stderr:
439 text_err = err.decode()
440 raise OSError(
441 "Subprocess exited due to an error:\n{}".format(text_err))
442
443 if ret_code_fail and fail_on_return_code:
444 text_err = err.decode()
427 445 if not err:
428 446 # maybe get empty stderr, try stdout instead
429 447 # in many cases git reports the errors on stdout too
430 err = ''.join(bg_out)
431 raise EnvironmentError(
432 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
433 _returncode, err))
448 text_err = out.decode()
449 raise OSError(
450 "Subprocess exited with non 0 ret code:{}: stderr:{}".format(return_code, text_err))
434 451
435 self.process = _p
436 self.output = bg_out
437 self.error = bg_err
438 self.inputstream = inputstream
452 self.stdout = bg_out
453 self.stderr = bg_err
454 self.inputstream = input_stream
455
456 def __str__(self):
457 proc = getattr(self, 'process', 'NO_PROCESS')
458 return f'SubprocessIOChunker: {proc}'
439 459
440 460 def __iter__(self):
441 461 return self
@@ -449,27 +469,31 b' class SubprocessIOChunker(object):'
449 469 result = None
450 470 stop_iteration = None
451 471 try:
452 result = next(self.output)
472 result = next(self.stdout)
453 473 except StopIteration as e:
454 474 stop_iteration = e
455 475
456 if self.process.poll() and self._fail_on_return_code:
457 err = '%s' % ''.join(self.error)
458 raise EnvironmentError(
459 "Subprocess exited due to an error:\n" + err)
476 if self.process:
477 return_code = self.process.poll()
478 ret_code_fail = return_code is not None and return_code != 0
479 if ret_code_fail and self._fail_on_return_code:
480 self.stop_streams()
481 err = self.get_stderr()
482 raise OSError(
483 "Subprocess exited (exit_code:{}) due to an error during iteration:\n{}".format(return_code, err))
460 484
461 485 if stop_iteration:
462 486 raise stop_iteration
463 487 return result
464 488
465 def throw(self, type, value=None, traceback=None):
466 if self.output.length or not self.output.done_reading:
467 raise type(value)
489 def throw(self, exc_type, value=None, traceback=None):
490 if self.stdout.length or not self.stdout.done_reading:
491 raise exc_type(value)
468 492
469 493 def close(self):
470 494 if self._closed:
471 495 return
472 self._closed = True
496
473 497 try:
474 498 self.process.terminate()
475 499 except Exception:
@@ -477,11 +501,11 b' class SubprocessIOChunker(object):'
477 501 if self._close_input_fd:
478 502 os.close(self._close_input_fd)
479 503 try:
480 self.output.close()
504 self.stdout.close()
481 505 except Exception:
482 506 pass
483 507 try:
484 self.error.close()
508 self.stderr.close()
485 509 except Exception:
486 510 pass
487 511 try:
@@ -489,6 +513,24 b' class SubprocessIOChunker(object):'
489 513 except Exception:
490 514 pass
491 515
516 self._closed = True
517
518 def stop_streams(self):
519 getattr(self.stdout, 'stop', lambda: None)()
520 getattr(self.stderr, 'stop', lambda: None)()
521
522 def get_stdout(self):
523 if self._stdout:
524 return self._stdout
525 else:
526 return b''.join(self.stdout)
527
528 def get_stderr(self):
529 if self._stderr:
530 return self._stderr
531 else:
532 return b''.join(self.stderr)
533
492 534
493 535 def run_command(arguments, env=None):
494 536 """
@@ -506,8 +548,8 b' def run_command(arguments, env=None):'
506 548 if env:
507 549 _opts.update({'env': env})
508 550 proc = SubprocessIOChunker(cmd, **_opts)
509 return ''.join(proc), ''.join(proc.error)
510 except (EnvironmentError, OSError) as err:
551 return b''.join(proc), b''.join(proc.stderr)
552 except OSError as err:
511 553 cmd = ' '.join(cmd) # human friendly CMD
512 554 tb_err = ("Couldn't run subprocessio command (%s).\n"
513 555 "Original error was:%s\n" % (cmd, err))
@@ -48,7 +48,7 b' def test_discover_git_version(git_remote'
48 48
49 49
50 50 class TestGitFetch(object):
51 def setup(self):
51 def setup_method(self):
52 52 self.mock_repo = Mock()
53 53 factory = Mock()
54 54 factory.repo = Mock(return_value=self.mock_repo)
@@ -92,7 +92,7 b' class TestGitFetch(object):'
92 92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 93 }
94 94
95 with patch('vcsserver.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
96 96 mock_repo().get_refs.return_value = sample_refs
97 97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 98 mock_repo().get_refs.assert_called_once_with()
@@ -137,10 +137,13 b' class TestReraiseSafeExceptions(object):'
137 137 class TestDulwichRepoWrapper(object):
138 138 def test_calls_close_on_delete(self):
139 139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
140 141 with isdir_patcher:
141 142 repo = git.Repo('/tmp/abcde')
142 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 del repo
143 assert repo is not None
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
144 147 close_mock.assert_called_once_with()
145 148
146 149
@@ -33,8 +33,8 b' class TestDiff(object):'
33 33 factory = Mock()
34 34 hg_remote = hg.HgRemote(factory)
35 35 with patch('mercurial.patch.diff') as diff_mock:
36 diff_mock.side_effect = LookupError(
37 'deadbeef', 'index', 'message')
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
37
38 38 with pytest.raises(Exception) as exc_info:
39 39 hg_remote.diff(
40 40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
@@ -55,10 +55,10 b' class TestReraiseSafeExceptions(object):'
55 55 assert method.__func__.__code__ == decorator.__code__
56 56
57 57 @pytest.mark.parametrize('side_effect, expected_type', [
58 (hgcompat.Abort(), 'abort'),
59 (hgcompat.InterventionRequired(), 'abort'),
58 (hgcompat.Abort('failed-abort'), 'abort'),
59 (hgcompat.InterventionRequired('intervention-required'), 'abort'),
60 60 (hgcompat.RepoLookupError(), 'lookup'),
61 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
62 62 (hgcompat.RepoError(), 'error'),
63 63 (hgcompat.RequirementError(), 'requirement'),
64 64 ])
@@ -76,10 +76,9 b' class TestReraiseSafeExceptions(object):'
76 76 @hg.reraise_safe_exceptions
77 77 def fake_method():
78 78 try:
79 raise hgcompat.Abort()
79 raise hgcompat.Abort('test-abort')
80 80 except:
81 self.original_traceback = traceback.format_tb(
82 sys.exc_info()[2])
81 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
83 82 raise
84 83
85 84 try:
@@ -21,7 +21,7 b' import pytest'
21 21 from vcsserver import hgcompat, hgpatches
22 22
23 23
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
24 LARGEFILES_CAPABILITY = b'largefiles=serve'
25 25
26 26
27 27 def test_patch_largefiles_capabilities_applies_patch(
@@ -72,11 +72,6 b' def test_dynamic_capabilities_uses_large'
72 72 assert LARGEFILES_CAPABILITY in caps
73 73
74 74
75 def test_hgsubversion_import():
76 from hgsubversion import svnrepo
77 assert svnrepo
78
79
80 75 @pytest.fixture
81 76 def patched_capabilities(request):
82 77 """
@@ -15,17 +15,17 b''
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 import contextlib
19 import io
20 18 import threading
19 import msgpack
20
21 21 from http.server import BaseHTTPRequestHandler
22 22 from socketserver import TCPServer
23 23
24 24 import mercurial.ui
25 25 import mock
26 26 import pytest
27 import simplejson as json
28 27
28 from vcsserver.lib.rc_json import json
29 29 from vcsserver import hooks
30 30
31 31
@@ -44,7 +44,7 b' def get_hg_ui(extras=None):'
44 44 }
45 45 required_extras.update(extras)
46 46 hg_ui = mercurial.ui.ui()
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
47 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
48 48
49 49 return hg_ui
50 50
@@ -67,6 +67,7 b' def test_git_post_receive_is_disabled():'
67 67
68 68 def test_git_post_receive_calls_repo_size():
69 69 extras = {'hooks': ['push', 'repo_size']}
70
70 71 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 72 hooks.git_post_receive(
72 73 None, '', {'RC_SCM_DATA': json.dumps(extras)})
@@ -81,6 +82,7 b' def test_git_post_receive_calls_repo_siz'
81 82
82 83 def test_git_post_receive_does_not_call_disabled_repo_size():
83 84 extras = {'hooks': ['push']}
85
84 86 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 87 hooks.git_post_receive(
86 88 None, '', {'RC_SCM_DATA': json.dumps(extras)})
@@ -149,18 +151,19 b' class TestHooksHttpClient(object):'
149 151 client = hooks.HooksHttpClient(uri)
150 152 assert client.hooks_uri == uri
151 153
152 def test_serialize_returns_json_string(self):
154 def test_serialize_returns_serialized_string(self):
153 155 client = hooks.HooksHttpClient('localhost:3000')
154 156 hook_name = 'test'
155 157 extras = {
156 158 'first': 1,
157 159 'second': 'two'
158 160 }
159 result = client._serialize(hook_name, extras)
160 expected_result = json.dumps({
161 hooks_proto, result = client._serialize(hook_name, extras)
162 expected_result = msgpack.packb({
161 163 'method': hook_name,
162 'extras': extras
164 'extras': extras,
163 165 })
166 assert hooks_proto == {'rc-hooks-protocol': 'msgpack.v1'}
164 167 assert result == expected_result
165 168
166 169 def test_call_queries_http_server(self, http_mirror):
@@ -171,10 +174,10 b' class TestHooksHttpClient(object):'
171 174 'second': 'two'
172 175 }
173 176 result = client(hook_name, extras)
174 expected_result = {
177 expected_result = msgpack.unpackb(msgpack.packb({
175 178 'method': hook_name,
176 179 'extras': extras
177 }
180 }), raw=False)
178 181 assert result == expected_result
179 182
180 183
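The two tests above pin down the new hook-call wire format: a msgpack body plus an 'rc-hooks-protocol': 'msgpack.v1' header instead of the old JSON payload. A short sketch of the round trip, reusing the test's own hook name and extras:

    import msgpack

    headers = {'rc-hooks-protocol': 'msgpack.v1'}
    body = msgpack.packb({'method': 'test', 'extras': {'first': 1, 'second': 'two'}})

    # the mirror HTTP server in these tests echoes the body back verbatim,
    # so unpacking reproduces the original call description
    assert msgpack.unpackb(body, raw=False) == {
        'method': 'test', 'extras': {'first': 1, 'second': 'two'}}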
@@ -211,9 +214,10 b' def http_mirror(request):'
211 214
212 215
213 216 class MirrorHttpHandler(BaseHTTPRequestHandler):
217
214 218 def do_POST(self):
215 219 length = int(self.headers['Content-Length'])
216 body = self.rfile.read(length).decode('utf-8')
220 body = self.rfile.read(length)
217 221 self.send_response(200)
218 222 self.end_headers()
219 223 self.wfile.write(body)
@@ -30,13 +30,13 b' def data():'
30 30
31 31 def test_http_app_streaming_with_data(data, repeat, vcs_app):
32 32 app = vcs_app
33 for x in range(repeat / 10):
33 for x in range(repeat // 10):
34 34 response = app.post('/stream/git/', params=data)
35 35 assert response.status_code == 200
36 36
37 37
38 38 def test_http_app_streaming_no_data(repeat, vcs_app):
39 39 app = vcs_app
40 for x in range(repeat / 10):
40 for x in range(repeat // 10):
41 41 response = app.post('/stream/git/')
42 42 assert response.status_code == 200
@@ -23,7 +23,7 b' import vcsserver'
23 23 import tempfile
24 24 from vcsserver import hook_utils
25 25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.utils import AttributeDict
26 from vcsserver.utils import AttributeDict, safe_bytes, safe_str
27 27
28 28
29 29 class TestCheckRhodecodeHook(object):
@@ -31,7 +31,7 b' class TestCheckRhodecodeHook(object):'
31 31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 33 with open(hook, 'wb') as f:
34 f.write('dummy test')
34 f.write(b'dummy test')
35 35 result = hook_utils.check_rhodecode_hook(hook)
36 36 assert result is False
37 37
@@ -47,7 +47,7 b' class TestCheckRhodecodeHook(object):'
47 47 def test_signatures(self, file_content, expected_result, tmpdir):
48 48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 49 with open(hook, 'wb') as f:
50 f.write(file_content)
50 f.write(safe_bytes(file_content))
51 51
52 52 result = hook_utils.check_rhodecode_hook(hook)
53 53
@@ -71,8 +71,7 b' class BaseInstallHooks(object):'
71 71 content = hook_file.read()
72 72
73 73 expected_env = '#!{}'.format(executable)
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 vcsserver.__version__)
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(safe_str(vcsserver.__version__))
76 75 assert content.strip().startswith(expected_env)
77 76 assert expected_rc_version in content
78 77
@@ -42,8 +42,7 b' def test_applies_largefiles_patch_only_i'
42 42 ('bad', 'bad'),
43 43 ('query&foo=bar', 'query&foo=bar'),
44 44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret', 'a;b;c;query&foo=bar&auth_token=*****'),
47 46 ('', ''),
48 47 (None, None),
49 48 ('foo=bar', 'foo=bar'),
@@ -16,6 +16,7 b''
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 import more_itertools
19 20
20 21 import dulwich.protocol
21 22 import mock
@@ -26,6 +27,7 b' import webtest'
26 27 from vcsserver import hooks, pygrack
27 28
28 29 # pylint: disable=redefined-outer-name,protected-access
30 from vcsserver.utils import ascii_bytes
29 31
30 32
31 33 @pytest.fixture()
@@ -75,8 +77,7 b' def test_pre_pull_hook_fails_with_sideba'
75 77 '0000',
76 78 '0009done\n',
77 79 ])
78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 return_value=hooks.HookResponse(1, 'foo')):
80 with mock.patch('vcsserver.hooks.git_pre_pull', return_value=hooks.HookResponse(1, 'foo')):
80 81 response = pygrack_app.post(
81 82 '/git-upload-pack', params=request,
82 83 content_type='application/x-git-upload-pack')
@@ -86,8 +87,8 b' def test_pre_pull_hook_fails_with_sideba'
86 87 packets = list(proto.read_pkt_seq())
87 88
88 89 expected_packets = [
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
90 b'NAK\n', b'\x02foo', b'\x02Pre pull hook failed: aborting\n',
91 b'\x01' + pygrack.GitRepository.EMPTY_PACK,
91 92 ]
92 93 assert packets == expected_packets
93 94
@@ -120,7 +121,7 b' def test_pull_has_hook_messages(pygrack_'
120 121 with mock.patch('vcsserver.hooks.git_post_pull',
121 122 return_value=hooks.HookResponse(1, 'bar')):
122 123 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 return_value=['0008NAK\n0009subp\n0000']):
124 return_value=more_itertools.always_iterable([b'0008NAK\n0009subp\n0000'])):
124 125 response = pygrack_app.post(
125 126 '/git-upload-pack', params=request,
126 127 content_type='application/x-git-upload-pack')
@@ -129,13 +130,13 b' def test_pull_has_hook_messages(pygrack_'
129 130 proto = dulwich.protocol.Protocol(data.read, None)
130 131 packets = list(proto.read_pkt_seq())
131 132
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133 assert packets == [b'NAK\n', b'\x02foo', b'subp\n', b'\x02bar']
133 134
134 135
135 136 def test_get_want_capabilities(pygrack_instance):
136 137 data = io.BytesIO(
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
138 b'0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
139 b'multi_ack side-band-64k ofs-delta\n00000009done\n')
139 140
140 141 request = webob.Request({
141 142 'wsgi.input': data,
@@ -146,20 +147,20 b' def test_get_want_capabilities(pygrack_i'
146 147 capabilities = pygrack_instance._get_want_capabilities(request)
147 148
148 149 assert capabilities == frozenset(
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 (b'ofs-delta', b'multi_ack', b'side-band-64k'))
150 151 assert data.tell() == 0
151 152
152 153
153 154 @pytest.mark.parametrize('data,capabilities,expected', [
154 155 ('foo', [], []),
155 ('', ['side-band-64k'], []),
156 ('', ['side-band'], []),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
156 ('', [pygrack.CAPABILITY_SIDE_BAND_64K], []),
157 ('', [pygrack.CAPABILITY_SIDE_BAND], []),
158 ('foo', [pygrack.CAPABILITY_SIDE_BAND_64K], [b'0008\x02foo']),
159 ('foo', [pygrack.CAPABILITY_SIDE_BAND], [b'0008\x02foo']),
160 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'03ed\x02' + b'f' * 1000]),
161 ('f'*1000, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995, b'000a\x02fffff']),
162 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND_64K], [b'fff0\x02' + b'f' * 65515, b'000a\x02fffff']),
163 ('f'*65520, [pygrack.CAPABILITY_SIDE_BAND], [b'03e8\x02' + b'f' * 995] * 65 + [b'0352\x02' + b'f' * 845]),
163 164 ], ids=[
164 165 'foo-empty',
165 166 'empty-64k', 'empty',
@@ -174,54 +175,59 b' def test_get_messages(pygrack_instance, '
174 175
175 176 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 177 # Unexpected response
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 ([b'unexpected_response[no_initial_header]'], [pygrack.CAPABILITY_SIDE_BAND_64K], 'foo', 'bar'),
178 179 # No sideband
179 ('no-sideband', [], 'foo', 'bar'),
180 ([b'no-sideband'], [], 'foo', 'bar'),
180 181 # No messages
181 ('no-messages', ['side-band-64k'], '', ''),
182 ([b'no-messages'], [pygrack.CAPABILITY_SIDE_BAND_64K], '', ''),
182 183 ])
183 184 def test_inject_messages_to_response_nothing_to_do(
184 pygrack_instance, response, capabilities, pre_pull_messages,
185 post_pull_messages):
186 new_response = pygrack_instance._inject_messages_to_response(
187 response, capabilities, pre_pull_messages, post_pull_messages)
185 pygrack_instance, response, capabilities, pre_pull_messages, post_pull_messages):
188 186
189 assert new_response == response
187 new_response = pygrack_instance._build_post_pull_response(
188 more_itertools.always_iterable(response), capabilities, pre_pull_messages, post_pull_messages)
189
190 assert list(new_response) == response
190 191
191 192
192 193 @pytest.mark.parametrize('capabilities', [
193 ['side-band'],
194 ['side-band-64k'],
194 [pygrack.CAPABILITY_SIDE_BAND],
195 [pygrack.CAPABILITY_SIDE_BAND_64K],
195 196 ])
196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 capabilities):
198 response = ['0008NAK\n0009subp\n0000']
199 new_response = pygrack_instance._inject_messages_to_response(
200 response, capabilities, 'foo', 'bar')
197 def test_inject_messages_to_response_single_element(pygrack_instance, capabilities):
198 response = [b'0008NAK\n0009subp\n0000']
199 new_response = pygrack_instance._build_post_pull_response(
200 more_itertools.always_iterable(response), capabilities, 'foo', 'bar')
201 201
202 expected_response = [
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
202 expected_response = b''.join([
203 b'0008NAK\n',
204 b'0008\x02foo',
205 b'0009subp\n',
206 b'0008\x02bar',
207 b'0000'])
204 208
205 assert new_response == expected_response
209 assert b''.join(new_response) == expected_response
206 210
207 211
208 212 @pytest.mark.parametrize('capabilities', [
209 ['side-band'],
210 ['side-band-64k'],
213 [pygrack.CAPABILITY_SIDE_BAND],
214 [pygrack.CAPABILITY_SIDE_BAND_64K],
215 ])
216 def test_inject_messages_to_response_multi_element(pygrack_instance, capabilities):
217 response = more_itertools.always_iterable([
218 b'0008NAK\n000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n0000'
211 219 ])
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 capabilities):
214 response = [
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 new_response = pygrack_instance._inject_messages_to_response(
217 response, capabilities, 'foo', 'bar')
220 new_response = pygrack_instance._build_post_pull_response(response, capabilities, 'foo', 'bar')
218 221
219 expected_response = [
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 '000asubp4\n', '0008\x02bar', '0000'
222 ]
222 expected_response = b''.join([
223 b'0008NAK\n',
224 b'0008\x02foo',
225 b'000asubp1\n', b'000asubp2\n', b'000asubp3\n', b'000asubp4\n',
226 b'0008\x02bar',
227 b'0000'
228 ])
223 229
224 assert new_response == expected_response
230 assert b''.join(new_response) == expected_response
225 231
226 232
227 233 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
@@ -231,19 +237,52 b' def test_build_failed_pre_pull_response_'
231 237
232 238
233 239 @pytest.mark.parametrize('capabilities', [
234 ['side-band'],
235 ['side-band-64k'],
236 ['side-band-64k', 'no-progress'],
240 [pygrack.CAPABILITY_SIDE_BAND],
241 [pygrack.CAPABILITY_SIDE_BAND_64K],
242 [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'],
237 243 ])
238 244 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 response = pygrack_instance._build_failed_pre_pull_response(
240 capabilities, 'foo')
245 response = pygrack_instance._build_failed_pre_pull_response(capabilities, 'foo')
241 246
242 247 expected_response = [
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 pygrack.GitRepository.EMPTY_PACK),
246 '0000',
248 b'0008NAK\n', b'0008\x02foo', b'0024\x02Pre pull hook failed: aborting\n',
249 b'%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5, pygrack.GitRepository.EMPTY_PACK),
250 pygrack.GitRepository.FLUSH_PACKET,
247 251 ]
248 252
249 253 assert response == expected_response
254
255
256 def test_inject_messages_to_response_generator(pygrack_instance):
257
258 def response_generator():
259 response = [
260 # protocol start
261 b'0008NAK\n',
262 ]
263 response += [ascii_bytes(f'000asubp{x}\n') for x in range(1000)]
264 response += [
265 # protocol end
266 pygrack.GitRepository.FLUSH_PACKET
267 ]
268 for elem in response:
269 yield elem
270
271 new_response = pygrack_instance._build_post_pull_response(
272 response_generator(), [pygrack.CAPABILITY_SIDE_BAND_64K, b'no-progress'], 'PRE_PULL_MSG\n', 'POST_PULL_MSG\n')
273
274 assert iter(new_response)
275
276 expected_response = b''.join([
277 # start
278 b'0008NAK\n0012\x02PRE_PULL_MSG\n',
279 ] + [
280 # ... rest
281 ascii_bytes(f'000asubp{x}\n') for x in range(1000)
282 ] + [
283 # final message,
284 b'0013\x02POST_PULL_MSG\n0000',
285
286 ])
287
288 assert b''.join(new_response) == expected_response
@@ -25,10 +25,11 b' import pytest'
25 25 import webtest
26 26
27 27 from vcsserver import scm_app
28 from vcsserver.utils import ascii_bytes
28 29
29 30
30 31 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
32 33 app = webtest.TestApp(scm_app.HgWeb(repo))
33 34
34 35 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
@@ -37,7 +38,7 b' def test_hg_does_not_accept_invalid_cmd('
37 38
38 39
39 40 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 repo = mercurial.hg.repository(mercurial.ui.ui(), ascii_bytes(str(tmpdir)), create=True)
41 42 config = (
42 43 ('paths', 'default', ''),
43 44 )
@@ -22,12 +22,13 b' import sys'
22 22 import pytest
23 23
24 24 from vcsserver import subprocessio
25 from vcsserver.utils import ascii_bytes
25 26
26 27
27 class KindaFilelike(object): # pragma: no cover
28 class FileLikeObj(object): # pragma: no cover
28 29
29 def __init__(self, data, size):
30 chunks = size / len(data)
30 def __init__(self, data: bytes, size):
31 chunks = size // len(data)
31 32
32 33 self.stream = self._get_stream(data, chunks)
33 34
@@ -37,7 +38,7 b' class KindaFilelike(object): # pragma: '
37 38
38 39 def read(self, n):
39 40
40 buffer_stream = ''
41 buffer_stream = b''
41 42 for chunk in self.stream:
42 43 buffer_stream += chunk
43 44 if len(buffer_stream) >= n:
@@ -63,93 +64,92 b' def _get_python_args(script):'
63 64
64 65
65 66 def test_raise_exception_on_non_zero_return_code(environ):
66 args = _get_python_args('sys.exit(1)')
67 with pytest.raises(EnvironmentError):
68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
67 call_args = _get_python_args('raise ValueError("fail")')
68 with pytest.raises(OSError):
69 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
69 70
70 71
71 72 def test_does_not_fail_on_non_zero_return_code(environ):
72 args = _get_python_args('sys.exit(1)')
73 output = ''.join(
74 subprocessio.SubprocessIOChunker(
75 args, shell=False, fail_on_return_code=False, env=environ
76 )
77 )
73 call_args = _get_python_args('sys.stdout.write("hello"); sys.exit(1)')
74 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
75 output = b''.join(proc)
78 76
79 assert output == ''
77 assert output == b'hello'
80 78
81 79
82 80 def test_raise_exception_on_stderr(environ):
83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
84 with pytest.raises(EnvironmentError) as excinfo:
85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
81 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); time.sleep(1);')
86 82
87 assert 'exited due to an error:\nX' in str(excinfo.value)
83 with pytest.raises(OSError) as excinfo:
84 b''.join(subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ))
85
86 assert 'exited due to an error:\nWRITE_TO_STDERR' in str(excinfo.value)
88 87
89 88
90 89 def test_does_not_fail_on_stderr(environ):
91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
92 output = ''.join(
93 subprocessio.SubprocessIOChunker(
94 args, shell=False, fail_on_stderr=False, env=environ
95 )
96 )
90 call_args = _get_python_args('sys.stderr.write("WRITE_TO_STDERR"); sys.stderr.flush; time.sleep(2);')
91 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_stderr=False, env=environ)
92 output = b''.join(proc)
97 93
98 assert output == ''
94 assert output == b''
99 95
100 96
101 @pytest.mark.parametrize('size', [1, 10 ** 5])
97 @pytest.mark.parametrize('size', [
98 1,
99 10 ** 5
100 ])
102 101 def test_output_with_no_input(size, environ):
103 print((type(environ)))
104 data = 'X'
105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
102 call_args = _get_python_args(f'sys.stdout.write("X" * {size});')
103 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, env=environ)
104 output = b''.join(proc)
107 105
108 assert output == data * size
106 assert output == ascii_bytes("X" * size)
109 107
110 108
111 @pytest.mark.parametrize('size', [1, 10 ** 5])
109 @pytest.mark.parametrize('size', [
110 1,
111 10 ** 5
112 ])
112 113 def test_output_with_no_input_does_not_fail(size, environ):
113 data = 'X'
114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
115 output = ''.join(
116 subprocessio.SubprocessIOChunker(
117 args, shell=False, fail_on_return_code=False, env=environ
118 )
119 )
120 114
121 print(("{} {}".format(len(data * size), len(output))))
122 assert output == data * size
115 call_args = _get_python_args(f'sys.stdout.write("X" * {size}); sys.exit(1)')
116 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, fail_on_return_code=False, env=environ)
117 output = b''.join(proc)
118
119 assert output == ascii_bytes("X" * size)
123 120
124 121
125 @pytest.mark.parametrize('size', [1, 10 ** 5])
122 @pytest.mark.parametrize('size', [
123 1,
124 10 ** 5
125 ])
126 126 def test_output_with_input(size, environ):
127 127 data_len = size
128 inputstream = KindaFilelike('X', size)
128 inputstream = FileLikeObj(b'X', size)
129 129
130 130 # This acts like the cat command.
131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 output = ''.join(
133 subprocessio.SubprocessIOChunker(
134 args, shell=False, inputstream=inputstream, env=environ
135 )
131 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132     # note: in this test we explicitly don't assign the chunker to a variable and let it stream directly
133 output = b''.join(
134 subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
136 135 )
137 136
138 137 assert len(output) == data_len
139 138
140 139
141 @pytest.mark.parametrize('size', [1, 10 ** 5])
140 @pytest.mark.parametrize('size', [
141 1,
142 10 ** 5
143 ])
142 144 def test_output_with_input_skipping_iterator(size, environ):
143 145 data_len = size
144 inputstream = KindaFilelike('X', size)
146 inputstream = FileLikeObj(b'X', size)
145 147
146 148 # This acts like the cat command.
147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
149 call_args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
148 150
149 151 # Note: assigning the chunker makes sure that it is not deleted too early
150 chunker = subprocessio.SubprocessIOChunker(
151 args, shell=False, inputstream=inputstream, env=environ
152 )
153 output = ''.join(chunker.output)
152 proc = subprocessio.SubprocessIOChunker(call_args, shell=False, input_stream=inputstream, env=environ)
153 output = b''.join(proc.stdout)
154 154
155 155 assert len(output) == data_len
@@ -20,10 +20,12 b' import mock'
20 20 import pytest
21 21 import sys
22 22
23 from vcsserver.utils import ascii_bytes
24
23 25
24 26 class MockPopen(object):
25 27 def __init__(self, stderr):
26 self.stdout = io.BytesIO('')
28 self.stdout = io.BytesIO(b'')
27 29 self.stderr = io.BytesIO(stderr)
28 30 self.returncode = 1
29 31
@@ -52,14 +54,13 b' def test_import_remote_repository_certif'
52 54 remote.is_path_valid_repository = lambda wire, path: True
53 55
54 56 with mock.patch('subprocess.Popen',
55 return_value=MockPopen(stderr)):
57 return_value=MockPopen(ascii_bytes(stderr))):
56 58 with pytest.raises(Exception) as excinfo:
57 59 remote.import_remote_repository({'path': 'path'}, 'url')
58 60
59 expected_error_args = (
60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
61 62
62 assert excinfo.value.args == expected_error_args
63 assert excinfo.value.args[0] == expected_error_args
63 64
64 65
65 66 def test_svn_libraries_can_be_imported():
@@ -84,3 +85,19 b' def test_username_password_extraction_fr'
84 85 remote.is_path_valid_repository = lambda wire, path: True
85 86
86 87 assert remote.get_url_and_credentials(example_url) == parts
88
89
90 @pytest.mark.parametrize('call_url', [
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
94 ])
95 def test_check_url(call_url):
96 from vcsserver.remote import svn
97 factory = mock.Mock()
98 factory.repo = mock.Mock(return_value=mock.Mock())
99
100 remote = svn.SvnRemote(factory)
101 remote.is_path_valid_repository = lambda wire, path: True
102 assert remote.check_url(call_url)
103
@@ -19,25 +19,26 b' import wsgiref.simple_server'
19 19 import wsgiref.validate
20 20
21 21 from vcsserver import wsgi_app_caller
22
23
24 # pylint: disable=protected-access,too-many-public-methods
22 from vcsserver.utils import ascii_bytes, safe_str
25 23
26 24
27 25 @wsgiref.validate.validator
28 26 def demo_app(environ, start_response):
29 27 """WSGI app used for testing."""
28
29 input_data = safe_str(environ['wsgi.input'].read(1024))
30
30 31 data = [
31 'Hello World!\n',
32 'input_data=%s\n' % environ['wsgi.input'].read(),
32 f'Hello World!\n',
33 f'input_data={input_data}\n',
33 34 ]
34 35 for key, value in sorted(environ.items()):
35 data.append('%s=%s\n' % (key, value))
36 data.append(f'{key}={value}\n')
36 37
37 38 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write('Old school write method\n')
39 write('***********************\n')
40 return data
39 write(b'Old school write method\n')
40 write(b'***********************\n')
41 return list(map(ascii_bytes, data))
41 42
42 43
43 44 BASE_ENVIRON = {
@@ -53,11 +54,11 b' BASE_ENVIRON = {'
53 54
54 55 def test_complete_environ():
55 56 environ = dict(BASE_ENVIRON)
56 data = "data"
57 data = b"data"
57 58 wsgi_app_caller._complete_environ(environ, data)
58 59 wsgiref.validate.check_environ(environ)
59 60
60 assert data == environ['wsgi.input'].read()
61 assert data == environ['wsgi.input'].read(1024)
61 62
62 63
63 64 def test_start_response():
@@ -81,16 +82,17 b' def test_start_response_with_error():'
81 82
82 83
83 84 def test_wsgi_app_caller():
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 85 environ = dict(BASE_ENVIRON)
86 86 input_data = 'some text'
87
88 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
87 89 responses, status, headers = caller.handle(environ, input_data)
88 response = ''.join(responses)
90 response = b''.join(responses)
89 91
90 92 assert status == '200 OK'
91 93 assert headers == [('Content-Type', 'text/plain')]
92 assert response.startswith(
93 'Old school write method\n***********************\n')
94 assert 'Hello World!\n' in response
95 assert 'foo.var=bla\n' in response
96 assert 'input_data=%s\n' % input_data in response
94 assert response.startswith(b'Old school write method\n***********************\n')
95 assert b'Hello World!\n' in response
96 assert b'foo.var=bla\n' in response
97
98 assert ascii_bytes(f'input_data={input_data}\n') in response
@@ -19,8 +19,7 b' import time'
19 19 import logging
20 20
21 21 import vcsserver
22 from vcsserver.utils import safe_str
23
22 from vcsserver.utils import safe_str, ascii_str
24 23
25 24 log = logging.getLogger(__name__)
26 25
@@ -62,7 +61,7 b' class RequestWrapperTween(object):'
62 61 response = self.handler(request)
63 62 finally:
64 63 count = request.request_count()
65 _ver_ = vcsserver.__version__
64 _ver_ = ascii_str(vcsserver.__version__)
66 65 _path = safe_str(get_access_path(request.environ))
67 66 ip = '127.0.0.1'
68 67 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
@@ -70,7 +69,7 b' class RequestWrapperTween(object):'
70 69
71 70 total = time.time() - start
72 71
73 _view_path = "{}/{}@{}".format(_path, vcs_method, repo_name)
72 _view_path = f"{repo_name}@{_path}/{vcs_method}"
74 73 log.info(
75 74 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
76 75 count, ip, request.environ.get('REQUEST_METHOD'),
@@ -91,7 +91,39 b' def safe_bytes(str_, from_encoding=None)'
91 91 except UnicodeDecodeError:
92 92 pass
93 93
94 return unicode(str_, from_encoding[0], 'replace')
94 return str_.encode(from_encoding[0], 'replace')
95
96
97 def ascii_bytes(str_, allow_bytes=False) -> bytes:
98 """
99 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
100 Fails with UnicodeError on invalid input.
101 This should be used where encoding and "safe" ambiguity should be avoided.
102 Where strings already have been encoded in other ways but still are unicode
103 string - for example to hex, base64, json, urlencoding, or are known to be
104 identifiers.
105 """
106 if allow_bytes and isinstance(str_, bytes):
107 return str_
108
109 if not isinstance(str_, str):
110 raise ValueError('ascii_bytes cannot convert other types than str: got: {}'.format(type(str_)))
111 return str_.encode('ascii')
112
113
114 def ascii_str(str_):
115 """
116 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
117 Fails with UnicodeError on invalid input.
118 This should be used where encoding and "safe" ambiguity should be avoided.
119 Where strings are encoded but also in other ways are known to be ASCII, and
120 where a unicode string is wanted without caring about encoding. For example
121 to hex, base64, urlencoding, or are known to be identifiers.
122 """
123
124 if not isinstance(str_, bytes):
125 raise ValueError('ascii_str cannot convert other types than bytes: got: {}'.format(type(str_)))
126 return str_.decode('ascii')
95 127
96 128
97 129 class AttributeDict(dict):
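The strict helpers added above deliberately raise instead of guessing an encoding. A short sketch of the contract their docstrings describe; the sample values are illustrative:

    from vcsserver.utils import ascii_bytes, ascii_str

    assert ascii_bytes('deadbeef') == b'deadbeef'     # pure-ASCII str -> bytes
    assert ascii_str(b'deadbeef') == 'deadbeef'       # pure-ASCII bytes -> str

    # passing the "wrong" type is an error, never a silent conversion
    try:
        ascii_bytes(b'already-bytes')                 # only allowed with allow_bytes=True
    except ValueError:
        pass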
@@ -103,5 +135,3 b' class AttributeDict(dict):'
103 135
104 136 def sha1(val):
105 137 return hashlib.sha1(val).hexdigest()
106
107
@@ -17,6 +17,7 b''
17 17
18 18 from vcsserver.lib import rc_cache
19 19
20
20 21 class RemoteBase(object):
21 22 EMPTY_COMMIT = '0' * 40
22 23
@@ -23,19 +23,20 b' import io'
23 23 import logging
24 24 import os
25 25
26 from vcsserver.utils import ascii_bytes
26 27
27 28 log = logging.getLogger(__name__)
28 29
29 30 DEV_NULL = open(os.devnull)
30 31
31 32
32 def _complete_environ(environ, input_data):
33 def _complete_environ(environ, input_data: bytes):
33 34 """Update the missing wsgi.* variables of a WSGI environment.
34 35
35 36 :param environ: WSGI environment to update
36 37 :type environ: dict
37 38 :param input_data: data to be read by the app
38 :type input_data: str
39 :type input_data: bytes
39 40 """
40 41 environ.update({
41 42 'wsgi.version': (1, 0),
@@ -92,20 +93,19 b' class WSGIAppCaller(object):'
92 93 :param environ: WSGI environment to update
93 94 :type environ: dict
94 95 :param input_data: data to be read by the app
95 :type input_data: str
96 :type input_data: str/bytes
96 97
97 98 :returns: a tuple with the contents, status and headers
98 99 :rtype: (list<str>, str, list<(str, str)>)
99 100 """
100 _complete_environ(environ, input_data)
101 _complete_environ(environ, ascii_bytes(input_data, allow_bytes=True))
101 102 start_response = _StartResponse()
102 103 log.debug("Calling wrapped WSGI application")
103 104 responses = self.app(environ, start_response)
104 105 responses_list = list(responses)
105 106 existing_responses = start_response.content
106 107 if existing_responses:
107 log.debug(
108 "Adding returned response to response written via write()")
108 log.debug("Adding returned response to response written via write()")
109 109 existing_responses.extend(responses_list)
110 110 responses_list = existing_responses
111 111 if hasattr(responses, 'close'):
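A minimal sketch of driving WSGIAppCaller directly; the tiny app and environ below are hypothetical, chosen only to show the bytes-in/bytes-out contract of handle():

    from vcsserver.wsgi_app_caller import WSGIAppCaller

    def tiny_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'payload=', environ['wsgi.input'].read(1024)]

    environ = {
        'REQUEST_METHOD': 'POST', 'PATH_INFO': '/', 'SCRIPT_NAME': '',
        'SERVER_NAME': 'localhost', 'SERVER_PORT': '80', 'SERVER_PROTOCOL': 'HTTP/1.0',
    }
    responses, status, headers = WSGIAppCaller(tiny_app).handle(environ, b'abc')

    assert status == '200 OK'
    assert b''.join(responses) == b'payload=abc'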