chore(refactor): renamed rc_json to ext_json for ce compat
super-admin
r1243:d32b737d default
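The whole commit is a one-line module rename repeated in each file below; a minimal before/after sketch (import paths copied from the diff, the final call only illustrating that the call sites keep using it as a regular json module):

# old import, removed in this commit
# from vcsserver.lib.rc_json import json

# new import, used by every call site in the files below
from vcsserver.lib.ext_json import json

json.dumps({'message': 'ok'})  # helpers such as write_response_error stay unchanged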
@@ -1,296 +1,296 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import logging
19 import logging
20
20
21 from pyramid.config import Configurator
21 from pyramid.config import Configurator
22 from pyramid.response import Response, FileIter
22 from pyramid.response import Response, FileIter
23 from pyramid.httpexceptions import (
23 from pyramid.httpexceptions import (
24 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
24 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
25 HTTPUnprocessableEntity)
25 HTTPUnprocessableEntity)
26
26
27 - from vcsserver.lib.rc_json import json
27 + from vcsserver.lib.ext_json import json
28 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
28 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
29 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.str_utils import safe_int
30 from vcsserver.str_utils import safe_int
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
35 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
36 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
36 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37
37
38
38
39 def write_response_error(http_exception, text=None):
39 def write_response_error(http_exception, text=None):
40 content_type = GIT_LFS_CONTENT_TYPE + '+json'
40 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 _exception = http_exception(content_type=content_type)
41 _exception = http_exception(content_type=content_type)
42 _exception.content_type = content_type
42 _exception.content_type = content_type
43 if text:
43 if text:
44 _exception.body = json.dumps({'message': text})
44 _exception.body = json.dumps({'message': text})
45 log.debug('LFS: writing response of type %s to client with text:%s',
45 log.debug('LFS: writing response of type %s to client with text:%s',
46 http_exception, text)
46 http_exception, text)
47 return _exception
47 return _exception
48
48
49
49
50 class AuthHeaderRequired:
50 class AuthHeaderRequired:
51 """
51 """
52 Decorator to check if request has proper auth-header
52 Decorator to check if request has proper auth-header
53 """
53 """
54
54
55 def __call__(self, func):
55 def __call__(self, func):
56 return get_cython_compat_decorator(self.__wrapper, func)
56 return get_cython_compat_decorator(self.__wrapper, func)
57
57
58 def __wrapper(self, func, *fargs, **fkwargs):
58 def __wrapper(self, func, *fargs, **fkwargs):
59 request = fargs[1]
59 request = fargs[1]
60 auth = request.authorization
60 auth = request.authorization
61 if not auth:
61 if not auth:
62 return write_response_error(HTTPForbidden)
62 return write_response_error(HTTPForbidden)
63 return func(*fargs[1:], **fkwargs)
63 return func(*fargs[1:], **fkwargs)
64
64
65
65
66 # views
66 # views
67
67
68 def lfs_objects(request):
68 def lfs_objects(request):
69 # indicate not supported, V1 API
69 # indicate not supported, V1 API
70 log.warning('LFS: v1 api not supported, reporting it back to client')
70 log.warning('LFS: v1 api not supported, reporting it back to client')
71 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
71 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
72
72
73
73
74 @AuthHeaderRequired()
74 @AuthHeaderRequired()
75 def lfs_objects_batch(request):
75 def lfs_objects_batch(request):
76 """
76 """
77 The client sends the following information to the Batch endpoint to transfer some objects:
77 The client sends the following information to the Batch endpoint to transfer some objects:
78
78
79 operation - Should be download or upload.
79 operation - Should be download or upload.
80 transfers - An optional Array of String identifiers for transfer
80 transfers - An optional Array of String identifiers for transfer
81 adapters that the client has configured. If omitted, the basic
81 adapters that the client has configured. If omitted, the basic
82 transfer adapter MUST be assumed by the server.
82 transfer adapter MUST be assumed by the server.
83 objects - An Array of objects to download.
83 objects - An Array of objects to download.
84 oid - String OID of the LFS object.
84 oid - String OID of the LFS object.
85 size - Integer byte size of the LFS object. Must be at least zero.
85 size - Integer byte size of the LFS object. Must be at least zero.
86 """
86 """
87 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
87 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
88 auth = request.authorization
88 auth = request.authorization
89 repo = request.matchdict.get('repo')
89 repo = request.matchdict.get('repo')
90 data = request.json
90 data = request.json
91 operation = data.get('operation')
91 operation = data.get('operation')
92 http_scheme = request.registry.git_lfs_http_scheme
92 http_scheme = request.registry.git_lfs_http_scheme
93
93
94 if operation not in ('download', 'upload'):
94 if operation not in ('download', 'upload'):
95 log.debug('LFS: unsupported operation:%s', operation)
95 log.debug('LFS: unsupported operation:%s', operation)
96 return write_response_error(
96 return write_response_error(
97 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
97 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
98
98
99 if 'objects' not in data:
99 if 'objects' not in data:
100 log.debug('LFS: missing objects data')
100 log.debug('LFS: missing objects data')
101 return write_response_error(
101 return write_response_error(
102 HTTPBadRequest, 'missing objects data')
102 HTTPBadRequest, 'missing objects data')
103
103
104 log.debug('LFS: handling operation of type: %s', operation)
104 log.debug('LFS: handling operation of type: %s', operation)
105
105
106 objects = []
106 objects = []
107 for o in data['objects']:
107 for o in data['objects']:
108 try:
108 try:
109 oid = o['oid']
109 oid = o['oid']
110 obj_size = o['size']
110 obj_size = o['size']
111 except KeyError:
111 except KeyError:
112 log.exception('LFS, failed to extract data')
112 log.exception('LFS, failed to extract data')
113 return write_response_error(
113 return write_response_error(
114 HTTPBadRequest, 'unsupported data in objects')
114 HTTPBadRequest, 'unsupported data in objects')
115
115
116 obj_data = {'oid': oid}
116 obj_data = {'oid': oid}
117 if http_scheme == 'http':
117 if http_scheme == 'http':
118 # Note(marcink): when using http, we might have a custom port
118 # Note(marcink): when using http, we might have a custom port
119 # so we skip setting it to http, url dispatch then wont generate a port in URL
119 # so we skip setting it to http, url dispatch then wont generate a port in URL
120 # for development we need this
120 # for development we need this
121 http_scheme = None
121 http_scheme = None
122
122
123 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
123 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
124 _scheme=http_scheme)
124 _scheme=http_scheme)
125 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
125 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
126 _scheme=http_scheme)
126 _scheme=http_scheme)
127 store = LFSOidStore(
127 store = LFSOidStore(
128 oid, repo, store_location=request.registry.git_lfs_store_path)
128 oid, repo, store_location=request.registry.git_lfs_store_path)
129 handler = OidHandler(
129 handler = OidHandler(
130 store, repo, auth, oid, obj_size, obj_data,
130 store, repo, auth, oid, obj_size, obj_data,
131 obj_href, obj_verify_href)
131 obj_href, obj_verify_href)
132
132
133 # this verifies also OIDs
133 # this verifies also OIDs
134 actions, errors = handler.exec_operation(operation)
134 actions, errors = handler.exec_operation(operation)
135 if errors:
135 if errors:
136 log.warning('LFS: got following errors: %s', errors)
136 log.warning('LFS: got following errors: %s', errors)
137 obj_data['errors'] = errors
137 obj_data['errors'] = errors
138
138
139 if actions:
139 if actions:
140 obj_data['actions'] = actions
140 obj_data['actions'] = actions
141
141
142 obj_data['size'] = obj_size
142 obj_data['size'] = obj_size
143 obj_data['authenticated'] = True
143 obj_data['authenticated'] = True
144 objects.append(obj_data)
144 objects.append(obj_data)
145
145
146 result = {'objects': objects, 'transfer': 'basic'}
146 result = {'objects': objects, 'transfer': 'basic'}
147 log.debug('LFS Response %s', safe_result(result))
147 log.debug('LFS Response %s', safe_result(result))
148
148
149 return result
149 return result
150
150
151
151
152 def lfs_objects_oid_upload(request):
152 def lfs_objects_oid_upload(request):
153 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
153 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
154 repo = request.matchdict.get('repo')
154 repo = request.matchdict.get('repo')
155 oid = request.matchdict.get('oid')
155 oid = request.matchdict.get('oid')
156 store = LFSOidStore(
156 store = LFSOidStore(
157 oid, repo, store_location=request.registry.git_lfs_store_path)
157 oid, repo, store_location=request.registry.git_lfs_store_path)
158 engine = store.get_engine(mode='wb')
158 engine = store.get_engine(mode='wb')
159 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
159 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
160
160
161 body = request.environ['wsgi.input']
161 body = request.environ['wsgi.input']
162
162
163 with engine as f:
163 with engine as f:
164 blksize = 64 * 1024 # 64kb
164 blksize = 64 * 1024 # 64kb
165 while True:
165 while True:
166 # read in chunks as stream comes in from Gunicorn
166 # read in chunks as stream comes in from Gunicorn
167 # this is a specific Gunicorn support function.
167 # this is a specific Gunicorn support function.
168 # might work differently on waitress
168 # might work differently on waitress
169 chunk = body.read(blksize)
169 chunk = body.read(blksize)
170 if not chunk:
170 if not chunk:
171 break
171 break
172 f.write(chunk)
172 f.write(chunk)
173
173
174 return {'upload': 'ok'}
174 return {'upload': 'ok'}
175
175
176
176
177 def lfs_objects_oid_download(request):
177 def lfs_objects_oid_download(request):
178 repo = request.matchdict.get('repo')
178 repo = request.matchdict.get('repo')
179 oid = request.matchdict.get('oid')
179 oid = request.matchdict.get('oid')
180
180
181 store = LFSOidStore(
181 store = LFSOidStore(
182 oid, repo, store_location=request.registry.git_lfs_store_path)
182 oid, repo, store_location=request.registry.git_lfs_store_path)
183 if not store.has_oid():
183 if not store.has_oid():
184 log.debug('LFS: oid %s does not exists in store', oid)
184 log.debug('LFS: oid %s does not exists in store', oid)
185 return write_response_error(
185 return write_response_error(
186 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
186 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
187
187
188 # TODO(marcink): support range header ?
188 # TODO(marcink): support range header ?
189 # Range: bytes=0-, `bytes=(\d+)\-.*`
189 # Range: bytes=0-, `bytes=(\d+)\-.*`
190
190
191 f = open(store.oid_path, 'rb')
191 f = open(store.oid_path, 'rb')
192 response = Response(
192 response = Response(
193 content_type='application/octet-stream', app_iter=FileIter(f))
193 content_type='application/octet-stream', app_iter=FileIter(f))
194 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
194 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
195 return response
195 return response
196
196
197
197
198 def lfs_objects_verify(request):
198 def lfs_objects_verify(request):
199 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
199 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
200 repo = request.matchdict.get('repo')
200 repo = request.matchdict.get('repo')
201
201
202 data = request.json
202 data = request.json
203 oid = data.get('oid')
203 oid = data.get('oid')
204 size = safe_int(data.get('size'))
204 size = safe_int(data.get('size'))
205
205
206 if not (oid and size):
206 if not (oid and size):
207 return write_response_error(
207 return write_response_error(
208 HTTPBadRequest, 'missing oid and size in request data')
208 HTTPBadRequest, 'missing oid and size in request data')
209
209
210 store = LFSOidStore(
210 store = LFSOidStore(
211 oid, repo, store_location=request.registry.git_lfs_store_path)
211 oid, repo, store_location=request.registry.git_lfs_store_path)
212 if not store.has_oid():
212 if not store.has_oid():
213 log.debug('LFS: oid %s does not exists in store', oid)
213 log.debug('LFS: oid %s does not exists in store', oid)
214 return write_response_error(
214 return write_response_error(
215 HTTPNotFound, f'oid `{oid}` does not exists in store')
215 HTTPNotFound, f'oid `{oid}` does not exists in store')
216
216
217 store_size = store.size_oid()
217 store_size = store.size_oid()
218 if store_size != size:
218 if store_size != size:
219 msg = 'requested file size mismatch store size:{} requested:{}'.format(
219 msg = 'requested file size mismatch store size:{} requested:{}'.format(
220 store_size, size)
220 store_size, size)
221 return write_response_error(
221 return write_response_error(
222 HTTPUnprocessableEntity, msg)
222 HTTPUnprocessableEntity, msg)
223
223
224 return {'message': {'size': 'ok', 'in_store': 'ok'}}
224 return {'message': {'size': 'ok', 'in_store': 'ok'}}
225
225
226
226
227 def lfs_objects_lock(request):
227 def lfs_objects_lock(request):
228 return write_response_error(
228 return write_response_error(
229 HTTPNotImplemented, 'GIT LFS locking api not supported')
229 HTTPNotImplemented, 'GIT LFS locking api not supported')
230
230
231
231
232 def not_found(request):
232 def not_found(request):
233 return write_response_error(
233 return write_response_error(
234 HTTPNotFound, 'request path not found')
234 HTTPNotFound, 'request path not found')
235
235
236
236
237 def lfs_disabled(request):
237 def lfs_disabled(request):
238 return write_response_error(
238 return write_response_error(
239 HTTPNotImplemented, 'GIT LFS disabled for this repo')
239 HTTPNotImplemented, 'GIT LFS disabled for this repo')
240
240
241
241
242 def git_lfs_app(config):
242 def git_lfs_app(config):
243
243
244 # v1 API deprecation endpoint
244 # v1 API deprecation endpoint
245 config.add_route('lfs_objects',
245 config.add_route('lfs_objects',
246 '/{repo:.*?[^/]}/info/lfs/objects')
246 '/{repo:.*?[^/]}/info/lfs/objects')
247 config.add_view(lfs_objects, route_name='lfs_objects',
247 config.add_view(lfs_objects, route_name='lfs_objects',
248 request_method='POST', renderer='json')
248 request_method='POST', renderer='json')
249
249
250 # locking API
250 # locking API
251 config.add_route('lfs_objects_lock',
251 config.add_route('lfs_objects_lock',
252 '/{repo:.*?[^/]}/info/lfs/locks')
252 '/{repo:.*?[^/]}/info/lfs/locks')
253 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
253 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
254 request_method=('POST', 'GET'), renderer='json')
254 request_method=('POST', 'GET'), renderer='json')
255
255
256 config.add_route('lfs_objects_lock_verify',
256 config.add_route('lfs_objects_lock_verify',
257 '/{repo:.*?[^/]}/info/lfs/locks/verify')
257 '/{repo:.*?[^/]}/info/lfs/locks/verify')
258 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
258 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
259 request_method=('POST', 'GET'), renderer='json')
259 request_method=('POST', 'GET'), renderer='json')
260
260
261 # batch API
261 # batch API
262 config.add_route('lfs_objects_batch',
262 config.add_route('lfs_objects_batch',
263 '/{repo:.*?[^/]}/info/lfs/objects/batch')
263 '/{repo:.*?[^/]}/info/lfs/objects/batch')
264 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
264 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
265 request_method='POST', renderer='json')
265 request_method='POST', renderer='json')
266
266
267 # oid upload/download API
267 # oid upload/download API
268 config.add_route('lfs_objects_oid',
268 config.add_route('lfs_objects_oid',
269 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
269 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
270 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
270 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
271 request_method='PUT', renderer='json')
271 request_method='PUT', renderer='json')
272 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
272 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
273 request_method='GET', renderer='json')
273 request_method='GET', renderer='json')
274
274
275 # verification API
275 # verification API
276 config.add_route('lfs_objects_verify',
276 config.add_route('lfs_objects_verify',
277 '/{repo:.*?[^/]}/info/lfs/verify')
277 '/{repo:.*?[^/]}/info/lfs/verify')
278 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
278 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
279 request_method='POST', renderer='json')
279 request_method='POST', renderer='json')
280
280
281 # not found handler for API
281 # not found handler for API
282 config.add_notfound_view(not_found, renderer='json')
282 config.add_notfound_view(not_found, renderer='json')
283
283
284
284
285 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
285 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
286 config = Configurator()
286 config = Configurator()
287 if git_lfs_enabled:
287 if git_lfs_enabled:
288 config.include(git_lfs_app)
288 config.include(git_lfs_app)
289 config.registry.git_lfs_store_path = git_lfs_store_path
289 config.registry.git_lfs_store_path = git_lfs_store_path
290 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
290 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
291 else:
291 else:
292 # not found handler for API, reporting disabled LFS support
292 # not found handler for API, reporting disabled LFS support
293 config.add_notfound_view(lfs_disabled, renderer='json')
293 config.add_notfound_view(lfs_disabled, renderer='json')
294
294
295 app = config.make_wsgi_app()
295 app = config.make_wsgi_app()
296 return app
296 return app
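For context on the routes registered above, the batch endpoint implements the Git LFS batch exchange described in the lfs_objects_batch docstring; a client-side sketch of one round trip (host, repo name and credentials are placeholders, and requests is used purely for illustration):

import requests  # illustration only; any HTTP client works

batch_url = 'http://localhost/repo/info/lfs/objects/batch'  # placeholder host and repo
payload = {
    'operation': 'download',                    # or 'upload'
    'objects': [{'oid': '456', 'size': 1024}],  # one entry per LFS object
}
response = requests.post(
    batch_url,
    json=payload,
    headers={
        'Authorization': 'Basic XXXXX',            # lfs_objects_batch rejects unauthenticated calls
        'Accept': 'application/vnd.git-lfs+json',
    },
)
# The view answers {'transfer': 'basic', 'objects': [...]}, where each object
# carries either 'actions' (download/upload/verify hrefs and headers) or an
# 'errors' entry when the oid is not present in the store.
print(response.json())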
@@ -1,274 +1,274 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21
21
22 - from vcsserver.lib.rc_json import json
22 + from vcsserver.lib.ext_json import json
23 from vcsserver.str_utils import safe_bytes
23 from vcsserver.str_utils import safe_bytes
24 from vcsserver.git_lfs.app import create_app
24 from vcsserver.git_lfs.app import create_app
25 from vcsserver.git_lfs.lib import LFSOidStore
25 from vcsserver.git_lfs.lib import LFSOidStore
26
26
27
27
28 @pytest.fixture(scope='function')
28 @pytest.fixture(scope='function')
29 def git_lfs_app(tmpdir):
29 def git_lfs_app(tmpdir):
30 custom_app = WebObTestApp(create_app(
30 custom_app = WebObTestApp(create_app(
31 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
31 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
32 git_lfs_http_scheme='http'))
32 git_lfs_http_scheme='http'))
33 custom_app._store = str(tmpdir)
33 custom_app._store = str(tmpdir)
34 return custom_app
34 return custom_app
35
35
36
36
37 @pytest.fixture(scope='function')
37 @pytest.fixture(scope='function')
38 def git_lfs_https_app(tmpdir):
38 def git_lfs_https_app(tmpdir):
39 custom_app = WebObTestApp(create_app(
39 custom_app = WebObTestApp(create_app(
40 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
40 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
41 git_lfs_http_scheme='https'))
41 git_lfs_http_scheme='https'))
42 custom_app._store = str(tmpdir)
42 custom_app._store = str(tmpdir)
43 return custom_app
43 return custom_app
44
44
45
45
46 @pytest.fixture()
46 @pytest.fixture()
47 def http_auth():
47 def http_auth():
48 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
48 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
49
49
50
50
51 class TestLFSApplication:
51 class TestLFSApplication:
52
52
53 def test_app_wrong_path(self, git_lfs_app):
53 def test_app_wrong_path(self, git_lfs_app):
54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
55
55
56 def test_app_deprecated_endpoint(self, git_lfs_app):
56 def test_app_deprecated_endpoint(self, git_lfs_app):
57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
58 assert response.status_code == 501
58 assert response.status_code == 501
59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
60
60
61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
63 assert response.status_code == 501
63 assert response.status_code == 501
64 assert json.loads(response.text) == {
64 assert json.loads(response.text) == {
65 'message': 'GIT LFS locking api not supported'}
65 'message': 'GIT LFS locking api not supported'}
66
66
67 def test_app_lock_api_not_available(self, git_lfs_app):
67 def test_app_lock_api_not_available(self, git_lfs_app):
68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
69 assert response.status_code == 501
69 assert response.status_code == 501
70 assert json.loads(response.text) == {
70 assert json.loads(response.text) == {
71 'message': 'GIT LFS locking api not supported'}
71 'message': 'GIT LFS locking api not supported'}
72
72
73 def test_app_batch_api_missing_auth(self, git_lfs_app):
73 def test_app_batch_api_missing_auth(self, git_lfs_app):
74 git_lfs_app.post_json(
74 git_lfs_app.post_json(
75 '/repo/info/lfs/objects/batch', params={}, status=403)
75 '/repo/info/lfs/objects/batch', params={}, status=403)
76
76
77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
78 response = git_lfs_app.post_json(
78 response = git_lfs_app.post_json(
79 '/repo/info/lfs/objects/batch', params={}, status=400,
79 '/repo/info/lfs/objects/batch', params={}, status=400,
80 extra_environ=http_auth)
80 extra_environ=http_auth)
81 assert json.loads(response.text) == {
81 assert json.loads(response.text) == {
82 'message': 'unsupported operation mode: `None`'}
82 'message': 'unsupported operation mode: `None`'}
83
83
84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
85 response = git_lfs_app.post_json(
85 response = git_lfs_app.post_json(
86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
87 status=400, extra_environ=http_auth)
87 status=400, extra_environ=http_auth)
88 assert json.loads(response.text) == {
88 assert json.loads(response.text) == {
89 'message': 'missing objects data'}
89 'message': 'missing objects data'}
90
90
91 def test_app_batch_api_unsupported_data_in_objects(
91 def test_app_batch_api_unsupported_data_in_objects(
92 self, git_lfs_app, http_auth):
92 self, git_lfs_app, http_auth):
93 params = {'operation': 'download',
93 params = {'operation': 'download',
94 'objects': [{}]}
94 'objects': [{}]}
95 response = git_lfs_app.post_json(
95 response = git_lfs_app.post_json(
96 '/repo/info/lfs/objects/batch', params=params, status=400,
96 '/repo/info/lfs/objects/batch', params=params, status=400,
97 extra_environ=http_auth)
97 extra_environ=http_auth)
98 assert json.loads(response.text) == {
98 assert json.loads(response.text) == {
99 'message': 'unsupported data in objects'}
99 'message': 'unsupported data in objects'}
100
100
101 def test_app_batch_api_download_missing_object(
101 def test_app_batch_api_download_missing_object(
102 self, git_lfs_app, http_auth):
102 self, git_lfs_app, http_auth):
103 params = {'operation': 'download',
103 params = {'operation': 'download',
104 'objects': [{'oid': '123', 'size': '1024'}]}
104 'objects': [{'oid': '123', 'size': '1024'}]}
105 response = git_lfs_app.post_json(
105 response = git_lfs_app.post_json(
106 '/repo/info/lfs/objects/batch', params=params,
106 '/repo/info/lfs/objects/batch', params=params,
107 extra_environ=http_auth)
107 extra_environ=http_auth)
108
108
109 expected_objects = [
109 expected_objects = [
110 {'authenticated': True,
110 {'authenticated': True,
111 'errors': {'error': {
111 'errors': {'error': {
112 'code': 404,
112 'code': 404,
113 'message': 'object: 123 does not exist in store'}},
113 'message': 'object: 123 does not exist in store'}},
114 'oid': '123',
114 'oid': '123',
115 'size': '1024'}
115 'size': '1024'}
116 ]
116 ]
117 assert json.loads(response.text) == {
117 assert json.loads(response.text) == {
118 'objects': expected_objects, 'transfer': 'basic'}
118 'objects': expected_objects, 'transfer': 'basic'}
119
119
120 def test_app_batch_api_download(self, git_lfs_app, http_auth):
120 def test_app_batch_api_download(self, git_lfs_app, http_auth):
121 oid = '456'
121 oid = '456'
122 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
122 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
123 if not os.path.isdir(os.path.dirname(oid_path)):
123 if not os.path.isdir(os.path.dirname(oid_path)):
124 os.makedirs(os.path.dirname(oid_path))
124 os.makedirs(os.path.dirname(oid_path))
125 with open(oid_path, 'wb') as f:
125 with open(oid_path, 'wb') as f:
126 f.write(safe_bytes('OID_CONTENT'))
126 f.write(safe_bytes('OID_CONTENT'))
127
127
128 params = {'operation': 'download',
128 params = {'operation': 'download',
129 'objects': [{'oid': oid, 'size': '1024'}]}
129 'objects': [{'oid': oid, 'size': '1024'}]}
130 response = git_lfs_app.post_json(
130 response = git_lfs_app.post_json(
131 '/repo/info/lfs/objects/batch', params=params,
131 '/repo/info/lfs/objects/batch', params=params,
132 extra_environ=http_auth)
132 extra_environ=http_auth)
133
133
134 expected_objects = [
134 expected_objects = [
135 {'authenticated': True,
135 {'authenticated': True,
136 'actions': {
136 'actions': {
137 'download': {
137 'download': {
138 'header': {'Authorization': 'Basic XXXXX'},
138 'header': {'Authorization': 'Basic XXXXX'},
139 'href': 'http://localhost/repo/info/lfs/objects/456'},
139 'href': 'http://localhost/repo/info/lfs/objects/456'},
140 },
140 },
141 'oid': '456',
141 'oid': '456',
142 'size': '1024'}
142 'size': '1024'}
143 ]
143 ]
144 assert json.loads(response.text) == {
144 assert json.loads(response.text) == {
145 'objects': expected_objects, 'transfer': 'basic'}
145 'objects': expected_objects, 'transfer': 'basic'}
146
146
147 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
147 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
148 params = {'operation': 'upload',
148 params = {'operation': 'upload',
149 'objects': [{'oid': '123', 'size': '1024'}]}
149 'objects': [{'oid': '123', 'size': '1024'}]}
150 response = git_lfs_app.post_json(
150 response = git_lfs_app.post_json(
151 '/repo/info/lfs/objects/batch', params=params,
151 '/repo/info/lfs/objects/batch', params=params,
152 extra_environ=http_auth)
152 extra_environ=http_auth)
153 expected_objects = [
153 expected_objects = [
154 {'authenticated': True,
154 {'authenticated': True,
155 'actions': {
155 'actions': {
156 'upload': {
156 'upload': {
157 'header': {'Authorization': 'Basic XXXXX',
157 'header': {'Authorization': 'Basic XXXXX',
158 'Transfer-Encoding': 'chunked'},
158 'Transfer-Encoding': 'chunked'},
159 'href': 'http://localhost/repo/info/lfs/objects/123'},
159 'href': 'http://localhost/repo/info/lfs/objects/123'},
160 'verify': {
160 'verify': {
161 'header': {'Authorization': 'Basic XXXXX'},
161 'header': {'Authorization': 'Basic XXXXX'},
162 'href': 'http://localhost/repo/info/lfs/verify'}
162 'href': 'http://localhost/repo/info/lfs/verify'}
163 },
163 },
164 'oid': '123',
164 'oid': '123',
165 'size': '1024'}
165 'size': '1024'}
166 ]
166 ]
167 assert json.loads(response.text) == {
167 assert json.loads(response.text) == {
168 'objects': expected_objects, 'transfer': 'basic'}
168 'objects': expected_objects, 'transfer': 'basic'}
169
169
170 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
170 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
171 params = {'operation': 'upload',
171 params = {'operation': 'upload',
172 'objects': [{'oid': '123', 'size': '1024'}]}
172 'objects': [{'oid': '123', 'size': '1024'}]}
173 response = git_lfs_https_app.post_json(
173 response = git_lfs_https_app.post_json(
174 '/repo/info/lfs/objects/batch', params=params,
174 '/repo/info/lfs/objects/batch', params=params,
175 extra_environ=http_auth)
175 extra_environ=http_auth)
176 expected_objects = [
176 expected_objects = [
177 {'authenticated': True,
177 {'authenticated': True,
178 'actions': {
178 'actions': {
179 'upload': {
179 'upload': {
180 'header': {'Authorization': 'Basic XXXXX',
180 'header': {'Authorization': 'Basic XXXXX',
181 'Transfer-Encoding': 'chunked'},
181 'Transfer-Encoding': 'chunked'},
182 'href': 'https://localhost/repo/info/lfs/objects/123'},
182 'href': 'https://localhost/repo/info/lfs/objects/123'},
183 'verify': {
183 'verify': {
184 'header': {'Authorization': 'Basic XXXXX'},
184 'header': {'Authorization': 'Basic XXXXX'},
185 'href': 'https://localhost/repo/info/lfs/verify'}
185 'href': 'https://localhost/repo/info/lfs/verify'}
186 },
186 },
187 'oid': '123',
187 'oid': '123',
188 'size': '1024'}
188 'size': '1024'}
189 ]
189 ]
190 assert json.loads(response.text) == {
190 assert json.loads(response.text) == {
191 'objects': expected_objects, 'transfer': 'basic'}
191 'objects': expected_objects, 'transfer': 'basic'}
192
192
193 def test_app_verify_api_missing_data(self, git_lfs_app):
193 def test_app_verify_api_missing_data(self, git_lfs_app):
194 params = {'oid': 'missing'}
194 params = {'oid': 'missing'}
195 response = git_lfs_app.post_json(
195 response = git_lfs_app.post_json(
196 '/repo/info/lfs/verify', params=params,
196 '/repo/info/lfs/verify', params=params,
197 status=400)
197 status=400)
198
198
199 assert json.loads(response.text) == {
199 assert json.loads(response.text) == {
200 'message': 'missing oid and size in request data'}
200 'message': 'missing oid and size in request data'}
201
201
202 def test_app_verify_api_missing_obj(self, git_lfs_app):
202 def test_app_verify_api_missing_obj(self, git_lfs_app):
203 params = {'oid': 'missing', 'size': '1024'}
203 params = {'oid': 'missing', 'size': '1024'}
204 response = git_lfs_app.post_json(
204 response = git_lfs_app.post_json(
205 '/repo/info/lfs/verify', params=params,
205 '/repo/info/lfs/verify', params=params,
206 status=404)
206 status=404)
207
207
208 assert json.loads(response.text) == {
208 assert json.loads(response.text) == {
209 'message': 'oid `missing` does not exists in store'}
209 'message': 'oid `missing` does not exists in store'}
210
210
211 def test_app_verify_api_size_mismatch(self, git_lfs_app):
211 def test_app_verify_api_size_mismatch(self, git_lfs_app):
212 oid = 'existing'
212 oid = 'existing'
213 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
213 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
214 if not os.path.isdir(os.path.dirname(oid_path)):
214 if not os.path.isdir(os.path.dirname(oid_path)):
215 os.makedirs(os.path.dirname(oid_path))
215 os.makedirs(os.path.dirname(oid_path))
216 with open(oid_path, 'wb') as f:
216 with open(oid_path, 'wb') as f:
217 f.write(safe_bytes('OID_CONTENT'))
217 f.write(safe_bytes('OID_CONTENT'))
218
218
219 params = {'oid': oid, 'size': '1024'}
219 params = {'oid': oid, 'size': '1024'}
220 response = git_lfs_app.post_json(
220 response = git_lfs_app.post_json(
221 '/repo/info/lfs/verify', params=params, status=422)
221 '/repo/info/lfs/verify', params=params, status=422)
222
222
223 assert json.loads(response.text) == {
223 assert json.loads(response.text) == {
224 'message': 'requested file size mismatch '
224 'message': 'requested file size mismatch '
225 'store size:11 requested:1024'}
225 'store size:11 requested:1024'}
226
226
227 def test_app_verify_api(self, git_lfs_app):
227 def test_app_verify_api(self, git_lfs_app):
228 oid = 'existing'
228 oid = 'existing'
229 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
229 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
230 if not os.path.isdir(os.path.dirname(oid_path)):
230 if not os.path.isdir(os.path.dirname(oid_path)):
231 os.makedirs(os.path.dirname(oid_path))
231 os.makedirs(os.path.dirname(oid_path))
232 with open(oid_path, 'wb') as f:
232 with open(oid_path, 'wb') as f:
233 f.write(safe_bytes('OID_CONTENT'))
233 f.write(safe_bytes('OID_CONTENT'))
234
234
235 params = {'oid': oid, 'size': 11}
235 params = {'oid': oid, 'size': 11}
236 response = git_lfs_app.post_json(
236 response = git_lfs_app.post_json(
237 '/repo/info/lfs/verify', params=params)
237 '/repo/info/lfs/verify', params=params)
238
238
239 assert json.loads(response.text) == {
239 assert json.loads(response.text) == {
240 'message': {'size': 'ok', 'in_store': 'ok'}}
240 'message': {'size': 'ok', 'in_store': 'ok'}}
241
241
242 def test_app_download_api_oid_not_existing(self, git_lfs_app):
242 def test_app_download_api_oid_not_existing(self, git_lfs_app):
243 oid = 'missing'
243 oid = 'missing'
244
244
245 response = git_lfs_app.get(
245 response = git_lfs_app.get(
246 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
246 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
247
247
248 assert json.loads(response.text) == {
248 assert json.loads(response.text) == {
249 'message': 'requested file with oid `missing` not found in store'}
249 'message': 'requested file with oid `missing` not found in store'}
250
250
251 def test_app_download_api(self, git_lfs_app):
251 def test_app_download_api(self, git_lfs_app):
252 oid = 'existing'
252 oid = 'existing'
253 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
253 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
254 if not os.path.isdir(os.path.dirname(oid_path)):
254 if not os.path.isdir(os.path.dirname(oid_path)):
255 os.makedirs(os.path.dirname(oid_path))
255 os.makedirs(os.path.dirname(oid_path))
256 with open(oid_path, 'wb') as f:
256 with open(oid_path, 'wb') as f:
257 f.write(safe_bytes('OID_CONTENT'))
257 f.write(safe_bytes('OID_CONTENT'))
258
258
259 response = git_lfs_app.get(
259 response = git_lfs_app.get(
260 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
260 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
261 assert response
261 assert response
262
262
263 def test_app_upload(self, git_lfs_app):
263 def test_app_upload(self, git_lfs_app):
264 oid = 'uploaded'
264 oid = 'uploaded'
265
265
266 response = git_lfs_app.put(
266 response = git_lfs_app.put(
267 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
267 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
268
268
269 assert json.loads(response.text) == {'upload': 'ok'}
269 assert json.loads(response.text) == {'upload': 'ok'}
270
270
271 # verify that we actually wrote that OID
271 # verify that we actually wrote that OID
272 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
272 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
273 assert os.path.isfile(oid_path)
273 assert os.path.isfile(oid_path)
274 assert 'CONTENT' == open(oid_path).read()
274 assert 'CONTENT' == open(oid_path).read()
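The two fixtures above are the only setup this suite needs; the same wiring can be reproduced outside pytest, for example when poking at the app from a shell (a throwaway temporary directory stands in for tmpdir):

import tempfile

from webtest.app import TestApp as WebObTestApp

from vcsserver.git_lfs.app import create_app

# Build the WSGI app the same way the git_lfs_app fixture does,
# pointing the OID store at a throwaway directory.
store = tempfile.mkdtemp()
app = WebObTestApp(create_app(
    git_lfs_enabled=True, git_lfs_store_path=store,
    git_lfs_http_scheme='http'))

# The deprecated v1 endpoint answers 501, exactly as
# test_app_deprecated_endpoint asserts.
response = app.post('/repo/info/lfs/objects', status=501)
print(response.text)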
@@ -1,832 +1,832 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import logging
21 import logging
22 import collections
22 import collections
23 import base64
23 import base64
24 import msgpack
24 import msgpack
25 import dataclasses
25 import dataclasses
26 import pygit2
26 import pygit2
27
27
28 import http.client
28 import http.client
29 from celery import Celery
29 from celery import Celery
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33
33
34 - from vcsserver.lib.rc_json import json
34 + from vcsserver.lib.ext_json import json
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver.str_utils import ascii_str, safe_str
36 from vcsserver.str_utils import ascii_str, safe_str
37 from vcsserver.remote.git_remote import Repository
37 from vcsserver.remote.git_remote import Repository
38
38
39 celery_app = Celery('__vcsserver__')
39 celery_app = Celery('__vcsserver__')
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HooksHttpClient:
43 class HooksHttpClient:
44 proto = 'msgpack.v1'
44 proto = 'msgpack.v1'
45 connection = None
45 connection = None
46
46
47 def __init__(self, hooks_uri):
47 def __init__(self, hooks_uri):
48 self.hooks_uri = hooks_uri
48 self.hooks_uri = hooks_uri
49
49
50 def __repr__(self):
50 def __repr__(self):
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52
52
53 def __call__(self, method, extras):
53 def __call__(self, method, extras):
54 connection = http.client.HTTPConnection(self.hooks_uri)
54 connection = http.client.HTTPConnection(self.hooks_uri)
55 # binary msgpack body
55 # binary msgpack body
56 headers, body = self._serialize(method, extras)
56 headers, body = self._serialize(method, extras)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58
58
59 try:
59 try:
60 try:
60 try:
61 connection.request('POST', '/', body, headers)
61 connection.request('POST', '/', body, headers)
62 except Exception as error:
62 except Exception as error:
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 raise
64 raise
65
65
66 response = connection.getresponse()
66 response = connection.getresponse()
67 try:
67 try:
68 return msgpack.load(response)
68 return msgpack.load(response)
69 except Exception:
69 except Exception:
70 response_data = response.read()
70 response_data = response.read()
71 log.exception('Failed to decode hook response json data. '
71 log.exception('Failed to decode hook response json data. '
72 'response_code:%s, raw_data:%s',
72 'response_code:%s, raw_data:%s',
73 response.status, response_data)
73 response.status, response_data)
74 raise
74 raise
75 finally:
75 finally:
76 connection.close()
76 connection.close()
77
77
78 @classmethod
78 @classmethod
79 def _serialize(cls, hook_name, extras):
79 def _serialize(cls, hook_name, extras):
80 data = {
80 data = {
81 'method': hook_name,
81 'method': hook_name,
82 'extras': extras
82 'extras': extras
83 }
83 }
84 headers = {
84 headers = {
85 "rc-hooks-protocol": cls.proto,
85 "rc-hooks-protocol": cls.proto,
86 "Connection": "keep-alive"
86 "Connection": "keep-alive"
87 }
87 }
88 return headers, msgpack.packb(data)
88 return headers, msgpack.packb(data)
89
89
90
90
91 class HooksCeleryClient:
91 class HooksCeleryClient:
92 TASK_TIMEOUT = 60 # time in seconds
92 TASK_TIMEOUT = 60 # time in seconds
93
93
94 def __init__(self, queue, backend):
94 def __init__(self, queue, backend):
95 celery_app.config_from_object({
95 celery_app.config_from_object({
96 'broker_url': queue, 'result_backend': backend,
96 'broker_url': queue, 'result_backend': backend,
97 'broker_connection_retry_on_startup': True,
97 'broker_connection_retry_on_startup': True,
98 'task_serializer': 'json',
98 'task_serializer': 'json',
99 'accept_content': ['json', 'msgpack'],
99 'accept_content': ['json', 'msgpack'],
100 'result_serializer': 'json',
100 'result_serializer': 'json',
101 'result_accept_content': ['json', 'msgpack']
101 'result_accept_content': ['json', 'msgpack']
102 })
102 })
103 self.celery_app = celery_app
103 self.celery_app = celery_app
104
104
105 def __call__(self, method, extras):
105 def __call__(self, method, extras):
106 inquired_task = self.celery_app.signature(
106 inquired_task = self.celery_app.signature(
107 f'rhodecode.lib.celerylib.tasks.{method}'
107 f'rhodecode.lib.celerylib.tasks.{method}'
108 )
108 )
109 return inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)
109 return inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)
110
110
111
111
112 class HooksShadowRepoClient:
112 class HooksShadowRepoClient:
113
113
114 def __call__(self, hook_name, extras):
114 def __call__(self, hook_name, extras):
115 return {'output': '', 'status': 0}
115 return {'output': '', 'status': 0}
116
116
117
117
118 class RemoteMessageWriter:
118 class RemoteMessageWriter:
119 """Writer base class."""
119 """Writer base class."""
120 def write(self, message):
120 def write(self, message):
121 raise NotImplementedError()
121 raise NotImplementedError()
122
122
123
123
124 class HgMessageWriter(RemoteMessageWriter):
124 class HgMessageWriter(RemoteMessageWriter):
125 """Writer that knows how to send messages to mercurial clients."""
125 """Writer that knows how to send messages to mercurial clients."""
126
126
127 def __init__(self, ui):
127 def __init__(self, ui):
128 self.ui = ui
128 self.ui = ui
129
129
130 def write(self, message: str):
130 def write(self, message: str):
131 # TODO: Check why the quiet flag is set by default.
131 # TODO: Check why the quiet flag is set by default.
132 old = self.ui.quiet
132 old = self.ui.quiet
133 self.ui.quiet = False
133 self.ui.quiet = False
134 self.ui.status(message.encode('utf-8'))
134 self.ui.status(message.encode('utf-8'))
135 self.ui.quiet = old
135 self.ui.quiet = old
136
136
137
137
138 class GitMessageWriter(RemoteMessageWriter):
138 class GitMessageWriter(RemoteMessageWriter):
139 """Writer that knows how to send messages to git clients."""
139 """Writer that knows how to send messages to git clients."""
140
140
141 def __init__(self, stdout=None):
141 def __init__(self, stdout=None):
142 self.stdout = stdout or sys.stdout
142 self.stdout = stdout or sys.stdout
143
143
144 def write(self, message: str):
144 def write(self, message: str):
145 self.stdout.write(message)
145 self.stdout.write(message)
146
146
147
147
148 class SvnMessageWriter(RemoteMessageWriter):
148 class SvnMessageWriter(RemoteMessageWriter):
149 """Writer that knows how to send messages to svn clients."""
149 """Writer that knows how to send messages to svn clients."""
150
150
151 def __init__(self, stderr=None):
151 def __init__(self, stderr=None):
152 # SVN needs data sent to stderr for back-to-client messaging
152 # SVN needs data sent to stderr for back-to-client messaging
153 self.stderr = stderr or sys.stderr
153 self.stderr = stderr or sys.stderr
154
154
155 def write(self, message):
155 def write(self, message):
156 self.stderr.write(message)
156 self.stderr.write(message)
157
157
158
158
159 def _handle_exception(result):
159 def _handle_exception(result):
160 exception_class = result.get('exception')
160 exception_class = result.get('exception')
161 exception_traceback = result.get('exception_traceback')
161 exception_traceback = result.get('exception_traceback')
162 log.debug('Handling hook-call exception: %s', exception_class)
162 log.debug('Handling hook-call exception: %s', exception_class)
163
163
164 if exception_traceback:
164 if exception_traceback:
165 log.error('Got traceback from remote call:%s', exception_traceback)
165 log.error('Got traceback from remote call:%s', exception_traceback)
166
166
167 if exception_class == 'HTTPLockedRC':
167 if exception_class == 'HTTPLockedRC':
168 raise exceptions.RepositoryLockedException()(*result['exception_args'])
168 raise exceptions.RepositoryLockedException()(*result['exception_args'])
169 elif exception_class == 'HTTPBranchProtected':
169 elif exception_class == 'HTTPBranchProtected':
170 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
170 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
171 elif exception_class == 'RepositoryError':
171 elif exception_class == 'RepositoryError':
172 raise exceptions.VcsException()(*result['exception_args'])
172 raise exceptions.VcsException()(*result['exception_args'])
173 elif exception_class:
173 elif exception_class:
174 raise Exception(
174 raise Exception(
175 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
175 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
176 )
176 )
177
177
178
178
179 def _get_hooks_client(extras):
179 def _get_hooks_client(extras):
180 hooks_uri = extras.get('hooks_uri')
180 hooks_uri = extras.get('hooks_uri')
181 task_queue = extras.get('task_queue')
181 task_queue = extras.get('task_queue')
182 task_backend = extras.get('task_backend')
182 task_backend = extras.get('task_backend')
183 is_shadow_repo = extras.get('is_shadow_repo')
183 is_shadow_repo = extras.get('is_shadow_repo')
184
184
185 if hooks_uri:
185 if hooks_uri:
186 return HooksHttpClient(hooks_uri)
186 return HooksHttpClient(hooks_uri)
187 elif task_queue and task_backend:
187 elif task_queue and task_backend:
188 return HooksCeleryClient(task_queue, task_backend)
188 return HooksCeleryClient(task_queue, task_backend)
189 elif is_shadow_repo:
189 elif is_shadow_repo:
190 return HooksShadowRepoClient()
190 return HooksShadowRepoClient()
191 else:
191 else:
192 raise Exception("Hooks client not found!")
192 raise Exception("Hooks client not found!")
193
193
194
194
195 def _call_hook(hook_name, extras, writer):
195 def _call_hook(hook_name, extras, writer):
196 hooks_client = _get_hooks_client(extras)
196 hooks_client = _get_hooks_client(extras)
197 log.debug('Hooks, using client:%s', hooks_client)
197 log.debug('Hooks, using client:%s', hooks_client)
198 result = hooks_client(hook_name, extras)
198 result = hooks_client(hook_name, extras)
199 log.debug('Hooks got result: %s', result)
199 log.debug('Hooks got result: %s', result)
200 _handle_exception(result)
200 _handle_exception(result)
201 writer.write(result['output'])
201 writer.write(result['output'])
202
202
203 return result['status']
203 return result['status']
204
204
205
205
206 def _extras_from_ui(ui):
206 def _extras_from_ui(ui):
207 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
207 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
208 if not hook_data:
208 if not hook_data:
209 # maybe it's inside environ ?
209 # maybe it's inside environ ?
210 env_hook_data = os.environ.get('RC_SCM_DATA')
210 env_hook_data = os.environ.get('RC_SCM_DATA')
211 if env_hook_data:
211 if env_hook_data:
212 hook_data = env_hook_data
212 hook_data = env_hook_data
213
213
214 extras = {}
214 extras = {}
215 if hook_data:
215 if hook_data:
216 extras = json.loads(hook_data)
216 extras = json.loads(hook_data)
217 return extras
217 return extras
218
218
219
219
220 def _rev_range_hash(repo, node, check_heads=False):
220 def _rev_range_hash(repo, node, check_heads=False):
221 from vcsserver.hgcompat import get_ctx
221 from vcsserver.hgcompat import get_ctx
222
222
223 commits = []
223 commits = []
224 revs = []
224 revs = []
225 start = get_ctx(repo, node).rev()
225 start = get_ctx(repo, node).rev()
226 end = len(repo)
226 end = len(repo)
227 for rev in range(start, end):
227 for rev in range(start, end):
228 revs.append(rev)
228 revs.append(rev)
229 ctx = get_ctx(repo, rev)
229 ctx = get_ctx(repo, rev)
230 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
230 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
231 branch = safe_str(ctx.branch())
231 branch = safe_str(ctx.branch())
232 commits.append((commit_id, branch))
232 commits.append((commit_id, branch))
233
233
234 parent_heads = []
234 parent_heads = []
235 if check_heads:
235 if check_heads:
236 parent_heads = _check_heads(repo, start, end, revs)
236 parent_heads = _check_heads(repo, start, end, revs)
237 return commits, parent_heads
237 return commits, parent_heads
238
238
239
239
240 def _check_heads(repo, start, end, commits):
240 def _check_heads(repo, start, end, commits):
241 from vcsserver.hgcompat import get_ctx
241 from vcsserver.hgcompat import get_ctx
242 changelog = repo.changelog
242 changelog = repo.changelog
243 parents = set()
243 parents = set()
244
244
245 for new_rev in commits:
245 for new_rev in commits:
246 for p in changelog.parentrevs(new_rev):
246 for p in changelog.parentrevs(new_rev):
247 if p == mercurial.node.nullrev:
247 if p == mercurial.node.nullrev:
248 continue
248 continue
249 if p < start:
249 if p < start:
250 parents.add(p)
250 parents.add(p)
251
251
252 for p in parents:
252 for p in parents:
253 branch = get_ctx(repo, p).branch()
253 branch = get_ctx(repo, p).branch()
254 # The heads descending from that parent, on the same branch
254 # The heads descending from that parent, on the same branch
255 parent_heads = {p}
255 parent_heads = {p}
256 reachable = {p}
256 reachable = {p}
257 for x in range(p + 1, end):
257 for x in range(p + 1, end):
258 if get_ctx(repo, x).branch() != branch:
258 if get_ctx(repo, x).branch() != branch:
259 continue
259 continue
260 for pp in changelog.parentrevs(x):
260 for pp in changelog.parentrevs(x):
261 if pp in reachable:
261 if pp in reachable:
262 reachable.add(x)
262 reachable.add(x)
263 parent_heads.discard(pp)
263 parent_heads.discard(pp)
264 parent_heads.add(x)
264 parent_heads.add(x)
265 # More than one head? Suggest merging
265 # More than one head? Suggest merging
266 if len(parent_heads) > 1:
266 if len(parent_heads) > 1:
267 return list(parent_heads)
267 return list(parent_heads)
268
268
269 return []
269 return []
270
270
271
271
272 def _get_git_env():
273     env = {}
274     for k, v in os.environ.items():
275         if k.startswith('GIT'):
276             env[k] = v
277
278     # serialized version
279     return [(k, v) for k, v in env.items()]
280
281
282 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
283     env = {}
284     for k, v in os.environ.items():
285         if k.startswith('HG'):
286             env[k] = v
287
288     env['HG_NODE'] = old_rev
289     env['HG_NODE_LAST'] = new_rev
290     env['HG_TXNID'] = txnid
291     env['HG_PENDING'] = repo_path
292
293     return [(k, v) for k, v in env.items()]
294
295
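# Illustrative only: what the serialized hook environment from _get_hg_env() looks
# like for made-up revisions and a made-up repo path (all values are assumptions).
def _example_hg_env():
    env = _get_hg_env(
        old_rev='old-sha', new_rev='new-sha',
        txnid='TXN:abcdef', repo_path='/repos/acme/backend')
    # e.g. [('HG_NODE', 'old-sha'), ('HG_NODE_LAST', 'new-sha'),
    #       ('HG_TXNID', 'TXN:abcdef'), ('HG_PENDING', '/repos/acme/backend'),
    #       ... plus any HG* variables already present in os.environ]
    return dict(env)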
296 def _fix_hooks_executables(ini_path=''):
297     """
298     This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns,
299     especially for Subversion, where hooks strip the entire environment and calling a bare
300     'svn' command will most likely fail because svn is not on PATH.
301     """
302     from vcsserver.http_main import sanitize_settings_and_apply_defaults
303     from vcsserver.lib.config_utils import get_app_config_lightweight
304
305     core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
306     if ini_path:
307
308         ini_settings = get_app_config_lightweight(ini_path)
309         ini_settings = sanitize_settings_and_apply_defaults({'__file__': ini_path}, ini_settings)
310         core_binary_dir = ini_settings['core.binary_dir']
311
312     settings.BINARY_DIR = core_binary_dir
313
314
315 def repo_size(ui, repo, **kwargs):
316     extras = _extras_from_ui(ui)
317     return _call_hook('repo_size', extras, HgMessageWriter(ui))
318
319
320 def pre_pull(ui, repo, **kwargs):
321     extras = _extras_from_ui(ui)
322     return _call_hook('pre_pull', extras, HgMessageWriter(ui))
323
324
325 def pre_pull_ssh(ui, repo, **kwargs):
326     extras = _extras_from_ui(ui)
327     if extras and extras.get('SSH'):
328         return pre_pull(ui, repo, **kwargs)
329     return 0
330
331
332 def post_pull(ui, repo, **kwargs):
333     extras = _extras_from_ui(ui)
334     return _call_hook('post_pull', extras, HgMessageWriter(ui))
335
336
337 def post_pull_ssh(ui, repo, **kwargs):
338     extras = _extras_from_ui(ui)
339     if extras and extras.get('SSH'):
340         return post_pull(ui, repo, **kwargs)
341     return 0
342
343
344 def pre_push(ui, repo, node=None, **kwargs):
345     """
346     Mercurial pre_push hook
347     """
348     extras = _extras_from_ui(ui)
349     detect_force_push = extras.get('detect_force_push')
350
351     rev_data = []
352     hook_type: str = safe_str(kwargs.get('hooktype'))
353
354     if node and hook_type == 'pretxnchangegroup':
355         branches = collections.defaultdict(list)
356         commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
357         for commit_id, branch in commits:
358             branches[branch].append(commit_id)
359
360         for branch, commits in branches.items():
361             old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
362             rev_data.append({
363                 'total_commits': len(commits),
364                 'old_rev': old_rev,
365                 'new_rev': commits[-1],
366                 'ref': '',
367                 'type': 'branch',
368                 'name': branch,
369             })
370
371         for push_ref in rev_data:
372             push_ref['multiple_heads'] = _heads
373
374             repo_path = os.path.join(
375                 extras.get('repo_store', ''), extras.get('repository', ''))
376             push_ref['hg_env'] = _get_hg_env(
377                 old_rev=push_ref['old_rev'],
378                 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
379                 repo_path=repo_path)
380
381     extras['hook_type'] = hook_type or 'pre_push'
382     extras['commit_ids'] = rev_data
383
384     return _call_hook('pre_push', extras, HgMessageWriter(ui))
385
386
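# A small sketch of the grouping performed in pre_push(): the (commit_id, branch)
# pairs coming from _rev_range_hash() are bucketed per branch and summarised into
# one rev_data entry per branch. The commit ids below are made up, and the sketch
# simplifies old_rev handling compared to the hook above.
def _example_group_commits_by_branch():
    commits = [('c1', 'default'), ('c2', 'default'), ('c3', 'feature-x')]
    branches = collections.defaultdict(list)
    for commit_id, branch in commits:
        branches[branch].append(commit_id)
    rev_data = [
        {'total_commits': len(ids), 'old_rev': ids[0], 'new_rev': ids[-1],
         'ref': '', 'type': 'branch', 'name': name}
        for name, ids in branches.items()
    ]
    return rev_data  # two entries: 'default' with 2 commits, 'feature-x' with 1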
387 def pre_push_ssh(ui, repo, node=None, **kwargs):
388     extras = _extras_from_ui(ui)
389     if extras.get('SSH'):
390         return pre_push(ui, repo, node, **kwargs)
391
392     return 0
393
394
395 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
396     """
397     Mercurial pre_push hook for SSH
398     """
399     extras = _extras_from_ui(ui)
400     if extras.get('SSH'):
401         permission = extras['SSH_PERMISSIONS']
402
403         if 'repository.write' == permission or 'repository.admin' == permission:
404             return 0
405
406         # non-zero ret code
407         return 1
408
409     return 0
410
411
412 def post_push(ui, repo, node, **kwargs):
413     """
414     Mercurial post_push hook
415     """
416     extras = _extras_from_ui(ui)
417
418     commit_ids = []
419     branches = []
420     bookmarks = []
421     tags = []
422     hook_type: str = safe_str(kwargs.get('hooktype'))
423
424     commits, _heads = _rev_range_hash(repo, node)
425     for commit_id, branch in commits:
426         commit_ids.append(commit_id)
427         if branch not in branches:
428             branches.append(branch)
429
430     if hasattr(ui, '_rc_pushkey_bookmarks'):
431         bookmarks = ui._rc_pushkey_bookmarks
432
433     extras['hook_type'] = hook_type or 'post_push'
434     extras['commit_ids'] = commit_ids
435
436     extras['new_refs'] = {
437         'branches': branches,
438         'bookmarks': bookmarks,
439         'tags': tags
440     }
441
442     return _call_hook('post_push', extras, HgMessageWriter(ui))
443
444
445 def post_push_ssh(ui, repo, node, **kwargs):
446     """
447     Mercurial post_push hook for SSH
448     """
449     if _extras_from_ui(ui).get('SSH'):
450         return post_push(ui, repo, node, **kwargs)
451     return 0
452
453
454 def key_push(ui, repo, **kwargs):
455     from vcsserver.hgcompat import get_ctx
456
457     if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
458         # store new bookmarks in our UI object propagated later to post_push
459         ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
460     return
461
462
463 # backward compat
464 log_pull_action = post_pull
465
466 # backward compat
467 log_push_action = post_push
468
469
470 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
471     """
472     Old hook name: keep here for backward compatibility.
473
474     This is only required when the installed git hooks are not upgraded.
475     """
476     pass
477
478
479 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
480     """
481     Old hook name: keep here for backward compatibility.
482
483     This is only required when the installed git hooks are not upgraded.
484     """
485     pass
486
487
488 @dataclasses.dataclass
489 class HookResponse:
490     status: int
491     output: str
492
493
494 def git_pre_pull(extras) -> HookResponse:
495     """
496     Pre pull hook.
497
498     :param extras: dictionary containing the keys defined in simplevcs
499     :type extras: dict
500
501     :return: hook status code (0 for success) together with any captured output.
502     :rtype: HookResponse
503     """
504
505     if 'pull' not in extras['hooks']:
506         return HookResponse(0, '')
507
508     stdout = io.StringIO()
509     try:
510         status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
511
512     except Exception as error:
513         log.exception('Failed to call pre_pull hook')
514         status_code = 128
515         stdout.write(f'ERROR: {error}\n')
516
517     return HookResponse(status_code, stdout.getvalue())
518
519
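# How a caller might consume the HookResponse above (an illustrative sketch; the
# actual wiring into the git smart-HTTP/SSH flow lives outside this module).
def _example_consume_pre_pull(extras: dict) -> int:
    resp = git_pre_pull(extras)
    if resp.output:
        sys.stdout.write(resp.output)  # surface hook messages to the client
    return resp.status                 # a non-zero status blocks the pull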
520 def git_post_pull(extras) -> HookResponse:
521     """
522     Post pull hook.
523
524     :param extras: dictionary containing the keys defined in simplevcs
525     :type extras: dict
526
527     :return: hook status code (0 for success) together with any captured output.
528     :rtype: HookResponse
529     """
530     if 'pull' not in extras['hooks']:
531         return HookResponse(0, '')
532
533     stdout = io.StringIO()
534     try:
535         status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
536     except Exception as error:
537         status = 128
538         stdout.write(f'ERROR: {error}\n')
539
540     return HookResponse(status, stdout.getvalue())
541
542
543 def _parse_git_ref_lines(revision_lines):
544     rev_data = []
545     for revision_line in revision_lines or []:
546         old_rev, new_rev, ref = revision_line.strip().split(' ')
547         ref_data = ref.split('/', 2)
548         if ref_data[1] in ('tags', 'heads'):
549             rev_data.append({
550                 # NOTE(marcink):
551                 # we're unable to tell total_commits for git at this point
552                 # but we set the variable for consistency with the other backends
553                 'total_commits': -1,
554                 'old_rev': old_rev,
555                 'new_rev': new_rev,
556                 'ref': ref,
557                 'type': ref_data[1],
558                 'name': ref_data[2],
559             })
560     return rev_data
561
562
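# Example input/output for _parse_git_ref_lines(): git's pre-receive/post-receive
# hooks feed one "<old-sha> <new-sha> <ref>" line per updated ref on stdin. The
# sha values below are shortened placeholders, not real object ids.
def _example_parse_ref_lines():
    lines = ['aaa bbb refs/heads/main', 'ccc ddd refs/tags/v1.0']
    return _parse_git_ref_lines(lines)
    # -> [{'total_commits': -1, 'old_rev': 'aaa', 'new_rev': 'bbb',
    #      'ref': 'refs/heads/main', 'type': 'heads', 'name': 'main'},
    #     {'total_commits': -1, 'old_rev': 'ccc', 'new_rev': 'ddd',
    #      'ref': 'refs/tags/v1.0', 'type': 'tags', 'name': 'v1.0'}]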
563 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
564     """
565     Pre push hook.
566
567     :return: status code of the hook. 0 for success.
568     """
569     extras = json.loads(env['RC_SCM_DATA'])
570     rev_data = _parse_git_ref_lines(revision_lines)
571     if 'push' not in extras['hooks']:
572         return 0
573     _fix_hooks_executables()
574
575     empty_commit_id = '0' * 40
576
577     detect_force_push = extras.get('detect_force_push')
578
579     for push_ref in rev_data:
580         # store our git-env which holds the temp store
581         push_ref['git_env'] = _get_git_env()
582         push_ref['pruned_sha'] = ''
583         if not detect_force_push:
584             # don't check for forced-push when we don't need to
585             continue
586
587         type_ = push_ref['type']
588         new_branch = push_ref['old_rev'] == empty_commit_id
589         delete_branch = push_ref['new_rev'] == empty_commit_id
590         if type_ == 'heads' and not (new_branch or delete_branch):
591             old_rev = push_ref['old_rev']
592             new_rev = push_ref['new_rev']
593             cmd = [settings.GIT_EXECUTABLE(), 'rev-list', old_rev, f'^{new_rev}']
594             stdout, stderr = subprocessio.run_command(
595                 cmd, env=os.environ.copy())
596             # any output means there are commits reachable only from the old rev, i.e. a forced push was used
597             if stdout:
598                 push_ref['pruned_sha'] = stdout.splitlines()
599
600     extras['hook_type'] = 'pre_receive'
601     extras['commit_ids'] = rev_data
602
603     stdout = sys.stdout
604     status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
605
606     return status_code
607
608
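# The force-push check above boils down to: are there commits reachable from the
# old tip that are NOT reachable from the new tip? A minimal standalone sketch
# (assumes a git repository at `repo_dir` and `git` on PATH, unlike the hook
# above, which uses the configured GIT_EXECUTABLE and subprocessio):
def _example_detect_force_push(repo_dir: str, old_rev: str, new_rev: str) -> bool:
    import subprocess
    out = subprocess.run(
        ['git', 'rev-list', old_rev, f'^{new_rev}'],
        cwd=repo_dir, capture_output=True, text=True, check=True).stdout
    return bool(out.strip())  # any output means old commits would be discarded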
609 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
610     """
611     Post push hook.
612
613     :return: status code of the hook. 0 for success.
614     """
615     extras = json.loads(env['RC_SCM_DATA'])
616     if 'push' not in extras['hooks']:
617         return 0
618
619     _fix_hooks_executables()
620
621     rev_data = _parse_git_ref_lines(revision_lines)
622
623     git_revs = []
624
625     # N.B.(skreft): it is ok to just call git, as git before calling a
626     # subcommand sets the PATH environment variable so that it points to the
627     # correct version of the git executable.
628     empty_commit_id = '0' * 40
629     branches = []
630     tags = []
631     for push_ref in rev_data:
632         type_ = push_ref['type']
633
634         if type_ == 'heads':
635             # starting new branch case
636             if push_ref['old_rev'] == empty_commit_id:
637                 push_ref_name = push_ref['name']
638
639                 if push_ref_name not in branches:
640                     branches.append(push_ref_name)
641
642                 need_head_set = ''
643                 with Repository(os.getcwd()) as repo:
644                     try:
645                         repo.head
646                     except pygit2.GitError:
647                         need_head_set = f'refs/heads/{push_ref_name}'
648
649                     if need_head_set:
650                         repo.set_head(need_head_set)
651                         print(f"Setting default branch to {push_ref_name}")
652
653                 cmd = [settings.GIT_EXECUTABLE(), 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
654                 stdout, stderr = subprocessio.run_command(
655                     cmd, env=os.environ.copy())
656                 heads = safe_str(stdout)
657                 heads = heads.replace(push_ref['ref'], '')
658                 heads = ' '.join(head for head
659                                  in heads.splitlines() if head) or '.'
660                 cmd = [settings.GIT_EXECUTABLE(), 'log', '--reverse',
661                        '--pretty=format:%H', '--', push_ref['new_rev'],
662                        '--not', heads]
663                 stdout, stderr = subprocessio.run_command(
664                     cmd, env=os.environ.copy())
665                 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
666
667             # delete branch case
668             elif push_ref['new_rev'] == empty_commit_id:
669                 git_revs.append(f'delete_branch=>{push_ref["name"]}')
670             else:
671                 if push_ref['name'] not in branches:
672                     branches.append(push_ref['name'])
673
674                 cmd = [settings.GIT_EXECUTABLE(), 'log',
675                        f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
676                        '--reverse', '--pretty=format:%H']
677                 stdout, stderr = subprocessio.run_command(
678                     cmd, env=os.environ.copy())
679                 # we get bytes from stdout, we need str to be consistent
680                 log_revs = list(map(ascii_str, stdout.splitlines()))
681                 git_revs.extend(log_revs)
682
683                 # Pure pygit2 impl. but still 2-3x slower :/
684                 # results = []
685                 #
686                 # with Repository(os.getcwd()) as repo:
687                 #     repo_new_rev = repo[push_ref['new_rev']]
688                 #     repo_old_rev = repo[push_ref['old_rev']]
689                 #     walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
690                 #
691                 #     for commit in walker:
692                 #         if commit.id == repo_old_rev.id:
693                 #             break
694                 #         results.append(commit.id.hex)
695                 #     # reverse the order, can't use GIT_SORT_REVERSE
696                 #     log_revs = results[::-1]
697
698         elif type_ == 'tags':
699             if push_ref['name'] not in tags:
700                 tags.append(push_ref['name'])
701             git_revs.append(f'tag=>{push_ref["name"]}')
702
703     extras['hook_type'] = 'post_receive'
704     extras['commit_ids'] = git_revs
705     extras['new_refs'] = {
706         'branches': branches,
707         'bookmarks': [],
708         'tags': tags,
709     }
710
711     stdout = sys.stdout
712
713     if 'repo_size' in extras['hooks']:
714         try:
715             _call_hook('repo_size', extras, GitMessageWriter(stdout))
716         except Exception:
717             pass
718
719     status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
720     return status_code
721
722
723 def _get_extras_from_txn_id(path, txn_id):
724     _fix_hooks_executables()
725
726     extras = {}
727     try:
728         cmd = [settings.SVNLOOK_EXECUTABLE(), 'pget',
729                '-t', txn_id,
730                '--revprop', path, 'rc-scm-extras']
731         stdout, stderr = subprocessio.run_command(
732             cmd, env=os.environ.copy())
733         extras = json.loads(base64.urlsafe_b64decode(stdout))
734     except Exception:
735         log.exception('Failed to extract extras info from txn_id')
736
737     return extras
738
739
740 def _get_extras_from_commit_id(commit_id, path):
741     _fix_hooks_executables()
742
743     extras = {}
744     try:
745         cmd = [settings.SVNLOOK_EXECUTABLE(), 'pget',
746                '-r', commit_id,
747                '--revprop', path, 'rc-scm-extras']
748         stdout, stderr = subprocessio.run_command(
749             cmd, env=os.environ.copy())
750         extras = json.loads(base64.urlsafe_b64decode(stdout))
751     except Exception:
752         log.exception('Failed to extract extras info from commit_id')
753
754     return extras
755
756
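# The two helpers above assume the 'rc-scm-extras' revision property holds a
# urlsafe-base64 encoded JSON document. A round-trip sketch of that encoding
# (the writing side lives in RhodeCode itself, not in this module; the sketch
# also assumes the bundled json module exposes dumps()/loads() like stdlib json):
def _example_extras_roundtrip():
    payload = {'username': 'demo', 'repository': 'acme/backend', 'hooks': ['push']}
    encoded = base64.urlsafe_b64encode(json.dumps(payload).encode('utf-8'))
    decoded = json.loads(base64.urlsafe_b64decode(encoded))
    return decoded == payload  # True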
757 def svn_pre_commit(repo_path, commit_data, env):
758
759     path, txn_id = commit_data
760     branches = []
761     tags = []
762
763     if env.get('RC_SCM_DATA'):
764         extras = json.loads(env['RC_SCM_DATA'])
765     else:
766         # fallback method to read from TXN-ID stored data
767         extras = _get_extras_from_txn_id(path, txn_id)
768
769     if not extras:
770         # TODO: temporary fix until svn txn-id changes are merged
771         return 0
772         raise ValueError('Failed to extract context data called extras for hook execution')
773
774     extras['hook_type'] = 'pre_commit'
775     extras['commit_ids'] = [txn_id]
776     extras['txn_id'] = txn_id
777     extras['new_refs'] = {
778         'total_commits': 1,
779         'branches': branches,
780         'bookmarks': [],
781         'tags': tags,
782     }
783
784     return _call_hook('pre_push', extras, SvnMessageWriter())
785
786
787 def svn_post_commit(repo_path, commit_data, env):
788     """
789     commit_data is path, rev, txn_id
790     """
791
792     if len(commit_data) == 3:
793         path, commit_id, txn_id = commit_data
794     elif len(commit_data) == 2:
795         log.error('Failed to extract txn_id from commit_data using legacy method. '
796                   'Some functionality might be limited')
797         path, commit_id = commit_data
798         txn_id = None
799     else:
800         return 0
801
802     branches = []
803     tags = []
804
805     if env.get('RC_SCM_DATA'):
806         extras = json.loads(env['RC_SCM_DATA'])
807     else:
808         # fallback method to read from TXN-ID stored data
809         extras = _get_extras_from_commit_id(commit_id, path)
810
811     if not extras:
812         # TODO: temporary fix until svn txn-id changes are merged
813         return 0
814         raise ValueError('Failed to extract context data called extras for hook execution')
815
816     extras['hook_type'] = 'post_commit'
817     extras['commit_ids'] = [commit_id]
818     extras['txn_id'] = txn_id
819     extras['new_refs'] = {
820         'branches': branches,
821         'bookmarks': [],
822         'tags': tags,
823         'total_commits': 1,
824     }
825
826     if 'repo_size' in extras['hooks']:
827         try:
828             _call_hook('repo_size', extras, SvnMessageWriter())
829         except Exception:
830             pass
831
832     return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,774 +1,774 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
19 import os
20 import platform
21 import sys
22 import locale
23 import logging
24 import uuid
25 import time
26 import wsgiref.util
27 import tempfile
28 import psutil
29
30 from itertools import chain
31
32 import msgpack
33 import configparser
34
35 from pyramid.config import Configurator
36 from pyramid.wsgi import wsgiapp
37 from pyramid.response import Response
38
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
40 from vcsserver.lib.rc_json import json
40 from vcsserver.lib.ext_json import json
41 from vcsserver.config.settings_maker import SettingsMaker
42 from vcsserver.str_utils import safe_int
43 from vcsserver.lib.statsd_client import StatsdClient
44 from vcsserver.tweens.request_wrapper import get_headers_call_context
45
46 import vcsserver
47 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
48 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
49 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
50 from vcsserver.echo_stub.echo_app import EchoApp
51 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
52 from vcsserver.lib.exc_tracking import store_exception, format_exc
53 from vcsserver.server import VcsServer
54
55 strict_vcs = True
56
57 git_import_err = None
58 try:
59     from vcsserver.remote.git_remote import GitFactory, GitRemote
60 except ImportError as e:
61     GitFactory = None
62     GitRemote = None
63     git_import_err = e
64     if strict_vcs:
65         raise
66
67
68 hg_import_err = None
69 try:
70     from vcsserver.remote.hg_remote import MercurialFactory, HgRemote
71 except ImportError as e:
72     MercurialFactory = None
73     HgRemote = None
74     hg_import_err = e
75     if strict_vcs:
76         raise
77
78
79 svn_import_err = None
80 try:
81     from vcsserver.remote.svn_remote import SubversionFactory, SvnRemote
82 except ImportError as e:
83     SubversionFactory = None
84     SvnRemote = None
85     svn_import_err = e
86     if strict_vcs:
87         raise
88
89 log = logging.getLogger(__name__)
90
91 # due to Mercurial/glibc 2.27 problems we need to detect if locale settings are
92 # causing problems, "fix" them if they do, and fall back to LC_ALL=C
93
94 try:
95     locale.setlocale(locale.LC_ALL, '')
96 except locale.Error as e:
97     log.error(
98         'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
99     os.environ['LC_ALL'] = 'C'
100
101
102 def _is_request_chunked(environ):
103     stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
104     return stream
105
106
107 def log_max_fd():
108     try:
109         maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
110         log.info('Max file descriptors value: %s', maxfd)
111     except Exception:
112         pass
113
114
115 class VCS:
116     def __init__(self, locale_conf=None, cache_config=None):
117         self.locale = locale_conf
118         self.cache_config = cache_config
119         self._configure_locale()
120
121         log_max_fd()
122
123         if GitFactory and GitRemote:
124             git_factory = GitFactory()
125             self._git_remote = GitRemote(git_factory)
126         else:
127             log.error("Git client import failed: %s", git_import_err)
128
129         if MercurialFactory and HgRemote:
130             hg_factory = MercurialFactory()
131             self._hg_remote = HgRemote(hg_factory)
132         else:
133             log.error("Mercurial client import failed: %s", hg_import_err)
134
135         if SubversionFactory and SvnRemote:
136             svn_factory = SubversionFactory()
137
138             # hg factory is used for svn url validation
139             hg_factory = MercurialFactory()
140             self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
141         else:
142             log.error("Subversion client import failed: %s", svn_import_err)
143
144         self._vcsserver = VcsServer()
145
146     def _configure_locale(self):
147         if self.locale:
148             log.info('Setting locale `LC_ALL` to %s', self.locale)
149         else:
150             log.info('Configuring locale subsystem based on environment variables')
151         try:
152             # If self.locale is the empty string, then the locale
153             # module will use the environment variables. See the
154             # documentation of the package `locale`.
155             locale.setlocale(locale.LC_ALL, self.locale)
156
157             language_code, encoding = locale.getlocale()
158             log.info(
159                 'Locale set to language code "%s" with encoding "%s".',
160                 language_code, encoding)
161         except locale.Error:
162             log.exception('Cannot set locale, not configuring the locale system')
163
164
165 class WsgiProxy:
166     def __init__(self, wsgi):
167         self.wsgi = wsgi
168
169     def __call__(self, environ, start_response):
170         input_data = environ['wsgi.input'].read()
171         input_data = msgpack.unpackb(input_data)
172
173         error = None
174         try:
175             data, status, headers = self.wsgi.handle(
176                 input_data['environment'], input_data['input_data'],
177                 *input_data['args'], **input_data['kwargs'])
178         except Exception as e:
179             data, status, headers = [], None, None
180             error = {
181                 'message': str(e),
182                 '_vcs_kind': getattr(e, '_vcs_kind', None)
183             }
184
185         start_response(200, {})
186         return self._iterator(error, status, headers, data)
187
188     def _iterator(self, error, status, headers, data):
189         initial_data = [
190             error,
191             status,
192             headers,
193         ]
194
195         for d in chain(initial_data, data):
196             yield msgpack.packb(d)
197
198
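# Sketch of the framing WsgiProxy._iterator() produces: a stream of msgpack-packed
# objects where the first three are error/status/headers and the rest is body data.
# A standalone pack/unpack round-trip of that envelope (all values are illustrative):
def _example_msgpack_envelope():
    frames = [None, '200 OK', {'Content-Type': 'text/plain'}, b'chunk-1', b'chunk-2']
    wire = b''.join(msgpack.packb(f) for f in frames)
    unpacker = msgpack.Unpacker(raw=False)
    unpacker.feed(wire)
    error, status, headers, *body = list(unpacker)
    return error, status, headers, body  # (None, '200 OK', {...}, [b'chunk-1', b'chunk-2'])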
199 def not_found(request):
200     return {'status': '404 NOT FOUND'}
201
202
203 class VCSViewPredicate:
204     def __init__(self, val, config):
205         self.remotes = val
206
207     def text(self):
208         return f'vcs view method = {list(self.remotes.keys())}'
209
210     phash = text
211
212     def __call__(self, context, request):
213         """
214         View predicate that returns true if given backend is supported by
215         defined remotes.
216         """
217         backend = request.matchdict.get('backend')
218         return backend in self.remotes
219
220
221 class HTTPApplication:
222     ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
223
224     remote_wsgi = remote_wsgi
225     _use_echo_app = False
226
227     def __init__(self, settings=None, global_config=None):
228
229         self.config = Configurator(settings=settings)
230         # Init our statsd at very start
231         self.config.registry.statsd = StatsdClient.statsd
232         self.config.registry.vcs_call_context = {}
233
234         self.global_config = global_config
235         self.config.include('vcsserver.lib.rc_cache')
236         self.config.include('vcsserver.lib.rc_cache.archive_cache')
237
238         settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239         vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240         self._remotes = {
241             'hg': vcs._hg_remote,
242             'git': vcs._git_remote,
243             'svn': vcs._svn_remote,
244             'server': vcs._vcsserver,
245         }
246         if settings.get('dev.use_echo_app', 'false').lower() == 'true':
247             self._use_echo_app = True
248             log.warning("Using EchoApp for VCS operations.")
249             self.remote_wsgi = remote_wsgi_stub
250
251         self._configure_settings(global_config, settings)
252
253         self._configure()
254
255     def _configure_settings(self, global_config, app_settings):
256         """
257         Configure the settings module.
258         """
259         settings_merged = global_config.copy()
260         settings_merged.update(app_settings)
261
262         binary_dir = app_settings['core.binary_dir']
263
264         settings.BINARY_DIR = binary_dir
265
266         # Store the settings to make them available to other modules.
267         vcsserver.PYRAMID_SETTINGS = settings_merged
268         vcsserver.CONFIG = settings_merged
269
270     def _configure(self):
271         self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
272
273         self.config.add_route('service', '/_service')
274         self.config.add_route('status', '/status')
275         self.config.add_route('hg_proxy', '/proxy/hg')
276         self.config.add_route('git_proxy', '/proxy/git')
277
278         # rpc methods
279         self.config.add_route('vcs', '/{backend}')
280
281         # streaming rpc remote methods
282         self.config.add_route('vcs_stream', '/{backend}/stream')
283
284         # vcs operations clone/push as streaming
285         self.config.add_route('stream_git', '/stream/git/*repo_name')
286         self.config.add_route('stream_hg', '/stream/hg/*repo_name')
287
288         self.config.add_view(self.status_view, route_name='status', renderer='json')
289         self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
290
291         self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
292         self.config.add_view(self.git_proxy(), route_name='git_proxy')
293         self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
294                              vcs_view=self._remotes)
295         self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
296                              vcs_view=self._remotes)
297
298         self.config.add_view(self.hg_stream(), route_name='stream_hg')
299         self.config.add_view(self.git_stream(), route_name='stream_git')
300
301         self.config.add_view_predicate('vcs_view', VCSViewPredicate)
302
303         self.config.add_notfound_view(not_found, renderer='json')
304
305         self.config.add_view(self.handle_vcs_exception, context=Exception)
306
307         self.config.add_tween(
308             'vcsserver.tweens.request_wrapper.RequestWrapperTween',
309         )
310         self.config.add_request_method(
311             'vcsserver.lib.request_counter.get_request_counter',
312             'request_count')
313
314     def wsgi_app(self):
315         return self.config.make_wsgi_app()
316
317     def _vcs_view_params(self, request):
318         remote = self._remotes[request.matchdict['backend']]
319         payload = msgpack.unpackb(request.body, use_list=True)
320
321         method = payload.get('method')
322         params = payload['params']
323         wire = params.get('wire')
324         args = params.get('args')
325         kwargs = params.get('kwargs')
326         context_uid = None
327
328         request.registry.vcs_call_context = {
329             'method': method,
330             'repo_name': payload.get('_repo_name'),
331         }
332
333         if wire:
334             try:
335                 wire['context'] = context_uid = uuid.UUID(wire['context'])
336             except KeyError:
337                 pass
338             args.insert(0, wire)
339         repo_state_uid = wire.get('repo_state_uid') if wire else None
340
341         # NOTE(marcink): trading complexity for slight performance
342         if log.isEnabledFor(logging.DEBUG):
343             # also we SKIP printing out any of those methods' args since they may be excessive
344             just_args_methods = {
345                 'commitctx': ('content', 'removed', 'updated'),
346                 'commit': ('content', 'removed', 'updated')
347             }
348             if method in just_args_methods:
349                 skip_args = just_args_methods[method]
350                 call_args = ''
351                 call_kwargs = {}
352                 for k in kwargs:
353                     if k in skip_args:
354                         # replace our skip key with dummy
355                         call_kwargs[k] = f'RemovedParam({k})'
356                     else:
357                         call_kwargs[k] = kwargs[k]
358             else:
359                 call_args = args[1:]
360                 call_kwargs = kwargs
361
362             log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
363                       method, call_args, call_kwargs, context_uid, repo_state_uid)
364
365         statsd = request.registry.statsd
366         if statsd:
367             statsd.incr(
368                 'vcsserver_method_total', tags=[
369                     f"method:{method}",
370                 ])
371         return payload, remote, method, args, kwargs
372
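    # Illustrative sketch of the wire payload _vcs_view_params() expects above: a
    # msgpack mapping with 'id', 'method', optional '_repo_name', and 'params'
    # holding wire/args/kwargs. The method name and repo values below are made-up
    # examples, not a documented part of the protocol.
    @staticmethod
    def _example_client_payload() -> bytes:
        payload = {
            'id': str(uuid.uuid4()),
            'method': 'branches',
            '_repo_name': 'acme/backend',
            'params': {
                'wire': {'path': '/repos/acme/backend', 'context': str(uuid.uuid4())},
                'args': [],
                'kwargs': {},
            },
        }
        return msgpack.packb(payload)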
373 def vcs_view(self, request):
373 def vcs_view(self, request):
374
374
375 payload, remote, method, args, kwargs = self._vcs_view_params(request)
375 payload, remote, method, args, kwargs = self._vcs_view_params(request)
376 payload_id = payload.get('id')
376 payload_id = payload.get('id')
377
377
378 try:
378 try:
379 resp = getattr(remote, method)(*args, **kwargs)
379 resp = getattr(remote, method)(*args, **kwargs)
380 except Exception as e:
380 except Exception as e:
381 exc_info = list(sys.exc_info())
381 exc_info = list(sys.exc_info())
382 exc_type, exc_value, exc_traceback = exc_info
382 exc_type, exc_value, exc_traceback = exc_info
383
383
384 org_exc = getattr(e, '_org_exc', None)
384 org_exc = getattr(e, '_org_exc', None)
385 org_exc_name = None
385 org_exc_name = None
386 org_exc_tb = ''
386 org_exc_tb = ''
387 if org_exc:
387 if org_exc:
388 org_exc_name = org_exc.__class__.__name__
388 org_exc_name = org_exc.__class__.__name__
389 org_exc_tb = getattr(e, '_org_exc_tb', '')
389 org_exc_tb = getattr(e, '_org_exc_tb', '')
390 # replace our "faked" exception with our org
390 # replace our "faked" exception with our org
391 exc_info[0] = org_exc.__class__
391 exc_info[0] = org_exc.__class__
392 exc_info[1] = org_exc
392 exc_info[1] = org_exc
393
393
394 should_store_exc = True
394 should_store_exc = True
395 if org_exc:
395 if org_exc:
396 def get_exc_fqn(_exc_obj):
396 def get_exc_fqn(_exc_obj):
397 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
397 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
398 return module_name + '.' + org_exc_name
398 return module_name + '.' + org_exc_name
399
399
400 exc_fqn = get_exc_fqn(org_exc)
400 exc_fqn = get_exc_fqn(org_exc)
401
401
402 if exc_fqn in ['mercurial.error.RepoLookupError',
402 if exc_fqn in ['mercurial.error.RepoLookupError',
403 'vcsserver.exceptions.RefNotFoundException']:
403 'vcsserver.exceptions.RefNotFoundException']:
404 should_store_exc = False
404 should_store_exc = False
405
405
406 if should_store_exc:
406 if should_store_exc:
407 store_exception(id(exc_info), exc_info, request_path=request.path)
407 store_exception(id(exc_info), exc_info, request_path=request.path)
408
408
409 tb_info = format_exc(exc_info)
409 tb_info = format_exc(exc_info)
410
410
411 type_ = e.__class__.__name__
411 type_ = e.__class__.__name__
412 if type_ not in self.ALLOWED_EXCEPTIONS:
412 if type_ not in self.ALLOWED_EXCEPTIONS:
413 type_ = None
413 type_ = None
414
414
415 resp = {
415 resp = {
416 'id': payload_id,
416 'id': payload_id,
417 'error': {
417 'error': {
418 'message': str(e),
418 'message': str(e),
419 'traceback': tb_info,
419 'traceback': tb_info,
420 'org_exc': org_exc_name,
420 'org_exc': org_exc_name,
421 'org_exc_tb': org_exc_tb,
421 'org_exc_tb': org_exc_tb,
422 'type': type_
422 'type': type_
423 }
423 }
424 }
424 }
425
425
426 try:
426 try:
427 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
427 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
428 except AttributeError:
428 except AttributeError:
429 pass
429 pass
430 else:
430 else:
431 resp = {
431 resp = {
432 'id': payload_id,
432 'id': payload_id,
433 'result': resp
433 'result': resp
434 }
434 }
435 log.debug('Serving data for method %s', method)
435 log.debug('Serving data for method %s', method)
436 return resp
436 return resp
437
437
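The error branch above wraps every failure into a JSON-RPC-style envelope ({'id': ..., 'error': {...}}) rather than letting the exception escape. As a minimal, hypothetical sketch (not RhodeCode's actual client code) of how a caller could consume that envelope; RuntimeError is only a stand-in for whatever exception the caller wants to raise:

    def raise_from_vcs_response(decoded):
        # `decoded` is the already-unpacked response dict built by vcs_view()
        error = decoded.get('error')
        if error is None:
            return decoded['result']
        # 'type' is only filled in for names listed in ALLOWED_EXCEPTIONS
        exc_name = error.get('type') or 'Exception'
        remote_tb = error.get('traceback') or ''
        raise RuntimeError(
            f"{exc_name}: {error.get('message', '')}\nremote traceback:\n{remote_tb}")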
438 def vcs_stream_view(self, request):
438 def vcs_stream_view(self, request):
439 payload, remote, method, args, kwargs = self._vcs_view_params(request)
439 payload, remote, method, args, kwargs = self._vcs_view_params(request)
440         # this method has a `stream:` marker; we strip it off here
440         # this method has a `stream:` marker; we strip it off here
441 method = method.split('stream:')[-1]
441 method = method.split('stream:')[-1]
442 chunk_size = safe_int(payload.get('chunk_size')) or 4096
442 chunk_size = safe_int(payload.get('chunk_size')) or 4096
443
443
444 resp = getattr(remote, method)(*args, **kwargs)
444 resp = getattr(remote, method)(*args, **kwargs)
445
445
446 def get_chunked_data(method_resp):
446 def get_chunked_data(method_resp):
447 stream = io.BytesIO(method_resp)
447 stream = io.BytesIO(method_resp)
448 while 1:
448 while 1:
449 chunk = stream.read(chunk_size)
449 chunk = stream.read(chunk_size)
450 if not chunk:
450 if not chunk:
451 break
451 break
452 yield chunk
452 yield chunk
453
453
454 response = Response(app_iter=get_chunked_data(resp))
454 response = Response(app_iter=get_chunked_data(resp))
455 response.content_type = 'application/octet-stream'
455 response.content_type = 'application/octet-stream'
456
456
457 return response
457 return response
458
458
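For reference, the streaming pattern used by get_chunked_data() above can be exercised on its own; this is an illustrative restatement of the same generator, not an import from the module:

    import io

    def iter_chunks(data: bytes, chunk_size: int = 4096):
        # wrap the full payload and hand it out in fixed-size pieces,
        # exactly like get_chunked_data() does for the remote method result
        stream = io.BytesIO(data)
        while True:
            chunk = stream.read(chunk_size)
            if not chunk:
                break
            yield chunk

    # a 10 KiB payload with the default 4096-byte chunks yields three chunks
    assert [len(c) for c in iter_chunks(b'x' * 10240)] == [4096, 4096, 2048]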
459 def status_view(self, request):
459 def status_view(self, request):
460 import vcsserver
460 import vcsserver
461 _platform_id = platform.uname()[1] or 'instance'
461 _platform_id = platform.uname()[1] or 'instance'
462
462
463 return {
463 return {
464 "status": "OK",
464 "status": "OK",
465 "vcsserver_version": vcsserver.get_version(),
465 "vcsserver_version": vcsserver.get_version(),
466 "platform": _platform_id,
466 "platform": _platform_id,
467 "pid": os.getpid(),
467 "pid": os.getpid(),
468 }
468 }
469
469
470 def service_view(self, request):
470 def service_view(self, request):
471 import vcsserver
471 import vcsserver
472
472
473 payload = msgpack.unpackb(request.body, use_list=True)
473 payload = msgpack.unpackb(request.body, use_list=True)
474 server_config, app_config = {}, {}
474 server_config, app_config = {}, {}
475
475
476 try:
476 try:
477 path = self.global_config['__file__']
477 path = self.global_config['__file__']
478 config = configparser.RawConfigParser()
478 config = configparser.RawConfigParser()
479
479
480 config.read(path)
480 config.read(path)
481
481
482 if config.has_section('server:main'):
482 if config.has_section('server:main'):
483 server_config = dict(config.items('server:main'))
483 server_config = dict(config.items('server:main'))
484 if config.has_section('app:main'):
484 if config.has_section('app:main'):
485 app_config = dict(config.items('app:main'))
485 app_config = dict(config.items('app:main'))
486
486
487 except Exception:
487 except Exception:
488 log.exception('Failed to read .ini file for display')
488 log.exception('Failed to read .ini file for display')
489
489
490 environ = list(os.environ.items())
490 environ = list(os.environ.items())
491
491
492 resp = {
492 resp = {
493 'id': payload.get('id'),
493 'id': payload.get('id'),
494 'result': dict(
494 'result': dict(
495 version=vcsserver.get_version(),
495 version=vcsserver.get_version(),
496 config=server_config,
496 config=server_config,
497 app_config=app_config,
497 app_config=app_config,
498 environ=environ,
498 environ=environ,
499 payload=payload,
499 payload=payload,
500 )
500 )
501 }
501 }
502 return resp
502 return resp
503
503
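The service endpoint speaks msgpack in both directions. A small sketch of the request body a client might POST (assuming msgpack-python >= 1.0 defaults, where string keys round-trip as str); the 'id' value is an example only:

    import msgpack

    # body a client could POST; service_view() echoes 'id' back in its reply
    request_body = msgpack.packb({'id': 'service-check-1'})

    # the view unpacks it the same way before assembling the result dict
    payload = msgpack.unpackb(request_body, use_list=True)
    assert payload == {'id': 'service-check-1'}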
504 def _msgpack_renderer_factory(self, info):
504 def _msgpack_renderer_factory(self, info):
505
505
506 def _render(value, system):
506 def _render(value, system):
507 bin_type = False
507 bin_type = False
508 res = value.get('result')
508 res = value.get('result')
509 if isinstance(res, BytesEnvelope):
509 if isinstance(res, BytesEnvelope):
510 log.debug('Result is wrapped in BytesEnvelope type')
510 log.debug('Result is wrapped in BytesEnvelope type')
511 bin_type = True
511 bin_type = True
512 elif isinstance(res, BinaryEnvelope):
512 elif isinstance(res, BinaryEnvelope):
513 log.debug('Result is wrapped in BinaryEnvelope type')
513 log.debug('Result is wrapped in BinaryEnvelope type')
514 value['result'] = res.val
514 value['result'] = res.val
515 bin_type = True
515 bin_type = True
516
516
517 request = system.get('request')
517 request = system.get('request')
518 if request is not None:
518 if request is not None:
519 response = request.response
519 response = request.response
520 ct = response.content_type
520 ct = response.content_type
521 if ct == response.default_content_type:
521 if ct == response.default_content_type:
522 response.content_type = 'application/x-msgpack'
522 response.content_type = 'application/x-msgpack'
523 if bin_type:
523 if bin_type:
524 response.content_type = 'application/x-msgpack-bin'
524 response.content_type = 'application/x-msgpack-bin'
525
525
526 return msgpack.packb(value, use_bin_type=bin_type)
526 return msgpack.packb(value, use_bin_type=bin_type)
527 return _render
527 return _render
528
528
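The renderer switches to application/x-msgpack-bin whenever the result was wrapped in a Bytes/BinaryEnvelope, because only use_bin_type=True packing keeps bytes and str distinct on the wire. A self-contained round trip showing why the flag matters (plain msgpack behaviour, not code from this module):

    import msgpack

    # with use_bin_type=True (the envelope branch above) bytes survive as bytes
    packed = msgpack.packb({'result': b'\x00\x01', 'note': 'text'}, use_bin_type=True)
    assert msgpack.unpackb(packed, raw=False) == {'result': b'\x00\x01', 'note': 'text'}
    # with use_bin_type=False, bytes are packed like strings and the
    # distinction is lost, hence the separate content type for the bin case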
529 def set_env_from_config(self, environ, config):
529 def set_env_from_config(self, environ, config):
530 dict_conf = {}
530 dict_conf = {}
531 try:
531 try:
532 for elem in config:
532 for elem in config:
533 if elem[0] == 'rhodecode':
533 if elem[0] == 'rhodecode':
534 dict_conf = json.loads(elem[2])
534 dict_conf = json.loads(elem[2])
535 break
535 break
536 except Exception:
536 except Exception:
537 log.exception('Failed to fetch SCM CONFIG')
537 log.exception('Failed to fetch SCM CONFIG')
538 return
538 return
539
539
540 username = dict_conf.get('username')
540 username = dict_conf.get('username')
541 if username:
541 if username:
542 environ['REMOTE_USER'] = username
542 environ['REMOTE_USER'] = username
543             # Mercurial-specific: some extension APIs rely on this being set
543             # Mercurial-specific: some extension APIs rely on this being set
544 environ['HGUSER'] = username
544 environ['HGUSER'] = username
545
545
546 ip = dict_conf.get('ip')
546 ip = dict_conf.get('ip')
547 if ip:
547 if ip:
548 environ['REMOTE_HOST'] = ip
548 environ['REMOTE_HOST'] = ip
549
549
550 if _is_request_chunked(environ):
550 if _is_request_chunked(environ):
551 # set the compatibility flag for webob
551 # set the compatibility flag for webob
552 environ['wsgi.input_terminated'] = True
552 environ['wsgi.input_terminated'] = True
553
553
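set_env_from_config() scans an iterable of (section, name, value) entries and only uses the 'rhodecode' section, whose value is a JSON blob. A hypothetical stand-in config showing the resulting WSGI environ; the entry layout is inferred from the indexing above, and stdlib json is used for the sketch:

    import json

    fake_config = [
        ('rhodecode', 'RC_SCM_DATA',
         json.dumps({'username': 'alice', 'ip': '10.0.0.5'})),
    ]

    environ = {}
    dict_conf = {}
    for section, _name, value in fake_config:
        if section == 'rhodecode':
            dict_conf = json.loads(value)
            break

    # same effect as the method above
    if dict_conf.get('username'):
        environ['REMOTE_USER'] = environ['HGUSER'] = dict_conf['username']
    if dict_conf.get('ip'):
        environ['REMOTE_HOST'] = dict_conf['ip']

    assert environ == {'REMOTE_USER': 'alice', 'HGUSER': 'alice',
                       'REMOTE_HOST': '10.0.0.5'}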
554 def hg_proxy(self):
554 def hg_proxy(self):
555 @wsgiapp
555 @wsgiapp
556 def _hg_proxy(environ, start_response):
556 def _hg_proxy(environ, start_response):
557 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
557 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
558 return app(environ, start_response)
558 return app(environ, start_response)
559 return _hg_proxy
559 return _hg_proxy
560
560
561 def git_proxy(self):
561 def git_proxy(self):
562 @wsgiapp
562 @wsgiapp
563 def _git_proxy(environ, start_response):
563 def _git_proxy(environ, start_response):
564 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
564 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
565 return app(environ, start_response)
565 return app(environ, start_response)
566 return _git_proxy
566 return _git_proxy
567
567
568 def hg_stream(self):
568 def hg_stream(self):
569 if self._use_echo_app:
569 if self._use_echo_app:
570 @wsgiapp
570 @wsgiapp
571 def _hg_stream(environ, start_response):
571 def _hg_stream(environ, start_response):
572 app = EchoApp('fake_path', 'fake_name', None)
572 app = EchoApp('fake_path', 'fake_name', None)
573 return app(environ, start_response)
573 return app(environ, start_response)
574 return _hg_stream
574 return _hg_stream
575 else:
575 else:
576 @wsgiapp
576 @wsgiapp
577 def _hg_stream(environ, start_response):
577 def _hg_stream(environ, start_response):
578 log.debug('http-app: handling hg stream')
578 log.debug('http-app: handling hg stream')
579 call_context = get_headers_call_context(environ)
579 call_context = get_headers_call_context(environ)
580
580
581 repo_path = call_context['repo_path']
581 repo_path = call_context['repo_path']
582 repo_name = call_context['repo_name']
582 repo_name = call_context['repo_name']
583 config = call_context['repo_config']
583 config = call_context['repo_config']
584
584
585 app = scm_app.create_hg_wsgi_app(
585 app = scm_app.create_hg_wsgi_app(
586 repo_path, repo_name, config)
586 repo_path, repo_name, config)
587
587
588 # Consistent path information for hgweb
588 # Consistent path information for hgweb
589 environ['PATH_INFO'] = call_context['path_info']
589 environ['PATH_INFO'] = call_context['path_info']
590 environ['REPO_NAME'] = repo_name
590 environ['REPO_NAME'] = repo_name
591 self.set_env_from_config(environ, config)
591 self.set_env_from_config(environ, config)
592
592
593 log.debug('http-app: starting app handler '
593 log.debug('http-app: starting app handler '
594 'with %s and process request', app)
594 'with %s and process request', app)
595 return app(environ, ResponseFilter(start_response))
595 return app(environ, ResponseFilter(start_response))
596 return _hg_stream
596 return _hg_stream
597
597
598 def git_stream(self):
598 def git_stream(self):
599 if self._use_echo_app:
599 if self._use_echo_app:
600 @wsgiapp
600 @wsgiapp
601 def _git_stream(environ, start_response):
601 def _git_stream(environ, start_response):
602 app = EchoApp('fake_path', 'fake_name', None)
602 app = EchoApp('fake_path', 'fake_name', None)
603 return app(environ, start_response)
603 return app(environ, start_response)
604 return _git_stream
604 return _git_stream
605 else:
605 else:
606 @wsgiapp
606 @wsgiapp
607 def _git_stream(environ, start_response):
607 def _git_stream(environ, start_response):
608 log.debug('http-app: handling git stream')
608 log.debug('http-app: handling git stream')
609
609
610 call_context = get_headers_call_context(environ)
610 call_context = get_headers_call_context(environ)
611
611
612 repo_path = call_context['repo_path']
612 repo_path = call_context['repo_path']
613 repo_name = call_context['repo_name']
613 repo_name = call_context['repo_name']
614 config = call_context['repo_config']
614 config = call_context['repo_config']
615
615
616 environ['PATH_INFO'] = call_context['path_info']
616 environ['PATH_INFO'] = call_context['path_info']
617 self.set_env_from_config(environ, config)
617 self.set_env_from_config(environ, config)
618
618
619 content_type = environ.get('CONTENT_TYPE', '')
619 content_type = environ.get('CONTENT_TYPE', '')
620
620
621 path = environ['PATH_INFO']
621 path = environ['PATH_INFO']
622 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
622 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
623 log.debug(
623 log.debug(
624 'LFS: Detecting if request `%s` is LFS server path based '
624 'LFS: Detecting if request `%s` is LFS server path based '
625 'on content type:`%s`, is_lfs:%s',
625 'on content type:`%s`, is_lfs:%s',
626 path, content_type, is_lfs_request)
626 path, content_type, is_lfs_request)
627
627
628 if not is_lfs_request:
628 if not is_lfs_request:
629 # fallback detection by path
629 # fallback detection by path
630 if GIT_LFS_PROTO_PAT.match(path):
630 if GIT_LFS_PROTO_PAT.match(path):
631 is_lfs_request = True
631 is_lfs_request = True
632 log.debug(
632 log.debug(
633 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
633 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
634 path, is_lfs_request)
634 path, is_lfs_request)
635
635
636 if is_lfs_request:
636 if is_lfs_request:
637 app = scm_app.create_git_lfs_wsgi_app(
637 app = scm_app.create_git_lfs_wsgi_app(
638 repo_path, repo_name, config)
638 repo_path, repo_name, config)
639 else:
639 else:
640 app = scm_app.create_git_wsgi_app(
640 app = scm_app.create_git_wsgi_app(
641 repo_path, repo_name, config)
641 repo_path, repo_name, config)
642
642
643 log.debug('http-app: starting app handler '
643 log.debug('http-app: starting app handler '
644 'with %s and process request', app)
644 'with %s and process request', app)
645
645
646 return app(environ, start_response)
646 return app(environ, start_response)
647
647
648 return _git_stream
648 return _git_stream
649
649
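The LFS routing above is a two-step check: content type first, URL pattern as a fallback. A minimal, hypothetical helper capturing the same decision; the content-type constant and compiled path pattern are passed in rather than imported, since only their usage appears in this hunk:

    def is_lfs_request(environ, lfs_content_type, lfs_path_pattern):
        # primary signal: the git-lfs content type sent by the LFS client
        if lfs_content_type in environ.get('CONTENT_TYPE', ''):
            return True
        # fallback: some clients omit the content type, so match on the
        # .../info/lfs/... style path instead
        return bool(lfs_path_pattern.match(environ.get('PATH_INFO', '')))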
650 def handle_vcs_exception(self, exception, request):
650 def handle_vcs_exception(self, exception, request):
651 _vcs_kind = getattr(exception, '_vcs_kind', '')
651 _vcs_kind = getattr(exception, '_vcs_kind', '')
652
652
653 if _vcs_kind == 'repo_locked':
653 if _vcs_kind == 'repo_locked':
654 headers_call_context = get_headers_call_context(request.environ)
654 headers_call_context = get_headers_call_context(request.environ)
655 status_code = safe_int(headers_call_context['locked_status_code'])
655 status_code = safe_int(headers_call_context['locked_status_code'])
656
656
657 return HTTPRepoLocked(
657 return HTTPRepoLocked(
658 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
658 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
659
659
660 elif _vcs_kind == 'repo_branch_protected':
660 elif _vcs_kind == 'repo_branch_protected':
661 # Get custom repo-branch-protected status code if present.
661 # Get custom repo-branch-protected status code if present.
662 return HTTPRepoBranchProtected(
662 return HTTPRepoBranchProtected(
663 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
663 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
664
664
665 exc_info = request.exc_info
665 exc_info = request.exc_info
666 store_exception(id(exc_info), exc_info)
666 store_exception(id(exc_info), exc_info)
667
667
668 traceback_info = 'unavailable'
668 traceback_info = 'unavailable'
669 if request.exc_info:
669 if request.exc_info:
670 traceback_info = format_exc(request.exc_info)
670 traceback_info = format_exc(request.exc_info)
671
671
672 log.error(
672 log.error(
673 'error occurred handling this request for path: %s, \n%s',
673 'error occurred handling this request for path: %s, \n%s',
674 request.path, traceback_info)
674 request.path, traceback_info)
675
675
676 statsd = request.registry.statsd
676 statsd = request.registry.statsd
677 if statsd:
677 if statsd:
678 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
678 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
679 statsd.incr('vcsserver_exception_total',
679 statsd.incr('vcsserver_exception_total',
680 tags=[f"type:{exc_type}"])
680 tags=[f"type:{exc_type}"])
681 raise exception
681 raise exception
682
682
683
683
684 class ResponseFilter:
684 class ResponseFilter:
685
685
686 def __init__(self, start_response):
686 def __init__(self, start_response):
687 self._start_response = start_response
687 self._start_response = start_response
688
688
689 def __call__(self, status, response_headers, exc_info=None):
689 def __call__(self, status, response_headers, exc_info=None):
690 headers = tuple(
690 headers = tuple(
691 (h, v) for h, v in response_headers
691 (h, v) for h, v in response_headers
692 if not wsgiref.util.is_hop_by_hop(h))
692 if not wsgiref.util.is_hop_by_hop(h))
693 return self._start_response(status, headers, exc_info)
693 return self._start_response(status, headers, exc_info)
694
694
695
695
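ResponseFilter only strips hop-by-hop headers, which must not be forwarded by the WSGI layer. A quick stdlib-only check of which headers survive the filter:

    import wsgiref.util

    headers = [
        ('Connection', 'keep-alive'),             # hop-by-hop: dropped
        ('Transfer-Encoding', 'chunked'),         # hop-by-hop: dropped
        ('Content-Type', 'application/x-git-upload-pack-result'),  # kept
    ]
    kept = [(h, v) for h, v in headers if not wsgiref.util.is_hop_by_hop(h)]
    assert kept == [('Content-Type', 'application/x-git-upload-pack-result')]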
696 def sanitize_settings_and_apply_defaults(global_config, settings):
696 def sanitize_settings_and_apply_defaults(global_config, settings):
697 _global_settings_maker = SettingsMaker(global_config)
697 _global_settings_maker = SettingsMaker(global_config)
698 settings_maker = SettingsMaker(settings)
698 settings_maker = SettingsMaker(settings)
699
699
700 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
700 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
701
701
702 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
702 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
703 settings_maker.enable_logging(logging_conf)
703 settings_maker.enable_logging(logging_conf)
704
704
705 # Default includes, possible to change as a user
705 # Default includes, possible to change as a user
706 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
706 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
707 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
707 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
708
708
709 settings_maker.make_setting('__file__', global_config.get('__file__'))
709 settings_maker.make_setting('__file__', global_config.get('__file__'))
710
710
711 settings_maker.make_setting('pyramid.default_locale_name', 'en')
711 settings_maker.make_setting('pyramid.default_locale_name', 'en')
712 settings_maker.make_setting('locale', 'en_US.UTF-8')
712 settings_maker.make_setting('locale', 'en_US.UTF-8')
713
713
714 settings_maker.make_setting(
714 settings_maker.make_setting(
715 'core.binary_dir', '/usr/local/bin/rhodecode_bin/vcs_bin',
715 'core.binary_dir', '/usr/local/bin/rhodecode_bin/vcs_bin',
716 default_when_empty=True, parser='string:noquote')
716 default_when_empty=True, parser='string:noquote')
717
717
718 temp_store = tempfile.gettempdir()
718 temp_store = tempfile.gettempdir()
719 default_cache_dir = os.path.join(temp_store, 'rc_cache')
719 default_cache_dir = os.path.join(temp_store, 'rc_cache')
720 # save default, cache dir, and use it for all backends later.
720 # save default, cache dir, and use it for all backends later.
721 default_cache_dir = settings_maker.make_setting(
721 default_cache_dir = settings_maker.make_setting(
722 'cache_dir',
722 'cache_dir',
723 default=default_cache_dir, default_when_empty=True,
723 default=default_cache_dir, default_when_empty=True,
724 parser='dir:ensured')
724 parser='dir:ensured')
725
725
726 # exception store cache
726 # exception store cache
727 settings_maker.make_setting(
727 settings_maker.make_setting(
728 'exception_tracker.store_path',
728 'exception_tracker.store_path',
729 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
729 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
730 parser='dir:ensured'
730 parser='dir:ensured'
731 )
731 )
732
732
733 # repo_object cache defaults
733 # repo_object cache defaults
734 settings_maker.make_setting(
734 settings_maker.make_setting(
735 'rc_cache.repo_object.backend',
735 'rc_cache.repo_object.backend',
736 default='dogpile.cache.rc.file_namespace',
736 default='dogpile.cache.rc.file_namespace',
737 parser='string')
737 parser='string')
738 settings_maker.make_setting(
738 settings_maker.make_setting(
739 'rc_cache.repo_object.expiration_time',
739 'rc_cache.repo_object.expiration_time',
740 default=30 * 24 * 60 * 60, # 30days
740 default=30 * 24 * 60 * 60, # 30days
741 parser='int')
741 parser='int')
742 settings_maker.make_setting(
742 settings_maker.make_setting(
743 'rc_cache.repo_object.arguments.filename',
743 'rc_cache.repo_object.arguments.filename',
744 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
744 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
745 parser='string')
745 parser='string')
746
746
747 # statsd
747 # statsd
748 settings_maker.make_setting('statsd.enabled', False, parser='bool')
748 settings_maker.make_setting('statsd.enabled', False, parser='bool')
749 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
749 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
750 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
750 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
751 settings_maker.make_setting('statsd.statsd_prefix', '')
751 settings_maker.make_setting('statsd.statsd_prefix', '')
752 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
752 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
753
753
754 settings_maker.env_expand()
754 settings_maker.env_expand()
755
755
756
756
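For orientation, the keys defaulted above map one-to-one onto ini settings. The values below are illustrative examples only; the paths and hosts are assumptions, except where the code above states the default itself:

    example_settings = {
        'cache_dir': '/var/opt/rhodecode_data/rc_cache',                 # example path
        'exception_tracker.store_path':
            '/var/opt/rhodecode_data/rc_cache/exc_store',                # example path
        'rc_cache.repo_object.backend': 'dogpile.cache.rc.file_namespace',
        'rc_cache.repo_object.expiration_time': str(30 * 24 * 60 * 60),  # 30 days
        'statsd.enabled': 'true',
        'statsd.statsd_host': 'statsd-exporter',
        'statsd.statsd_port': '9125',
    }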
757 def main(global_config, **settings):
757 def main(global_config, **settings):
758 start_time = time.time()
758 start_time = time.time()
759 log.info('Pyramid app config starting')
759 log.info('Pyramid app config starting')
760
760
761 if MercurialFactory:
761 if MercurialFactory:
762 hgpatches.patch_largefiles_capabilities()
762 hgpatches.patch_largefiles_capabilities()
763 hgpatches.patch_subrepo_type_mapping()
763 hgpatches.patch_subrepo_type_mapping()
764
764
765 # Fill in and sanitize the defaults & do ENV expansion
765 # Fill in and sanitize the defaults & do ENV expansion
766 sanitize_settings_and_apply_defaults(global_config, settings)
766 sanitize_settings_and_apply_defaults(global_config, settings)
767
767
768 # init and bootstrap StatsdClient
768 # init and bootstrap StatsdClient
769 StatsdClient.setup(settings)
769 StatsdClient.setup(settings)
770
770
771 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
771 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
772 total_time = time.time() - start_time
772 total_time = time.time() - start_time
773 log.info('Pyramid app created and configured in %.2fs', total_time)
773 log.info('Pyramid app created and configured in %.2fs', total_time)
774 return pyramid_app
774 return pyramid_app
NO CONTENT: file renamed from vcsserver/lib/rc_json.py to vcsserver/lib/ext_json.py
@@ -1,417 +1,417 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import dulwich.protocol
24 import dulwich.protocol
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver.lib.rc_json import json
28 from vcsserver.lib.ext_json import json
29 from vcsserver import hooks, subprocessio
29 from vcsserver import hooks, subprocessio
30 from vcsserver.str_utils import ascii_bytes
30 from vcsserver.str_utils import ascii_bytes
31
31
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 class FileWrapper:
36 class FileWrapper:
37     """File wrapper that limits how much data can be read from it."""
37     """File wrapper that limits how much data can be read from it."""
38
38
39 def __init__(self, fd, content_length):
39 def __init__(self, fd, content_length):
40 self.fd = fd
40 self.fd = fd
41 self.content_length = content_length
41 self.content_length = content_length
42 self.remain = content_length
42 self.remain = content_length
43
43
44 def read(self, size):
44 def read(self, size):
45 if size <= self.remain:
45 if size <= self.remain:
46 try:
46 try:
47 data = self.fd.read(size)
47 data = self.fd.read(size)
48 except socket.error:
48 except socket.error:
49 raise IOError(self)
49 raise IOError(self)
50 self.remain -= size
50 self.remain -= size
51 elif self.remain:
51 elif self.remain:
52 data = self.fd.read(self.remain)
52 data = self.fd.read(self.remain)
53 self.remain = 0
53 self.remain = 0
54 else:
54 else:
55 data = None
55 data = None
56 return data
56 return data
57
57
58 def __repr__(self):
58 def __repr__(self):
59 return '<FileWrapper {} len: {}, read: {}>'.format(
59 return '<FileWrapper {} len: {}, read: {}>'.format(
60 self.fd, self.content_length, self.content_length - self.remain
60 self.fd, self.content_length, self.content_length - self.remain
61 )
61 )
62
62
63
63
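A short usage sketch of the FileWrapper above (the class is assumed to be in scope or passed in; its module path is not shown in this hunk): reads stop once the declared content length is consumed, even if the underlying stream holds more data.

    import io

    def demo_file_wrapper(file_wrapper_cls):
        body = io.BytesIO(b'0008want ABC')               # 12 bytes available
        wrapped = file_wrapper_cls(body, content_length=8)
        first = wrapped.read(8)                          # -> b'0008want'
        second = wrapped.read(8)                         # -> None, budget is spent
        return first, second

    # demo_file_wrapper(FileWrapper) would return (b'0008want', None)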
64 class GitRepository:
64 class GitRepository:
65 """WSGI app for handling Git smart protocol endpoints."""
65 """WSGI app for handling Git smart protocol endpoints."""
66
66
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
69 valid_accepts = frozenset(f'application/x-{c}-result' for c in commands)
69 valid_accepts = frozenset(f'application/x-{c}-result' for c in commands)
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 FLUSH_PACKET = b"0000"
76 FLUSH_PACKET = b"0000"
77
77
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
79
79
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
81 files = frozenset(f.lower() for f in os.listdir(content_path))
81 files = frozenset(f.lower() for f in os.listdir(content_path))
82 valid_dir_signature = self.git_folder_signature.issubset(files)
82 valid_dir_signature = self.git_folder_signature.issubset(files)
83
83
84 if not valid_dir_signature:
84 if not valid_dir_signature:
85 raise OSError(f'{content_path} missing git signature')
85 raise OSError(f'{content_path} missing git signature')
86
86
87 self.content_path = content_path
87 self.content_path = content_path
88 self.repo_name = repo_name
88 self.repo_name = repo_name
89 self.extras = extras
89 self.extras = extras
90 self.git_path = git_path
90 self.git_path = git_path
91 self.update_server_info = update_server_info
91 self.update_server_info = update_server_info
92
92
93 def _get_fixedpath(self, path):
93 def _get_fixedpath(self, path):
94 """
94 """
95 Small fix for repo_path
95 Small fix for repo_path
96
96
97 :param path:
97 :param path:
98 """
98 """
99 path = path.split(self.repo_name, 1)[-1]
99 path = path.split(self.repo_name, 1)[-1]
100 if path.startswith('.git'):
100 if path.startswith('.git'):
101             # for bare repos we still get the .git prefix inside; we skip it
101             # for bare repos we still get the .git prefix inside; we skip it
102             # here and strip it from the path passed to the service command
102             # here and strip it from the path passed to the service command
103 path = path[4:]
103 path = path[4:]
104
104
105 return path.strip('/')
105 return path.strip('/')
106
106
107 def inforefs(self, request, unused_environ):
107 def inforefs(self, request, unused_environ):
108 """
108 """
109 WSGI Response producer for HTTP GET Git Smart
109 WSGI Response producer for HTTP GET Git Smart
110 HTTP /info/refs request.
110 HTTP /info/refs request.
111 """
111 """
112
112
113 git_command = request.GET.get('service')
113 git_command = request.GET.get('service')
114 if git_command not in self.commands:
114 if git_command not in self.commands:
115 log.debug('command %s not allowed', git_command)
115 log.debug('command %s not allowed', git_command)
116 return exc.HTTPForbidden()
116 return exc.HTTPForbidden()
117
117
118         # please, resist the urge to add '\n' to the git capture and to
118         # please, resist the urge to add '\n' to the git capture and to
119         # increment the line count by 1.
119         # increment the line count by 1.
120         # per the git docs: Documentation/technical/http-protocol.txt#L214, \n is
120         # per the git docs: Documentation/technical/http-protocol.txt#L214, \n is
121         # a part of the protocol.
121         # a part of the protocol.
122         # The Git client code not only does NOT need the '\n', but actually
122         # The Git client code not only does NOT need the '\n', but actually
123         # blows up if you sprinkle "flush" (0000) as "0001\n".
123         # blows up if you sprinkle "flush" (0000) as "0001\n".
124         # It reads binary, per the number of bytes specified.
124         # It reads binary, per the number of bytes specified.
125         # If you do add '\n' as part of the data, count it.
125         # If you do add '\n' as part of the data, count it.
126 server_advert = f'# service={git_command}\n'
126 server_advert = f'# service={git_command}\n'
127 packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
127 packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
128 try:
128 try:
129 gitenv = dict(os.environ)
129 gitenv = dict(os.environ)
130 # forget all configs
130 # forget all configs
131 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
131 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
132 command = [self.git_path, git_command[4:], '--stateless-rpc',
132 command = [self.git_path, git_command[4:], '--stateless-rpc',
133 '--advertise-refs', self.content_path]
133 '--advertise-refs', self.content_path]
134 out = subprocessio.SubprocessIOChunker(
134 out = subprocessio.SubprocessIOChunker(
135 command,
135 command,
136 env=gitenv,
136 env=gitenv,
137 starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
137 starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
138 shell=False
138 shell=False
139 )
139 )
140 except OSError:
140 except OSError:
141 log.exception('Error processing command')
141 log.exception('Error processing command')
142 raise exc.HTTPExpectationFailed()
142 raise exc.HTTPExpectationFailed()
143
143
144 resp = Response()
144 resp = Response()
145 resp.content_type = f'application/x-{git_command}-advertisement'
145 resp.content_type = f'application/x-{git_command}-advertisement'
146 resp.charset = None
146 resp.charset = None
147 resp.app_iter = out
147 resp.app_iter = out
148
148
149 return resp
149 return resp
150
150
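A worked example of the pkt-line length computed in inforefs() above, for git-upload-pack: the 4-byte hex length counts itself plus the payload.

    service = 'git-upload-pack'
    server_advert = f'# service={service}\n'                  # 26 bytes
    packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
    assert packet_len == '001e'                               # 26 + 4 == 30 == 0x1e
    # so the stream handed to the Git client begins with
    #   b'001e# service=git-upload-pack\n0000<ref advertisement ...>'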
151 def _get_want_capabilities(self, request):
151 def _get_want_capabilities(self, request):
152 """Read the capabilities found in the first want line of the request."""
152 """Read the capabilities found in the first want line of the request."""
153 pos = request.body_file_seekable.tell()
153 pos = request.body_file_seekable.tell()
154 first_line = request.body_file_seekable.readline()
154 first_line = request.body_file_seekable.readline()
155 request.body_file_seekable.seek(pos)
155 request.body_file_seekable.seek(pos)
156
156
157 return frozenset(
157 return frozenset(
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
159
159
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
161 """
161 """
162 Construct a response with an empty PACK file.
162 Construct a response with an empty PACK file.
163
163
164 We use an empty PACK file, as that would trigger the failure of the pull
164 We use an empty PACK file, as that would trigger the failure of the pull
165 or clone command.
165 or clone command.
166
166
167 We also print in the error output a message explaining why the command
167 We also print in the error output a message explaining why the command
168 was aborted.
168 was aborted.
169
169
170         If, additionally, the client accepts messages, we send it the output
170         If, additionally, the client accepts messages, we send it the output
171         of the pre-pull hook.
171         of the pre-pull hook.
172
172
173 Note that for clients not supporting side-band we just send them the
173 Note that for clients not supporting side-band we just send them the
174         empty PACK file.
174         empty PACK file.
175 """
175 """
176
176
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
178 response = []
178 response = []
179 proto = dulwich.protocol.Protocol(None, response.append)
179 proto = dulwich.protocol.Protocol(None, response.append)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
181
181
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
184 # produces a fatal error in the client:
184 # produces a fatal error in the client:
185 # fatal: error in sideband demultiplexer
185 # fatal: error in sideband demultiplexer
186 proto.write_sideband(
186 proto.write_sideband(
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
189 proto.write_sideband(
189 proto.write_sideband(
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
192
192
193 # writes b"0000" as default
193 # writes b"0000" as default
194 proto.write_pkt_line(None)
194 proto.write_pkt_line(None)
195
195
196 return response
196 return response
197 else:
197 else:
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
199
199
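The EMPTY_PACK constant used above can be re-derived from its own comment: a pack trailer is the SHA-1 of everything before it, which here is just the 12-byte header ('PACK', version 2, zero objects).

    import hashlib

    header = b'PACK' + (2).to_bytes(4, 'big') + (0).to_bytes(4, 'big')
    trailer = hashlib.sha1(header).digest()
    assert (header + trailer) == (
        b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08'
        b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
    )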
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
201 """
201 """
202         Given a streamed response iterator, inject the post-pull messages.
202         Given a streamed response iterator, inject the post-pull messages.
203
203
204 We only inject the messages if the client supports sideband, and the
204 We only inject the messages if the client supports sideband, and the
205 response has the format:
205 response has the format:
206 0008NAK\n...0000
206 0008NAK\n...0000
207
207
208 Note that we do not check the no-progress capability as by default, git
208 Note that we do not check the no-progress capability as by default, git
209 sends it, which effectively would block all messages.
209 sends it, which effectively would block all messages.
210 """
210 """
211
211
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
213 return response
213 return response
214
214
215 if not start_message and not end_message:
215 if not start_message and not end_message:
216 return response
216 return response
217
217
218 try:
218 try:
219 iter(response)
219 iter(response)
220 # iterator probably will work, we continue
220 # iterator probably will work, we continue
221 except TypeError:
221 except TypeError:
222 raise TypeError(f'response must be an iterator: got {type(response)}')
222 raise TypeError(f'response must be an iterator: got {type(response)}')
223 if isinstance(response, (list, tuple)):
223 if isinstance(response, (list, tuple)):
224 raise TypeError(f'response must be an iterator: got {type(response)}')
224 raise TypeError(f'response must be an iterator: got {type(response)}')
225
225
226 def injected_response():
226 def injected_response():
227
227
228 do_loop = 1
228 do_loop = 1
229 header_injected = 0
229 header_injected = 0
230 next_item = None
230 next_item = None
231 has_item = False
231 has_item = False
232 item = b''
232 item = b''
233
233
234 while do_loop:
234 while do_loop:
235
235
236 try:
236 try:
237 next_item = next(response)
237 next_item = next(response)
238 except StopIteration:
238 except StopIteration:
239 do_loop = 0
239 do_loop = 0
240
240
241 if has_item:
241 if has_item:
242                     # last item! alter it now
242                     # last item! alter it now
243 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
243 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
244 new_response = [item[:-4]]
244 new_response = [item[:-4]]
245 new_response.extend(self._get_messages(end_message, capabilities))
245 new_response.extend(self._get_messages(end_message, capabilities))
246 new_response.append(self.FLUSH_PACKET)
246 new_response.append(self.FLUSH_PACKET)
247 item = b''.join(new_response)
247 item = b''.join(new_response)
248
248
249 yield item
249 yield item
250
250
251 has_item = True
251 has_item = True
252 item = next_item
252 item = next_item
253
253
254 # alter item if it's the initial chunk
254 # alter item if it's the initial chunk
255 if not header_injected and item.startswith(b'0008NAK\n'):
255 if not header_injected and item.startswith(b'0008NAK\n'):
256 new_response = [b'0008NAK\n']
256 new_response = [b'0008NAK\n']
257 new_response.extend(self._get_messages(start_message, capabilities))
257 new_response.extend(self._get_messages(start_message, capabilities))
258 new_response.append(item[8:])
258 new_response.append(item[8:])
259 item = b''.join(new_response)
259 item = b''.join(new_response)
260 header_injected = 1
260 header_injected = 1
261
261
262 return injected_response()
262 return injected_response()
263
263
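A toy, non-streaming illustration of the rewrite performed by injected_response() above: start messages go right after the leading '0008NAK\n' of the first chunk, end messages go just before the trailing '0000' flush of the last chunk. Sideband framing of the messages themselves is omitted for brevity.

    def inject(chunks, start_msgs, end_msgs):
        chunks = list(chunks)               # the real code stays lazy instead
        chunks[0] = b'0008NAK\n' + b''.join(start_msgs) + chunks[0][8:]
        chunks[-1] = chunks[-1][:-4] + b''.join(end_msgs) + b'0000'
        return chunks

    assert inject([b'0008NAK\n<pack...>', b'<...pack>0000'],
                  [b'<start-banner>'], [b'<end-banner>']) == [
        b'0008NAK\n<start-banner><pack...>',
        b'<...pack><end-banner>0000',
    ]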
264 def _write_sideband_to_proto(self, proto, data, capabilities):
264 def _write_sideband_to_proto(self, proto, data, capabilities):
265 """
265 """
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
266 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
267
267
268 We do not use dulwich's write_sideband directly as it only supports
268 We do not use dulwich's write_sideband directly as it only supports
269 side-band-64k.
269 side-band-64k.
270 """
270 """
271 if not data:
271 if not data:
272 return
272 return
273
273
274 # N.B.(skreft): The values below are explained in the pack protocol
274 # N.B.(skreft): The values below are explained in the pack protocol
275 # documentation, section Packfile Data.
275 # documentation, section Packfile Data.
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
276 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
277 if CAPABILITY_SIDE_BAND_64K in capabilities:
278 chunk_size = 65515
278 chunk_size = 65515
279 elif CAPABILITY_SIDE_BAND in capabilities:
279 elif CAPABILITY_SIDE_BAND in capabilities:
280 chunk_size = 995
280 chunk_size = 995
281 else:
281 else:
282 return
282 return
283
283
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
284 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
285
285
286 for chunk in chunker:
286 for chunk in chunker:
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
287 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
288
288
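The 65515/995 chunk sizes above appear to follow directly from the pkt-line limits in the pack protocol: at most 65520 bytes per side-band-64k packet and 1000 bytes per plain side-band packet, minus the 4-byte length prefix and the 1-byte band number.

    assert 65520 - 4 - 1 == 65515   # side-band-64k payload per packet
    assert 1000 - 4 - 1 == 995      # plain side-band payload per packet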
289 def _get_messages(self, data, capabilities):
289 def _get_messages(self, data, capabilities):
290 """Return a list with packets for sending data in sideband number 2."""
290 """Return a list with packets for sending data in sideband number 2."""
291 response = []
291 response = []
292 proto = dulwich.protocol.Protocol(None, response.append)
292 proto = dulwich.protocol.Protocol(None, response.append)
293
293
294 self._write_sideband_to_proto(proto, data, capabilities)
294 self._write_sideband_to_proto(proto, data, capabilities)
295
295
296 return response
296 return response
297
297
298 def backend(self, request, environ):
298 def backend(self, request, environ):
299 """
299 """
300 WSGI Response producer for HTTP POST Git Smart HTTP requests.
300 WSGI Response producer for HTTP POST Git Smart HTTP requests.
301 Reads commands and data from HTTP POST's body.
301 Reads commands and data from HTTP POST's body.
302         Returns an iterator object with the contents of the git command's
302         Returns an iterator object with the contents of the git command's
303         response written to stdout.
303         response written to stdout.
304 """
304 """
305 # TODO(skreft): think how we could detect an HTTPLockedException, as
305 # TODO(skreft): think how we could detect an HTTPLockedException, as
306 # we probably want to have the same mechanism used by mercurial and
306 # we probably want to have the same mechanism used by mercurial and
307 # simplevcs.
307 # simplevcs.
308 # For that we would need to parse the output of the command looking for
308 # For that we would need to parse the output of the command looking for
309 # some signs of the HTTPLockedError, parse the data and reraise it in
309 # some signs of the HTTPLockedError, parse the data and reraise it in
310 # pygrack. However, that would interfere with the streaming.
310 # pygrack. However, that would interfere with the streaming.
311 #
311 #
312 # Now the output of a blocked push is:
312 # Now the output of a blocked push is:
313 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
313 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
314 # POST git-receive-pack (1047 bytes)
314 # POST git-receive-pack (1047 bytes)
315 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
315 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
316 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
316 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
317 # ! [remote rejected] master -> master (pre-receive hook declined)
317 # ! [remote rejected] master -> master (pre-receive hook declined)
318 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
318 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
319
319
320 git_command = self._get_fixedpath(request.path_info)
320 git_command = self._get_fixedpath(request.path_info)
321 if git_command not in self.commands:
321 if git_command not in self.commands:
322 log.debug('command %s not allowed', git_command)
322 log.debug('command %s not allowed', git_command)
323 return exc.HTTPForbidden()
323 return exc.HTTPForbidden()
324
324
325 capabilities = None
325 capabilities = None
326 if git_command == 'git-upload-pack':
326 if git_command == 'git-upload-pack':
327 capabilities = self._get_want_capabilities(request)
327 capabilities = self._get_want_capabilities(request)
328
328
329 if 'CONTENT_LENGTH' in environ:
329 if 'CONTENT_LENGTH' in environ:
330 inputstream = FileWrapper(request.body_file_seekable,
330 inputstream = FileWrapper(request.body_file_seekable,
331 request.content_length)
331 request.content_length)
332 else:
332 else:
333 inputstream = request.body_file_seekable
333 inputstream = request.body_file_seekable
334
334
335 resp = Response()
335 resp = Response()
336 resp.content_type = f'application/x-{git_command}-result'
336 resp.content_type = f'application/x-{git_command}-result'
337 resp.charset = None
337 resp.charset = None
338
338
339 pre_pull_messages = ''
339 pre_pull_messages = ''
340 # Upload-pack == clone
340 # Upload-pack == clone
341 if git_command == 'git-upload-pack':
341 if git_command == 'git-upload-pack':
342 hook_response = hooks.git_pre_pull(self.extras)
342 hook_response = hooks.git_pre_pull(self.extras)
343 if hook_response.status != 0:
343 if hook_response.status != 0:
344 pre_pull_messages = hook_response.output
344 pre_pull_messages = hook_response.output
345 resp.app_iter = self._build_failed_pre_pull_response(
345 resp.app_iter = self._build_failed_pre_pull_response(
346 capabilities, pre_pull_messages)
346 capabilities, pre_pull_messages)
347 return resp
347 return resp
348
348
349 gitenv = dict(os.environ)
349 gitenv = dict(os.environ)
350 # forget all configs
350 # forget all configs
351 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
351 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
352 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
352 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
353 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
353 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
354 self.content_path]
354 self.content_path]
355 log.debug('handling cmd %s', cmd)
355 log.debug('handling cmd %s', cmd)
356
356
357 out = subprocessio.SubprocessIOChunker(
357 out = subprocessio.SubprocessIOChunker(
358 cmd,
358 cmd,
359 input_stream=inputstream,
359 input_stream=inputstream,
360 env=gitenv,
360 env=gitenv,
361 cwd=self.content_path,
361 cwd=self.content_path,
362 shell=False,
362 shell=False,
363 fail_on_stderr=False,
363 fail_on_stderr=False,
364 fail_on_return_code=False
364 fail_on_return_code=False
365 )
365 )
366
366
367 if self.update_server_info and git_command == 'git-receive-pack':
367 if self.update_server_info and git_command == 'git-receive-pack':
368 # We need to fully consume the iterator here, as the
368 # We need to fully consume the iterator here, as the
369 # update-server-info command needs to be run after the push.
369 # update-server-info command needs to be run after the push.
370 out = list(out)
370 out = list(out)
371
371
372 # Updating refs manually after each push.
372 # Updating refs manually after each push.
373 # This is required as some clients are exposing Git repos internally
373 # This is required as some clients are exposing Git repos internally
374 # with the dumb protocol.
374 # with the dumb protocol.
375 cmd = [self.git_path, 'update-server-info']
375 cmd = [self.git_path, 'update-server-info']
376 log.debug('handling cmd %s', cmd)
376 log.debug('handling cmd %s', cmd)
377 output = subprocessio.SubprocessIOChunker(
377 output = subprocessio.SubprocessIOChunker(
378 cmd,
378 cmd,
379 input_stream=inputstream,
379 input_stream=inputstream,
380 env=gitenv,
380 env=gitenv,
381 cwd=self.content_path,
381 cwd=self.content_path,
382 shell=False,
382 shell=False,
383 fail_on_stderr=False,
383 fail_on_stderr=False,
384 fail_on_return_code=False
384 fail_on_return_code=False
385 )
385 )
386 # Consume all the output so the subprocess finishes
386 # Consume all the output so the subprocess finishes
387 for _ in output:
387 for _ in output:
388 pass
388 pass
389
389
390 # Upload-pack == clone
390 # Upload-pack == clone
391 if git_command == 'git-upload-pack':
391 if git_command == 'git-upload-pack':
392 hook_response = hooks.git_post_pull(self.extras)
392 hook_response = hooks.git_post_pull(self.extras)
393 post_pull_messages = hook_response.output
393 post_pull_messages = hook_response.output
394 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
394 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
395 else:
395 else:
396 resp.app_iter = out
396 resp.app_iter = out
397
397
398 return resp
398 return resp
399
399
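A hedged sketch of what the update-server-info step above amounts to when run by hand; git_path and repo_path stand in for the values the handler already holds:

    import subprocess

    def refresh_dumb_protocol_files(git_path: str, repo_path: str) -> None:
        # regenerates <repo>/info/refs and <repo>/objects/info/packs, the
        # files that dumb-protocol (plain HTTP) clients read instead of
        # talking to upload-pack/receive-pack
        subprocess.run([git_path, 'update-server-info'], cwd=repo_path, check=True)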
400 def __call__(self, environ, start_response):
400 def __call__(self, environ, start_response):
401 request = Request(environ)
401 request = Request(environ)
402 _path = self._get_fixedpath(request.path_info)
402 _path = self._get_fixedpath(request.path_info)
403 if _path.startswith('info/refs'):
403 if _path.startswith('info/refs'):
404 app = self.inforefs
404 app = self.inforefs
405 else:
405 else:
406 app = self.backend
406 app = self.backend
407
407
408 try:
408 try:
409 resp = app(request, environ)
409 resp = app(request, environ)
410 except exc.HTTPException as error:
410 except exc.HTTPException as error:
411 log.exception('HTTP Error')
411 log.exception('HTTP Error')
412 resp = error
412 resp = error
413 except Exception:
413 except Exception:
414 log.exception('Unknown error')
414 log.exception('Unknown error')
415 resp = exc.HTTPInternalServerError()
415 resp = exc.HTTPInternalServerError()
416
416
417 return resp(environ, start_response)
417 return resp(environ, start_response)
@@ -1,257 +1,257 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import threading
18 import threading
19 import msgpack
19 import msgpack
20
20
21 from http.server import BaseHTTPRequestHandler
21 from http.server import BaseHTTPRequestHandler
22 from socketserver import TCPServer
22 from socketserver import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27
27
28 from vcsserver.hooks import HooksHttpClient
28 from vcsserver.hooks import HooksHttpClient
29 from vcsserver.lib.rc_json import json
29 from vcsserver.lib.ext_json import json
30 from vcsserver import hooks
30 from vcsserver import hooks
31
31
32
32
33 def get_hg_ui(extras=None):
33 def get_hg_ui(extras=None):
34 """Create a Config object with a valid RC_SCM_DATA entry."""
34 """Create a Config object with a valid RC_SCM_DATA entry."""
35 extras = extras or {}
35 extras = extras or {}
36 required_extras = {
36 required_extras = {
37 'username': '',
37 'username': '',
38 'repository': '',
38 'repository': '',
39 'locked_by': '',
39 'locked_by': '',
40 'scm': '',
40 'scm': '',
41 'make_lock': '',
41 'make_lock': '',
42 'action': '',
42 'action': '',
43 'ip': '',
43 'ip': '',
44 'hooks_uri': 'fake_hooks_uri',
44 'hooks_uri': 'fake_hooks_uri',
45 }
45 }
46 required_extras.update(extras)
46 required_extras.update(extras)
47 hg_ui = mercurial.ui.ui()
47 hg_ui = mercurial.ui.ui()
48 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
48 hg_ui.setconfig(b'rhodecode', b'RC_SCM_DATA', json.dumps(required_extras))
49
49
50 return hg_ui
50 return hg_ui
51
51
52
52
53 def test_git_pre_receive_is_disabled():
53 def test_git_pre_receive_is_disabled():
54 extras = {'hooks': ['pull']}
54 extras = {'hooks': ['pull']}
55 response = hooks.git_pre_receive(None, None,
55 response = hooks.git_pre_receive(None, None,
        {'RC_SCM_DATA': json.dumps(extras)})

    assert response == 0


def test_git_post_receive_is_disabled():
    extras = {'hooks': ['pull']}
    response = hooks.git_post_receive(None, '',
                                      {'RC_SCM_DATA': json.dumps(extras)})

    assert response == 0


def test_git_post_receive_calls_repo_size():
    extras = {'hooks': ['push', 'repo_size']}

    with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
        hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    extras.update({'commit_ids': [], 'hook_type': 'post_receive',
                   'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
    expected_calls = [
        mock.call('repo_size', extras, mock.ANY),
        mock.call('post_push', extras, mock.ANY),
    ]
    assert call_hook_mock.call_args_list == expected_calls


def test_git_post_receive_does_not_call_disabled_repo_size():
    extras = {'hooks': ['push']}

    with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
        hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    extras.update({'commit_ids': [], 'hook_type': 'post_receive',
                   'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
    expected_calls = [
        mock.call('post_push', extras, mock.ANY)
    ]
    assert call_hook_mock.call_args_list == expected_calls


def test_repo_size_exception_does_not_affect_git_post_receive():
    extras = {'hooks': ['push', 'repo_size']}
    status = 0

    def side_effect(name, *args, **kwargs):
        if name == 'repo_size':
            raise Exception('Fake exception')
        else:
            return status

    with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
        call_hook_mock.side_effect = side_effect
        result = hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
        assert result == status


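# Illustrative sketch, based only on the tests above and not on additional API:
# each git hook receives its RhodeCode context through the RC_SCM_DATA
# environment variable, which carries the "extras" dict serialized as JSON.
# Reduced to the single key these tests exercise, a call looks roughly like:
#
#     env = {'RC_SCM_DATA': json.dumps({'hooks': ['push']})}
#     hooks.git_post_receive(None, '', env)

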
def test_git_pre_pull_is_disabled():
    assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')


def test_git_post_pull_is_disabled():
    assert (
        hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))


class TestGetHooksClient:

    def test_returns_http_client_when_protocol_matches(self):
        hooks_uri = 'localhost:8000'
        result = hooks._get_hooks_client({
            'hooks_uri': hooks_uri,
            'hooks_protocol': 'http'
        })
        assert isinstance(result, hooks.HooksHttpClient)
        assert result.hooks_uri == hooks_uri

    def test_return_celery_client_when_queue_and_backend_provided(self):
        task_queue = 'redis://task_queue:0'
        task_backend = task_queue
        result = hooks._get_hooks_client({
            'task_queue': task_queue,
            'task_backend': task_backend
        })
        assert isinstance(result, hooks.HooksCeleryClient)


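# Illustrative sketch, inferred from the two tests above rather than from the
# hooks module itself: _get_hooks_client appears to pick the client type from
# the keys present in the connection settings -- an HTTP client when
# 'hooks_protocol' is 'http', a Celery client when 'task_queue'/'task_backend'
# are supplied. The URIs below are made-up examples.
#
#     http_client = hooks._get_hooks_client(
#         {'hooks_uri': 'localhost:8000', 'hooks_protocol': 'http'})
#     celery_client = hooks._get_hooks_client(
#         {'task_queue': 'redis://localhost:6379/0',
#          'task_backend': 'redis://localhost:6379/0'})

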
class TestHooksHttpClient:
    def test_init_sets_hooks_uri(self):
        uri = 'localhost:3000'
        client = hooks.HooksHttpClient(uri)
        assert client.hooks_uri == uri

    def test_serialize_returns_serialized_string(self):
        client = hooks.HooksHttpClient('localhost:3000')
        hook_name = 'test'
        extras = {
            'first': 1,
            'second': 'two'
        }
        hooks_proto, result = client._serialize(hook_name, extras)
        expected_result = msgpack.packb({
            'method': hook_name,
            'extras': extras,
        })
        assert hooks_proto == {'rc-hooks-protocol': 'msgpack.v1', 'Connection': 'keep-alive'}
        assert result == expected_result

    def test_call_queries_http_server(self, http_mirror):
        client = hooks.HooksHttpClient(http_mirror.uri)
        hook_name = 'test'
        extras = {
            'first': 1,
            'second': 'two'
        }
        result = client(hook_name, extras)
        expected_result = msgpack.unpackb(msgpack.packb({
            'method': hook_name,
            'extras': extras
        }), raw=False)
        assert result == expected_result


@pytest.fixture
def http_mirror(request):
    server = MirrorHttpServer()
    request.addfinalizer(server.stop)
    return server


class MirrorHttpHandler(BaseHTTPRequestHandler):

    def do_POST(self):
        length = int(self.headers['Content-Length'])
        body = self.rfile.read(length)
        self.send_response(200)
        self.end_headers()
        self.wfile.write(body)


class MirrorHttpServer:
    ip_address = '127.0.0.1'
    port = 0

    def __init__(self):
        self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
        _, self.port = self._daemon.server_address
        self._thread = threading.Thread(target=self._daemon.serve_forever)
        self._thread.daemon = True
        self._thread.start()

    def stop(self):
        self._daemon.shutdown()
        self._thread.join()
        self._daemon = None
        self._thread = None

    @property
    def uri(self):
        return '{}:{}'.format(self.ip_address, self.port)


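# A minimal smoke-test sketch, assuming nothing beyond the helpers defined in
# this module and the standard library: MirrorHttpHandler above simply echoes
# the POST body back, so any payload sent to the mirror server should come back
# verbatim. This test name is illustrative and not part of the original suite.
def test_mirror_http_server_echoes_post_body(http_mirror):
    import http.client  # local import; module-level imports are left untouched

    connection = http.client.HTTPConnection(http_mirror.uri)
    connection.request('POST', '/', body=b'echo-me')
    response = connection.getresponse()
    # The handler writes the request body straight back after a 200 response.
    assert response.read() == b'echo-me'
    connection.close()

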
def test_hooks_http_client_init():
    hooks_uri = 'http://localhost:8000'
    client = HooksHttpClient(hooks_uri)
    assert client.hooks_uri == hooks_uri


def test_hooks_http_client_call():
    hooks_uri = 'http://localhost:8000'

    method = 'test_method'
    extras = {'key': 'value'}

    with \
            mock.patch('http.client.HTTPConnection') as mock_connection,\
            mock.patch('msgpack.load') as mock_load:

        client = HooksHttpClient(hooks_uri)

        mock_load.return_value = {'result': 'success'}
        response = mock.MagicMock()
        response.status = 200
        mock_connection.request.side_effect = None
        mock_connection.getresponse.return_value = response

        result = client(method, extras)

        mock_connection.assert_called_with(hooks_uri)
        mock_connection.return_value.request.assert_called_once()
        assert result == {'result': 'success'}


def test_hooks_http_client_serialize():
    method = 'test_method'
    extras = {'key': 'value'}
    headers, body = HooksHttpClient._serialize(method, extras)

    assert headers == {'rc-hooks-protocol': HooksHttpClient.proto, 'Connection': 'keep-alive'}
    assert msgpack.unpackb(body) == {'method': method, 'extras': extras}
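

# Illustrative sketch of the wire format exercised above (no API beyond what the
# tests already use): _serialize produces a 'rc-hooks-protocol' header naming
# the msgpack protocol plus a msgpack-encoded {'method': ..., 'extras': ...}
# body, so a captured body can be decoded by hand:
#
#     headers, body = HooksHttpClient._serialize('post_push', {'key': 'value'})
#     payload = msgpack.unpackb(body)
#     assert payload == {'method': 'post_push', 'extras': {'key': 'value'}}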