core: renamed remote packages to prevent conflicts with built-in libraries (e.g. the svn remote module shared its name with the svn core library)
super-admin
r1145:a44d603f default
@@ -1,19 +1,19 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from .app import create_app
19 from .app import create_app # noqa
@@ -1,292 +1,291 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import logging
19 import logging
20 from wsgiref.util import FileWrapper
21
20
22 from pyramid.config import Configurator
21 from pyramid.config import Configurator
23 from pyramid.response import Response, FileIter
22 from pyramid.response import Response, FileIter
24 from pyramid.httpexceptions import (
23 from pyramid.httpexceptions import (
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
24 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPUnprocessableEntity)
25 HTTPUnprocessableEntity)
27
26
28 from vcsserver.lib.rc_json import json
27 from vcsserver.lib.rc_json import json
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
28 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
29 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.str_utils import safe_int
30 from vcsserver.str_utils import safe_int
32
31
33 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
34
33
35
34
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
35 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
36 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
37
39
38
40 def write_response_error(http_exception, text=None):
39 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
40 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
41 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
42 _exception.content_type = content_type
44 if text:
43 if text:
45 _exception.body = json.dumps({'message': text})
44 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
45 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
46 http_exception, text)
48 return _exception
47 return _exception
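For illustration, a short sketch of how this helper is typically consumed by the views below (the message text is made up; only names defined above are assumed):

    from pyramid.httpexceptions import HTTPBadRequest

    # The returned exception doubles as a ready-to-return Pyramid response:
    # status 400, content type 'application/vnd.git-lfs+json' and a JSON
    # body of the form {"message": "..."}.
    resp = write_response_error(HTTPBadRequest, 'missing objects data')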
49
48
50
49
51 class AuthHeaderRequired(object):
50 class AuthHeaderRequired(object):
52 """
51 """
53 Decorator to check if request has proper auth-header
52 Decorator to check if request has proper auth-header
54 """
53 """
55
54
56 def __call__(self, func):
55 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
56 return get_cython_compat_decorator(self.__wrapper, func)
58
57
59 def __wrapper(self, func, *fargs, **fkwargs):
58 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
59 request = fargs[1]
61 auth = request.authorization
60 auth = request.authorization
62 if not auth:
61 if not auth:
63 return write_response_error(HTTPForbidden)
62 return write_response_error(HTTPForbidden)
64 return func(*fargs[1:], **fkwargs)
63 return func(*fargs[1:], **fkwargs)
65
64
66
65
67 # views
66 # views
68
67
69 def lfs_objects(request):
68 def lfs_objects(request):
70 # indicate not supported, V1 API
69 # indicate not supported, V1 API
71 log.warning('LFS: v1 api not supported, reporting it back to client')
70 log.warning('LFS: v1 api not supported, reporting it back to client')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
71 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73
72
74
73
75 @AuthHeaderRequired()
74 @AuthHeaderRequired()
76 def lfs_objects_batch(request):
75 def lfs_objects_batch(request):
77 """
76 """
78 The client sends the following information to the Batch endpoint to transfer some objects:
77 The client sends the following information to the Batch endpoint to transfer some objects:
79
78
80 operation - Should be download or upload.
79 operation - Should be download or upload.
81 transfers - An optional Array of String identifiers for transfer
80 transfers - An optional Array of String identifiers for transfer
82 adapters that the client has configured. If omitted, the basic
81 adapters that the client has configured. If omitted, the basic
83 transfer adapter MUST be assumed by the server.
82 transfer adapter MUST be assumed by the server.
84 objects - An Array of objects to download.
83 objects - An Array of objects to download.
85 oid - String OID of the LFS object.
84 oid - String OID of the LFS object.
86 size - Integer byte size of the LFS object. Must be at least zero.
85 size - Integer byte size of the LFS object. Must be at least zero.
87 """
86 """
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
87 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 auth = request.authorization
88 auth = request.authorization
90 repo = request.matchdict.get('repo')
89 repo = request.matchdict.get('repo')
91 data = request.json
90 data = request.json
92 operation = data.get('operation')
91 operation = data.get('operation')
93 http_scheme = request.registry.git_lfs_http_scheme
92 http_scheme = request.registry.git_lfs_http_scheme
94
93
95 if operation not in ('download', 'upload'):
94 if operation not in ('download', 'upload'):
96 log.debug('LFS: unsupported operation:%s', operation)
95 log.debug('LFS: unsupported operation:%s', operation)
97 return write_response_error(
96 return write_response_error(
98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
97 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
99
98
100 if 'objects' not in data:
99 if 'objects' not in data:
101 log.debug('LFS: missing objects data')
100 log.debug('LFS: missing objects data')
102 return write_response_error(
101 return write_response_error(
103 HTTPBadRequest, 'missing objects data')
102 HTTPBadRequest, 'missing objects data')
104
103
105 log.debug('LFS: handling operation of type: %s', operation)
104 log.debug('LFS: handling operation of type: %s', operation)
106
105
107 objects = []
106 objects = []
108 for o in data['objects']:
107 for o in data['objects']:
109 try:
108 try:
110 oid = o['oid']
109 oid = o['oid']
111 obj_size = o['size']
110 obj_size = o['size']
112 except KeyError:
111 except KeyError:
113 log.exception('LFS, failed to extract data')
112 log.exception('LFS, failed to extract data')
114 return write_response_error(
113 return write_response_error(
115 HTTPBadRequest, 'unsupported data in objects')
114 HTTPBadRequest, 'unsupported data in objects')
116
115
117 obj_data = {'oid': oid}
116 obj_data = {'oid': oid}
118
117
119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
118 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
120 _scheme=http_scheme)
119 _scheme=http_scheme)
121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
120 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
122 _scheme=http_scheme)
121 _scheme=http_scheme)
123 store = LFSOidStore(
122 store = LFSOidStore(
124 oid, repo, store_location=request.registry.git_lfs_store_path)
123 oid, repo, store_location=request.registry.git_lfs_store_path)
125 handler = OidHandler(
124 handler = OidHandler(
126 store, repo, auth, oid, obj_size, obj_data,
125 store, repo, auth, oid, obj_size, obj_data,
127 obj_href, obj_verify_href)
126 obj_href, obj_verify_href)
128
127
129 # this verifies also OIDs
128 # this verifies also OIDs
130 actions, errors = handler.exec_operation(operation)
129 actions, errors = handler.exec_operation(operation)
131 if errors:
130 if errors:
132 log.warning('LFS: got following errors: %s', errors)
131 log.warning('LFS: got following errors: %s', errors)
133 obj_data['errors'] = errors
132 obj_data['errors'] = errors
134
133
135 if actions:
134 if actions:
136 obj_data['actions'] = actions
135 obj_data['actions'] = actions
137
136
138 obj_data['size'] = obj_size
137 obj_data['size'] = obj_size
139 obj_data['authenticated'] = True
138 obj_data['authenticated'] = True
140 objects.append(obj_data)
139 objects.append(obj_data)
141
140
142 result = {'objects': objects, 'transfer': 'basic'}
141 result = {'objects': objects, 'transfer': 'basic'}
143 log.debug('LFS Response %s', safe_result(result))
142 log.debug('LFS Response %s', safe_result(result))
144
143
145 return result
144 return result
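As a concrete picture of what this batch view exchanges, here is a hedged sketch of a request body a Git LFS client might POST and of the response shape assembled above (the OID, size and href are placeholders; the exact contents of 'actions' come from OidHandler.exec_operation):

    # Example request to POST /<repo>/info/lfs/objects/batch
    batch_request = {
        "operation": "download",
        "transfers": ["basic"],
        "objects": [{"oid": "31f5...placeholder", "size": 12}],
    }

    # Shape of the JSON returned by lfs_objects_batch above
    batch_response = {
        "transfer": "basic",
        "objects": [{
            "oid": "31f5...placeholder",
            "size": 12,
            "authenticated": True,
            # 'actions' (and possibly 'errors') are filled in by OidHandler
            "actions": {
                "download": {"href": "https://host/repo/info/lfs/objects/31f5...placeholder"},
            },
        }],
    }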
146
145
147
146
148 def lfs_objects_oid_upload(request):
147 def lfs_objects_oid_upload(request):
149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
148 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
150 repo = request.matchdict.get('repo')
149 repo = request.matchdict.get('repo')
151 oid = request.matchdict.get('oid')
150 oid = request.matchdict.get('oid')
152 store = LFSOidStore(
151 store = LFSOidStore(
153 oid, repo, store_location=request.registry.git_lfs_store_path)
152 oid, repo, store_location=request.registry.git_lfs_store_path)
154 engine = store.get_engine(mode='wb')
153 engine = store.get_engine(mode='wb')
155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
154 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
156
155
157 body = request.environ['wsgi.input']
156 body = request.environ['wsgi.input']
158
157
159 with engine as f:
158 with engine as f:
160 blksize = 64 * 1024 # 64kb
159 blksize = 64 * 1024 # 64kb
161 while True:
160 while True:
162 # read in chunks as stream comes in from Gunicorn
161 # read in chunks as stream comes in from Gunicorn
163 # this is a specific Gunicorn support function.
162 # this is a specific Gunicorn support function.
164 # might work differently on waitress
163 # might work differently on waitress
165 chunk = body.read(blksize)
164 chunk = body.read(blksize)
166 if not chunk:
165 if not chunk:
167 break
166 break
168 f.write(chunk)
167 f.write(chunk)
169
168
170 return {'upload': 'ok'}
169 return {'upload': 'ok'}
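The loop above is the usual pattern for streaming a WSGI input body to the store without buffering it in memory; a standalone, runnable sketch of the same idea, with io.BytesIO standing in for wsgi.input and for the store engine:

    import io

    def copy_in_chunks(src, dst, blksize=64 * 1024):
        # Mirrors the chunked write in lfs_objects_oid_upload: read fixed-size
        # chunks until the stream is exhausted.
        while True:
            chunk = src.read(blksize)
            if not chunk:
                break
            dst.write(chunk)

    body = io.BytesIO(b'x' * 200_000)   # stands in for request.environ['wsgi.input']
    target = io.BytesIO()               # stands in for the LFS store file handle
    copy_in_chunks(body, target)
    assert target.getvalue() == b'x' * 200_000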
171
170
172
171
173 def lfs_objects_oid_download(request):
172 def lfs_objects_oid_download(request):
174 repo = request.matchdict.get('repo')
173 repo = request.matchdict.get('repo')
175 oid = request.matchdict.get('oid')
174 oid = request.matchdict.get('oid')
176
175
177 store = LFSOidStore(
176 store = LFSOidStore(
178 oid, repo, store_location=request.registry.git_lfs_store_path)
177 oid, repo, store_location=request.registry.git_lfs_store_path)
179 if not store.has_oid():
178 if not store.has_oid():
180 log.debug('LFS: oid %s does not exists in store', oid)
179 log.debug('LFS: oid %s does not exists in store', oid)
181 return write_response_error(
180 return write_response_error(
182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
181 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
183
182
184 # TODO(marcink): support range header ?
183 # TODO(marcink): support range header ?
185 # Range: bytes=0-, `bytes=(\d+)\-.*`
184 # Range: bytes=0-, `bytes=(\d+)\-.*`
186
185
187 f = open(store.oid_path, 'rb')
186 f = open(store.oid_path, 'rb')
188 response = Response(
187 response = Response(
189 content_type='application/octet-stream', app_iter=FileIter(f))
188 content_type='application/octet-stream', app_iter=FileIter(f))
190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
189 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
191 return response
190 return response
192
191
193
192
194 def lfs_objects_verify(request):
193 def lfs_objects_verify(request):
195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
194 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
196 repo = request.matchdict.get('repo')
195 repo = request.matchdict.get('repo')
197
196
198 data = request.json
197 data = request.json
199 oid = data.get('oid')
198 oid = data.get('oid')
200 size = safe_int(data.get('size'))
199 size = safe_int(data.get('size'))
201
200
202 if not (oid and size):
201 if not (oid and size):
203 return write_response_error(
202 return write_response_error(
204 HTTPBadRequest, 'missing oid and size in request data')
203 HTTPBadRequest, 'missing oid and size in request data')
205
204
206 store = LFSOidStore(
205 store = LFSOidStore(
207 oid, repo, store_location=request.registry.git_lfs_store_path)
206 oid, repo, store_location=request.registry.git_lfs_store_path)
208 if not store.has_oid():
207 if not store.has_oid():
209 log.debug('LFS: oid %s does not exists in store', oid)
208 log.debug('LFS: oid %s does not exists in store', oid)
210 return write_response_error(
209 return write_response_error(
211 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
210 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
212
211
213 store_size = store.size_oid()
212 store_size = store.size_oid()
214 if store_size != size:
213 if store_size != size:
215 msg = 'requested file size mismatch store size:{} requested:{}'.format(
214 msg = 'requested file size mismatch store size:{} requested:{}'.format(
216 store_size, size)
215 store_size, size)
217 return write_response_error(
216 return write_response_error(
218 HTTPUnprocessableEntity, msg)
217 HTTPUnprocessableEntity, msg)
219
218
220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
219 return {'message': {'size': 'ok', 'in_store': 'ok'}}
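For reference, a sketch of the verification exchange handled above (oid and size values are made up):

    # Body POSTed by the LFS client to /<repo>/info/lfs/verify
    verify_request = {"oid": "31f5...placeholder", "size": 12}

    # Reply produced by lfs_objects_verify when the object exists and sizes match
    verify_ok = {"message": {"size": "ok", "in_store": "ok"}}
    # A missing object yields HTTP 404; a size mismatch yields HTTP 422 with a
    # 'requested file size mismatch ...' message.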
221
220
222
221
223 def lfs_objects_lock(request):
222 def lfs_objects_lock(request):
224 return write_response_error(
223 return write_response_error(
225 HTTPNotImplemented, 'GIT LFS locking api not supported')
224 HTTPNotImplemented, 'GIT LFS locking api not supported')
226
225
227
226
228 def not_found(request):
227 def not_found(request):
229 return write_response_error(
228 return write_response_error(
230 HTTPNotFound, 'request path not found')
229 HTTPNotFound, 'request path not found')
231
230
232
231
233 def lfs_disabled(request):
232 def lfs_disabled(request):
234 return write_response_error(
233 return write_response_error(
235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
234 HTTPNotImplemented, 'GIT LFS disabled for this repo')
236
235
237
236
238 def git_lfs_app(config):
237 def git_lfs_app(config):
239
238
240 # v1 API deprecation endpoint
239 # v1 API deprecation endpoint
241 config.add_route('lfs_objects',
240 config.add_route('lfs_objects',
242 '/{repo:.*?[^/]}/info/lfs/objects')
241 '/{repo:.*?[^/]}/info/lfs/objects')
243 config.add_view(lfs_objects, route_name='lfs_objects',
242 config.add_view(lfs_objects, route_name='lfs_objects',
244 request_method='POST', renderer='json')
243 request_method='POST', renderer='json')
245
244
246 # locking API
245 # locking API
247 config.add_route('lfs_objects_lock',
246 config.add_route('lfs_objects_lock',
248 '/{repo:.*?[^/]}/info/lfs/locks')
247 '/{repo:.*?[^/]}/info/lfs/locks')
249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
248 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
250 request_method=('POST', 'GET'), renderer='json')
249 request_method=('POST', 'GET'), renderer='json')
251
250
252 config.add_route('lfs_objects_lock_verify',
251 config.add_route('lfs_objects_lock_verify',
253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
252 '/{repo:.*?[^/]}/info/lfs/locks/verify')
254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
253 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
255 request_method=('POST', 'GET'), renderer='json')
254 request_method=('POST', 'GET'), renderer='json')
256
255
257 # batch API
256 # batch API
258 config.add_route('lfs_objects_batch',
257 config.add_route('lfs_objects_batch',
259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
258 '/{repo:.*?[^/]}/info/lfs/objects/batch')
260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
259 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
261 request_method='POST', renderer='json')
260 request_method='POST', renderer='json')
262
261
263 # oid upload/download API
262 # oid upload/download API
264 config.add_route('lfs_objects_oid',
263 config.add_route('lfs_objects_oid',
265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
264 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
265 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
267 request_method='PUT', renderer='json')
266 request_method='PUT', renderer='json')
268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
267 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
269 request_method='GET', renderer='json')
268 request_method='GET', renderer='json')
270
269
271 # verification API
270 # verification API
272 config.add_route('lfs_objects_verify',
271 config.add_route('lfs_objects_verify',
273 '/{repo:.*?[^/]}/info/lfs/verify')
272 '/{repo:.*?[^/]}/info/lfs/verify')
274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
273 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
275 request_method='POST', renderer='json')
274 request_method='POST', renderer='json')
276
275
277 # not found handler for API
276 # not found handler for API
278 config.add_notfound_view(not_found, renderer='json')
277 config.add_notfound_view(not_found, renderer='json')
279
278
280
279
281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
280 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
282 config = Configurator()
281 config = Configurator()
283 if git_lfs_enabled:
282 if git_lfs_enabled:
284 config.include(git_lfs_app)
283 config.include(git_lfs_app)
285 config.registry.git_lfs_store_path = git_lfs_store_path
284 config.registry.git_lfs_store_path = git_lfs_store_path
286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
285 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
287 else:
286 else:
288 # not found handler for API, reporting disabled LFS support
287 # not found handler for API, reporting disabled LFS support
289 config.add_notfound_view(lfs_disabled, renderer='json')
288 config.add_notfound_view(lfs_disabled, renderer='json')
290
289
291 app = config.make_wsgi_app()
290 app = config.make_wsgi_app()
292 return app
291 return app
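A minimal sketch of running this factory under a plain WSGI server for local experimentation; the store path and scheme are placeholder values, and in production the app is mounted inside the VCSServer rather than served like this:

    from wsgiref.simple_server import make_server

    # create_app is re-exported by the package __init__ shown earlier
    from vcsserver.git_lfs import create_app

    app = create_app(
        git_lfs_enabled=True,
        git_lfs_store_path='/tmp/lfs-store',   # illustrative path
        git_lfs_http_scheme='http',
    )

    if __name__ == '__main__':
        with make_server('127.0.0.1', 8080, app) as server:
            server.serve_forever()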
@@ -1,779 +1,779 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import logging
21 import logging
22 import collections
22 import collections
23 import importlib
23 import importlib
24 import base64
24 import base64
25 import msgpack
25 import msgpack
26 import dataclasses
26 import dataclasses
27 import pygit2
27 import pygit2
28
28
29 import http.client
29 import http.client
30
30
31
31
32 import mercurial.scmutil
32 import mercurial.scmutil
33 import mercurial.node
33 import mercurial.node
34
34
35 from vcsserver.lib.rc_json import json
35 from vcsserver.lib.rc_json import json
36 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.str_utils import ascii_str, safe_str
37 from vcsserver.str_utils import ascii_str, safe_str
38 from vcsserver.remote.git import Repository
38 from vcsserver.remote.git_remote import Repository
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HooksHttpClient(object):
43 class HooksHttpClient(object):
44 proto = 'msgpack.v1'
44 proto = 'msgpack.v1'
45 connection = None
45 connection = None
46
46
47 def __init__(self, hooks_uri):
47 def __init__(self, hooks_uri):
48 self.hooks_uri = hooks_uri
48 self.hooks_uri = hooks_uri
49
49
50 def __repr__(self):
50 def __repr__(self):
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52
52
53 def __call__(self, method, extras):
53 def __call__(self, method, extras):
54 connection = http.client.HTTPConnection(self.hooks_uri)
54 connection = http.client.HTTPConnection(self.hooks_uri)
55 # binary msgpack body
55 # binary msgpack body
56 headers, body = self._serialize(method, extras)
56 headers, body = self._serialize(method, extras)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58
58
59 try:
59 try:
60 try:
60 try:
61 connection.request('POST', '/', body, headers)
61 connection.request('POST', '/', body, headers)
62 except Exception as error:
62 except Exception as error:
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 raise
64 raise
65
65
66 response = connection.getresponse()
66 response = connection.getresponse()
67 try:
67 try:
68 return msgpack.load(response)
68 return msgpack.load(response)
69 except Exception:
69 except Exception:
70 response_data = response.read()
70 response_data = response.read()
71 log.exception('Failed to decode hook response json data. '
71 log.exception('Failed to decode hook response json data. '
72 'response_code:%s, raw_data:%s',
72 'response_code:%s, raw_data:%s',
73 response.status, response_data)
73 response.status, response_data)
74 raise
74 raise
75 finally:
75 finally:
76 connection.close()
76 connection.close()
77
77
78 @classmethod
78 @classmethod
79 def _serialize(cls, hook_name, extras):
79 def _serialize(cls, hook_name, extras):
80 data = {
80 data = {
81 'method': hook_name,
81 'method': hook_name,
82 'extras': extras
82 'extras': extras
83 }
83 }
84 headers = {
84 headers = {
85 "rc-hooks-protocol": cls.proto,
85 "rc-hooks-protocol": cls.proto,
86 "Connection": "keep-alive"
86 "Connection": "keep-alive"
87 }
87 }
88 return headers, msgpack.packb(data)
88 return headers, msgpack.packb(data)
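The wire format here is msgpack over a plain HTTP POST, with the method name and extras in the body and the protocol version in the rc-hooks-protocol header. A toy sketch of a server that would satisfy this client (purely illustrative, not the actual RhodeCode hooks daemon):

    import msgpack
    from http.server import BaseHTTPRequestHandler, HTTPServer

    class ToyHooksHandler(BaseHTTPRequestHandler):
        def do_POST(self):
            # Unpack the {'method': ..., 'extras': ...} payload sent by HooksHttpClient
            length = int(self.headers.get('Content-Length', 0))
            payload = msgpack.unpackb(self.rfile.read(length))
            # _call_hook() expects at least 'status' and 'output' in the reply
            reply = msgpack.packb({'status': 0, 'output': f"handled {payload['method']}\n"})
            self.send_response(200)
            self.send_header('Content-Length', str(len(reply)))
            self.end_headers()
            self.wfile.write(reply)

    # HTTPServer(('127.0.0.1', 9999), ToyHooksHandler).serve_forever()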
89
89
90
90
91 class HooksDummyClient(object):
91 class HooksDummyClient(object):
92 def __init__(self, hooks_module):
92 def __init__(self, hooks_module):
93 self._hooks_module = importlib.import_module(hooks_module)
93 self._hooks_module = importlib.import_module(hooks_module)
94
94
95 def __call__(self, hook_name, extras):
95 def __call__(self, hook_name, extras):
96 with self._hooks_module.Hooks() as hooks:
96 with self._hooks_module.Hooks() as hooks:
97 return getattr(hooks, hook_name)(extras)
97 return getattr(hooks, hook_name)(extras)
98
98
99
99
100 class HooksShadowRepoClient(object):
100 class HooksShadowRepoClient(object):
101
101
102 def __call__(self, hook_name, extras):
102 def __call__(self, hook_name, extras):
103 return {'output': '', 'status': 0}
103 return {'output': '', 'status': 0}
104
104
105
105
106 class RemoteMessageWriter(object):
106 class RemoteMessageWriter(object):
107 """Writer base class."""
107 """Writer base class."""
108 def write(self, message):
108 def write(self, message):
109 raise NotImplementedError()
109 raise NotImplementedError()
110
110
111
111
112 class HgMessageWriter(RemoteMessageWriter):
112 class HgMessageWriter(RemoteMessageWriter):
113 """Writer that knows how to send messages to mercurial clients."""
113 """Writer that knows how to send messages to mercurial clients."""
114
114
115 def __init__(self, ui):
115 def __init__(self, ui):
116 self.ui = ui
116 self.ui = ui
117
117
118 def write(self, message: str):
118 def write(self, message: str):
119 # TODO: Check why the quiet flag is set by default.
119 # TODO: Check why the quiet flag is set by default.
120 old = self.ui.quiet
120 old = self.ui.quiet
121 self.ui.quiet = False
121 self.ui.quiet = False
122 self.ui.status(message.encode('utf-8'))
122 self.ui.status(message.encode('utf-8'))
123 self.ui.quiet = old
123 self.ui.quiet = old
124
124
125
125
126 class GitMessageWriter(RemoteMessageWriter):
126 class GitMessageWriter(RemoteMessageWriter):
127 """Writer that knows how to send messages to git clients."""
127 """Writer that knows how to send messages to git clients."""
128
128
129 def __init__(self, stdout=None):
129 def __init__(self, stdout=None):
130 self.stdout = stdout or sys.stdout
130 self.stdout = stdout or sys.stdout
131
131
132 def write(self, message: str):
132 def write(self, message: str):
133 self.stdout.write(message)
133 self.stdout.write(message)
134
134
135
135
136 class SvnMessageWriter(RemoteMessageWriter):
136 class SvnMessageWriter(RemoteMessageWriter):
137 """Writer that knows how to send messages to svn clients."""
137 """Writer that knows how to send messages to svn clients."""
138
138
139 def __init__(self, stderr=None):
139 def __init__(self, stderr=None):
140 # SVN needs data sent to stderr for back-to-client messaging
140 # SVN needs data sent to stderr for back-to-client messaging
141 self.stderr = stderr or sys.stderr
141 self.stderr = stderr or sys.stderr
142
142
143 def write(self, message):
143 def write(self, message):
144 self.stderr.write(message.encode('utf-8'))
144 self.stderr.write(message.encode('utf-8'))
145
145
146
146
147 def _handle_exception(result):
147 def _handle_exception(result):
148 exception_class = result.get('exception')
148 exception_class = result.get('exception')
149 exception_traceback = result.get('exception_traceback')
149 exception_traceback = result.get('exception_traceback')
150 log.debug('Handling hook-call exception: %s', exception_class)
150 log.debug('Handling hook-call exception: %s', exception_class)
151
151
152 if exception_traceback:
152 if exception_traceback:
153 log.error('Got traceback from remote call:%s', exception_traceback)
153 log.error('Got traceback from remote call:%s', exception_traceback)
154
154
155 if exception_class == 'HTTPLockedRC':
155 if exception_class == 'HTTPLockedRC':
156 raise exceptions.RepositoryLockedException()(*result['exception_args'])
156 raise exceptions.RepositoryLockedException()(*result['exception_args'])
157 elif exception_class == 'HTTPBranchProtected':
157 elif exception_class == 'HTTPBranchProtected':
158 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
158 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
159 elif exception_class == 'RepositoryError':
159 elif exception_class == 'RepositoryError':
160 raise exceptions.VcsException()(*result['exception_args'])
160 raise exceptions.VcsException()(*result['exception_args'])
161 elif exception_class:
161 elif exception_class:
162 raise Exception(
162 raise Exception(
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
164 )
164 )
165
165
166
166
167 def _get_hooks_client(extras):
167 def _get_hooks_client(extras):
168 hooks_uri = extras.get('hooks_uri')
168 hooks_uri = extras.get('hooks_uri')
169 is_shadow_repo = extras.get('is_shadow_repo')
169 is_shadow_repo = extras.get('is_shadow_repo')
170
170
171 if hooks_uri:
171 if hooks_uri:
172 return HooksHttpClient(extras['hooks_uri'])
172 return HooksHttpClient(extras['hooks_uri'])
173 elif is_shadow_repo:
173 elif is_shadow_repo:
174 return HooksShadowRepoClient()
174 return HooksShadowRepoClient()
175 else:
175 else:
176 return HooksDummyClient(extras['hooks_module'])
176 return HooksDummyClient(extras['hooks_module'])
177
177
178
178
179 def _call_hook(hook_name, extras, writer):
179 def _call_hook(hook_name, extras, writer):
180 hooks_client = _get_hooks_client(extras)
180 hooks_client = _get_hooks_client(extras)
181 log.debug('Hooks, using client:%s', hooks_client)
181 log.debug('Hooks, using client:%s', hooks_client)
182 result = hooks_client(hook_name, extras)
182 result = hooks_client(hook_name, extras)
183 log.debug('Hooks got result: %s', result)
183 log.debug('Hooks got result: %s', result)
184 _handle_exception(result)
184 _handle_exception(result)
185 writer.write(result['output'])
185 writer.write(result['output'])
186
186
187 return result['status']
187 return result['status']
188
188
189
189
190 def _extras_from_ui(ui):
190 def _extras_from_ui(ui):
191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
192 if not hook_data:
192 if not hook_data:
193 # maybe it's inside environ ?
193 # maybe it's inside environ ?
194 env_hook_data = os.environ.get('RC_SCM_DATA')
194 env_hook_data = os.environ.get('RC_SCM_DATA')
195 if env_hook_data:
195 if env_hook_data:
196 hook_data = env_hook_data
196 hook_data = env_hook_data
197
197
198 extras = {}
198 extras = {}
199 if hook_data:
199 if hook_data:
200 extras = json.loads(hook_data)
200 extras = json.loads(hook_data)
201 return extras
201 return extras
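The RC_SCM_DATA blob read here is a JSON document that RhodeCode injects via the repository hgrc or the process environment; a made-up sketch of the kind of keys the hooks in this module rely on (the exact key set is defined by RhodeCode itself):

    example_rc_scm_data = {
        "hooks_uri": "127.0.0.1:9999",            # selects HooksHttpClient
        "hooks": ["pull", "push", "repo_size"],   # which hook callbacks are enabled
        "repository": "some/repo",
        "repo_store": "/srv/repos",
        "detect_force_push": True,
        "is_shadow_repo": False,
        "SSH": False,
    }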
202
202
203
203
204 def _rev_range_hash(repo, node, check_heads=False):
204 def _rev_range_hash(repo, node, check_heads=False):
205 from vcsserver.hgcompat import get_ctx
205 from vcsserver.hgcompat import get_ctx
206
206
207 commits = []
207 commits = []
208 revs = []
208 revs = []
209 start = get_ctx(repo, node).rev()
209 start = get_ctx(repo, node).rev()
210 end = len(repo)
210 end = len(repo)
211 for rev in range(start, end):
211 for rev in range(start, end):
212 revs.append(rev)
212 revs.append(rev)
213 ctx = get_ctx(repo, rev)
213 ctx = get_ctx(repo, rev)
214 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
214 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
215 branch = safe_str(ctx.branch())
215 branch = safe_str(ctx.branch())
216 commits.append((commit_id, branch))
216 commits.append((commit_id, branch))
217
217
218 parent_heads = []
218 parent_heads = []
219 if check_heads:
219 if check_heads:
220 parent_heads = _check_heads(repo, start, end, revs)
220 parent_heads = _check_heads(repo, start, end, revs)
221 return commits, parent_heads
221 return commits, parent_heads
222
222
223
223
224 def _check_heads(repo, start, end, commits):
224 def _check_heads(repo, start, end, commits):
225 from vcsserver.hgcompat import get_ctx
225 from vcsserver.hgcompat import get_ctx
226 changelog = repo.changelog
226 changelog = repo.changelog
227 parents = set()
227 parents = set()
228
228
229 for new_rev in commits:
229 for new_rev in commits:
230 for p in changelog.parentrevs(new_rev):
230 for p in changelog.parentrevs(new_rev):
231 if p == mercurial.node.nullrev:
231 if p == mercurial.node.nullrev:
232 continue
232 continue
233 if p < start:
233 if p < start:
234 parents.add(p)
234 parents.add(p)
235
235
236 for p in parents:
236 for p in parents:
237 branch = get_ctx(repo, p).branch()
237 branch = get_ctx(repo, p).branch()
238 # The heads descending from that parent, on the same branch
238 # The heads descending from that parent, on the same branch
239 parent_heads = {p}
239 parent_heads = {p}
240 reachable = {p}
240 reachable = {p}
241 for x in range(p + 1, end):
241 for x in range(p + 1, end):
242 if get_ctx(repo, x).branch() != branch:
242 if get_ctx(repo, x).branch() != branch:
243 continue
243 continue
244 for pp in changelog.parentrevs(x):
244 for pp in changelog.parentrevs(x):
245 if pp in reachable:
245 if pp in reachable:
246 reachable.add(x)
246 reachable.add(x)
247 parent_heads.discard(pp)
247 parent_heads.discard(pp)
248 parent_heads.add(x)
248 parent_heads.add(x)
249 # More than one head? Suggest merging
249 # More than one head? Suggest merging
250 if len(parent_heads) > 1:
250 if len(parent_heads) > 1:
251 return list(parent_heads)
251 return list(parent_heads)
252
252
253 return []
253 return []
254
254
255
255
256 def _get_git_env():
256 def _get_git_env():
257 env = {}
257 env = {}
258 for k, v in os.environ.items():
258 for k, v in os.environ.items():
259 if k.startswith('GIT'):
259 if k.startswith('GIT'):
260 env[k] = v
260 env[k] = v
261
261
262 # serialized version
262 # serialized version
263 return [(k, v) for k, v in env.items()]
263 return [(k, v) for k, v in env.items()]
264
264
265
265
266 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
266 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
267 env = {}
267 env = {}
268 for k, v in os.environ.items():
268 for k, v in os.environ.items():
269 if k.startswith('HG'):
269 if k.startswith('HG'):
270 env[k] = v
270 env[k] = v
271
271
272 env['HG_NODE'] = old_rev
272 env['HG_NODE'] = old_rev
273 env['HG_NODE_LAST'] = new_rev
273 env['HG_NODE_LAST'] = new_rev
274 env['HG_TXNID'] = txnid
274 env['HG_TXNID'] = txnid
275 env['HG_PENDING'] = repo_path
275 env['HG_PENDING'] = repo_path
276
276
277 return [(k, v) for k, v in env.items()]
277 return [(k, v) for k, v in env.items()]
278
278
279
279
280 def repo_size(ui, repo, **kwargs):
280 def repo_size(ui, repo, **kwargs):
281 extras = _extras_from_ui(ui)
281 extras = _extras_from_ui(ui)
282 return _call_hook('repo_size', extras, HgMessageWriter(ui))
282 return _call_hook('repo_size', extras, HgMessageWriter(ui))
283
283
284
284
285 def pre_pull(ui, repo, **kwargs):
285 def pre_pull(ui, repo, **kwargs):
286 extras = _extras_from_ui(ui)
286 extras = _extras_from_ui(ui)
287 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
287 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
288
288
289
289
290 def pre_pull_ssh(ui, repo, **kwargs):
290 def pre_pull_ssh(ui, repo, **kwargs):
291 extras = _extras_from_ui(ui)
291 extras = _extras_from_ui(ui)
292 if extras and extras.get('SSH'):
292 if extras and extras.get('SSH'):
293 return pre_pull(ui, repo, **kwargs)
293 return pre_pull(ui, repo, **kwargs)
294 return 0
294 return 0
295
295
296
296
297 def post_pull(ui, repo, **kwargs):
297 def post_pull(ui, repo, **kwargs):
298 extras = _extras_from_ui(ui)
298 extras = _extras_from_ui(ui)
299 return _call_hook('post_pull', extras, HgMessageWriter(ui))
299 return _call_hook('post_pull', extras, HgMessageWriter(ui))
300
300
301
301
302 def post_pull_ssh(ui, repo, **kwargs):
302 def post_pull_ssh(ui, repo, **kwargs):
303 extras = _extras_from_ui(ui)
303 extras = _extras_from_ui(ui)
304 if extras and extras.get('SSH'):
304 if extras and extras.get('SSH'):
305 return post_pull(ui, repo, **kwargs)
305 return post_pull(ui, repo, **kwargs)
306 return 0
306 return 0
307
307
308
308
309 def pre_push(ui, repo, node=None, **kwargs):
309 def pre_push(ui, repo, node=None, **kwargs):
310 """
310 """
311 Mercurial pre_push hook
311 Mercurial pre_push hook
312 """
312 """
313 extras = _extras_from_ui(ui)
313 extras = _extras_from_ui(ui)
314 detect_force_push = extras.get('detect_force_push')
314 detect_force_push = extras.get('detect_force_push')
315
315
316 rev_data = []
316 rev_data = []
317 hook_type: str = safe_str(kwargs.get('hooktype'))
317 hook_type: str = safe_str(kwargs.get('hooktype'))
318
318
319 if node and hook_type == 'pretxnchangegroup':
319 if node and hook_type == 'pretxnchangegroup':
320 branches = collections.defaultdict(list)
320 branches = collections.defaultdict(list)
321 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
321 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
322 for commit_id, branch in commits:
322 for commit_id, branch in commits:
323 branches[branch].append(commit_id)
323 branches[branch].append(commit_id)
324
324
325 for branch, commits in branches.items():
325 for branch, commits in branches.items():
326 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
326 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
327 rev_data.append({
327 rev_data.append({
328 'total_commits': len(commits),
328 'total_commits': len(commits),
329 'old_rev': old_rev,
329 'old_rev': old_rev,
330 'new_rev': commits[-1],
330 'new_rev': commits[-1],
331 'ref': '',
331 'ref': '',
332 'type': 'branch',
332 'type': 'branch',
333 'name': branch,
333 'name': branch,
334 })
334 })
335
335
336 for push_ref in rev_data:
336 for push_ref in rev_data:
337 push_ref['multiple_heads'] = _heads
337 push_ref['multiple_heads'] = _heads
338
338
339 repo_path = os.path.join(
339 repo_path = os.path.join(
340 extras.get('repo_store', ''), extras.get('repository', ''))
340 extras.get('repo_store', ''), extras.get('repository', ''))
341 push_ref['hg_env'] = _get_hg_env(
341 push_ref['hg_env'] = _get_hg_env(
342 old_rev=push_ref['old_rev'],
342 old_rev=push_ref['old_rev'],
343 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
343 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
344 repo_path=repo_path)
344 repo_path=repo_path)
345
345
346 extras['hook_type'] = hook_type or 'pre_push'
346 extras['hook_type'] = hook_type or 'pre_push'
347 extras['commit_ids'] = rev_data
347 extras['commit_ids'] = rev_data
348
348
349 return _call_hook('pre_push', extras, HgMessageWriter(ui))
349 return _call_hook('pre_push', extras, HgMessageWriter(ui))
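To make the payload concrete, this is the shape of one entry in extras['commit_ids'] that pre_push forwards (hashes and branch name are made up):

    example_push_ref = {
        'total_commits': 2,
        'old_rev': 'a1b2c3d4...',     # kwargs['node_last'], or the branch's first new commit
        'new_rev': '9f8e7d6c...',     # last new commit on that branch
        'ref': '',
        'type': 'branch',
        'name': 'default',
        'multiple_heads': [],         # populated when detect_force_push finds extra heads
        'hg_env': [('HG_NODE', 'a1b2c3d4...'),
                   ('HG_NODE_LAST', '9f8e7d6c...')],  # plus HG_TXNID and HG_PENDING
    }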
350
350
351
351
352 def pre_push_ssh(ui, repo, node=None, **kwargs):
352 def pre_push_ssh(ui, repo, node=None, **kwargs):
353 extras = _extras_from_ui(ui)
353 extras = _extras_from_ui(ui)
354 if extras.get('SSH'):
354 if extras.get('SSH'):
355 return pre_push(ui, repo, node, **kwargs)
355 return pre_push(ui, repo, node, **kwargs)
356
356
357 return 0
357 return 0
358
358
359
359
360 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
360 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
361 """
361 """
362 Mercurial pre_push hook for SSH
362 Mercurial pre_push hook for SSH
363 """
363 """
364 extras = _extras_from_ui(ui)
364 extras = _extras_from_ui(ui)
365 if extras.get('SSH'):
365 if extras.get('SSH'):
366 permission = extras['SSH_PERMISSIONS']
366 permission = extras['SSH_PERMISSIONS']
367
367
368 if 'repository.write' == permission or 'repository.admin' == permission:
368 if 'repository.write' == permission or 'repository.admin' == permission:
369 return 0
369 return 0
370
370
371 # non-zero ret code
371 # non-zero ret code
372 return 1
372 return 1
373
373
374 return 0
374 return 0
375
375
376
376
377 def post_push(ui, repo, node, **kwargs):
377 def post_push(ui, repo, node, **kwargs):
378 """
378 """
379 Mercurial post_push hook
379 Mercurial post_push hook
380 """
380 """
381 extras = _extras_from_ui(ui)
381 extras = _extras_from_ui(ui)
382
382
383 commit_ids = []
383 commit_ids = []
384 branches = []
384 branches = []
385 bookmarks = []
385 bookmarks = []
386 tags = []
386 tags = []
387 hook_type: str = safe_str(kwargs.get('hooktype'))
387 hook_type: str = safe_str(kwargs.get('hooktype'))
388
388
389 commits, _heads = _rev_range_hash(repo, node)
389 commits, _heads = _rev_range_hash(repo, node)
390 for commit_id, branch in commits:
390 for commit_id, branch in commits:
391 commit_ids.append(commit_id)
391 commit_ids.append(commit_id)
392 if branch not in branches:
392 if branch not in branches:
393 branches.append(branch)
393 branches.append(branch)
394
394
395 if hasattr(ui, '_rc_pushkey_bookmarks'):
395 if hasattr(ui, '_rc_pushkey_bookmarks'):
396 bookmarks = ui._rc_pushkey_bookmarks
396 bookmarks = ui._rc_pushkey_bookmarks
397
397
398 extras['hook_type'] = hook_type or 'post_push'
398 extras['hook_type'] = hook_type or 'post_push'
399 extras['commit_ids'] = commit_ids
399 extras['commit_ids'] = commit_ids
400
400
401 extras['new_refs'] = {
401 extras['new_refs'] = {
402 'branches': branches,
402 'branches': branches,
403 'bookmarks': bookmarks,
403 'bookmarks': bookmarks,
404 'tags': tags
404 'tags': tags
405 }
405 }
406
406
407 return _call_hook('post_push', extras, HgMessageWriter(ui))
407 return _call_hook('post_push', extras, HgMessageWriter(ui))
408
408
409
409
410 def post_push_ssh(ui, repo, node, **kwargs):
410 def post_push_ssh(ui, repo, node, **kwargs):
411 """
411 """
412 Mercurial post_push hook for SSH
412 Mercurial post_push hook for SSH
413 """
413 """
414 if _extras_from_ui(ui).get('SSH'):
414 if _extras_from_ui(ui).get('SSH'):
415 return post_push(ui, repo, node, **kwargs)
415 return post_push(ui, repo, node, **kwargs)
416 return 0
416 return 0
417
417
418
418
419 def key_push(ui, repo, **kwargs):
419 def key_push(ui, repo, **kwargs):
420 from vcsserver.hgcompat import get_ctx
420 from vcsserver.hgcompat import get_ctx
421
421
422 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
422 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
423 # store new bookmarks in our UI object propagated later to post_push
423 # store new bookmarks in our UI object propagated later to post_push
424 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
424 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
425 return
425 return
426
426
427
427
428 # backward compat
428 # backward compat
429 log_pull_action = post_pull
429 log_pull_action = post_pull
430
430
431 # backward compat
431 # backward compat
432 log_push_action = post_push
432 log_push_action = post_push
433
433
434
434
435 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
435 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
436 """
436 """
437 Old hook name: keep here for backward compatibility.
437 Old hook name: keep here for backward compatibility.
438
438
439 This is only required when the installed git hooks are not upgraded.
439 This is only required when the installed git hooks are not upgraded.
440 """
440 """
441 pass
441 pass
442
442
443
443
444 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
444 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
445 """
445 """
446 Old hook name: keep here for backward compatibility.
446 Old hook name: keep here for backward compatibility.
447
447
448 This is only required when the installed git hooks are not upgraded.
448 This is only required when the installed git hooks are not upgraded.
449 """
449 """
450 pass
450 pass
451
451
452
452
453 @dataclasses.dataclass
453 @dataclasses.dataclass
454 class HookResponse:
454 class HookResponse:
455 status: int
455 status: int
456 output: str
456 output: str
457
457
458
458
459 def git_pre_pull(extras) -> HookResponse:
459 def git_pre_pull(extras) -> HookResponse:
460 """
460 """
461 Pre pull hook.
461 Pre pull hook.
462
462
463 :param extras: dictionary containing the keys defined in simplevcs
463 :param extras: dictionary containing the keys defined in simplevcs
464 :type extras: dict
464 :type extras: dict
465
465
466 :return: status code of the hook. 0 for success.
466 :return: status code of the hook. 0 for success.
467 :rtype: int
467 :rtype: int
468 """
468 """
469
469
470 if 'pull' not in extras['hooks']:
470 if 'pull' not in extras['hooks']:
471 return HookResponse(0, '')
471 return HookResponse(0, '')
472
472
473 stdout = io.StringIO()
473 stdout = io.StringIO()
474 try:
474 try:
475 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
475 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
476
476
477 except Exception as error:
477 except Exception as error:
478 log.exception('Failed to call pre_pull hook')
478 log.exception('Failed to call pre_pull hook')
479 status_code = 128
479 status_code = 128
480 stdout.write(f'ERROR: {error}\n')
480 stdout.write(f'ERROR: {error}\n')
481
481
482 return HookResponse(status_code, stdout.getvalue())
482 return HookResponse(status_code, stdout.getvalue())
483
483
484
484
485 def git_post_pull(extras) -> HookResponse:
485 def git_post_pull(extras) -> HookResponse:
486 """
486 """
487 Post pull hook.
487 Post pull hook.
488
488
489 :param extras: dictionary containing the keys defined in simplevcs
489 :param extras: dictionary containing the keys defined in simplevcs
490 :type extras: dict
490 :type extras: dict
491
491
492 :return: status code of the hook. 0 for success.
492 :return: status code of the hook. 0 for success.
493 :rtype: int
493 :rtype: int
494 """
494 """
495 if 'pull' not in extras['hooks']:
495 if 'pull' not in extras['hooks']:
496 return HookResponse(0, '')
496 return HookResponse(0, '')
497
497
498 stdout = io.StringIO()
498 stdout = io.StringIO()
499 try:
499 try:
500 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
500 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
501 except Exception as error:
501 except Exception as error:
502 status = 128
502 status = 128
503 stdout.write(f'ERROR: {error}\n')
503 stdout.write(f'ERROR: {error}\n')
504
504
505 return HookResponse(status, stdout.getvalue())
505 return HookResponse(status, stdout.getvalue())
506
506
507
507
508 def _parse_git_ref_lines(revision_lines):
508 def _parse_git_ref_lines(revision_lines):
509 rev_data = []
509 rev_data = []
510 for revision_line in revision_lines or []:
510 for revision_line in revision_lines or []:
511 old_rev, new_rev, ref = revision_line.strip().split(' ')
511 old_rev, new_rev, ref = revision_line.strip().split(' ')
512 ref_data = ref.split('/', 2)
512 ref_data = ref.split('/', 2)
513 if ref_data[1] in ('tags', 'heads'):
513 if ref_data[1] in ('tags', 'heads'):
514 rev_data.append({
514 rev_data.append({
515 # NOTE(marcink):
515 # NOTE(marcink):
516 # we're unable to tell total_commits for git at this point
516 # we're unable to tell total_commits for git at this point
517 # but we set the variable for consistency with GIT
517 # but we set the variable for consistency with GIT
518 'total_commits': -1,
518 'total_commits': -1,
519 'old_rev': old_rev,
519 'old_rev': old_rev,
520 'new_rev': new_rev,
520 'new_rev': new_rev,
521 'ref': ref,
521 'ref': ref,
522 'type': ref_data[1],
522 'type': ref_data[1],
523 'name': ref_data[2],
523 'name': ref_data[2],
524 })
524 })
525 return rev_data
525 return rev_data
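A quick illustration of the parsing above: git feeds the receive hooks one '<old-sha> <new-sha> <ref>' line per updated ref (the SHAs below are made up):

    lines = ['0000000000000000000000000000000000000000 '
             '9fceb02d0ae598e95dc970b74767f19372d61af8 refs/heads/feature-x']
    parsed = _parse_git_ref_lines(lines)
    # parsed == [{
    #     'total_commits': -1,
    #     'old_rev': '0000000000000000000000000000000000000000',
    #     'new_rev': '9fceb02d0ae598e95dc970b74767f19372d61af8',
    #     'ref': 'refs/heads/feature-x',
    #     'type': 'heads',
    #     'name': 'feature-x',
    # }]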
526
526
527
527
528 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
528 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
529 """
529 """
530 Pre push hook.
530 Pre push hook.
531
531
532 :return: status code of the hook. 0 for success.
532 :return: status code of the hook. 0 for success.
533 """
533 """
534 extras = json.loads(env['RC_SCM_DATA'])
534 extras = json.loads(env['RC_SCM_DATA'])
535 rev_data = _parse_git_ref_lines(revision_lines)
535 rev_data = _parse_git_ref_lines(revision_lines)
536 if 'push' not in extras['hooks']:
536 if 'push' not in extras['hooks']:
537 return 0
537 return 0
538 empty_commit_id = '0' * 40
538 empty_commit_id = '0' * 40
539
539
540 detect_force_push = extras.get('detect_force_push')
540 detect_force_push = extras.get('detect_force_push')
541
541
542 for push_ref in rev_data:
542 for push_ref in rev_data:
543 # store our git-env which holds the temp store
543 # store our git-env which holds the temp store
544 push_ref['git_env'] = _get_git_env()
544 push_ref['git_env'] = _get_git_env()
545 push_ref['pruned_sha'] = ''
545 push_ref['pruned_sha'] = ''
546 if not detect_force_push:
546 if not detect_force_push:
547 # don't check for forced-push when we don't need to
547 # don't check for forced-push when we don't need to
548 continue
548 continue
549
549
550 type_ = push_ref['type']
550 type_ = push_ref['type']
551 new_branch = push_ref['old_rev'] == empty_commit_id
551 new_branch = push_ref['old_rev'] == empty_commit_id
552 delete_branch = push_ref['new_rev'] == empty_commit_id
552 delete_branch = push_ref['new_rev'] == empty_commit_id
553 if type_ == 'heads' and not (new_branch or delete_branch):
553 if type_ == 'heads' and not (new_branch or delete_branch):
554 old_rev = push_ref['old_rev']
554 old_rev = push_ref['old_rev']
555 new_rev = push_ref['new_rev']
555 new_rev = push_ref['new_rev']
556 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, f'^{new_rev}']
556 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, f'^{new_rev}']
557 stdout, stderr = subprocessio.run_command(
557 stdout, stderr = subprocessio.run_command(
558 cmd, env=os.environ.copy())
558 cmd, env=os.environ.copy())
559 # means we're having some non-reachable objects, this forced push was used
559 # means we're having some non-reachable objects, this forced push was used
560 if stdout:
560 if stdout:
561 push_ref['pruned_sha'] = stdout.splitlines()
561 push_ref['pruned_sha'] = stdout.splitlines()
562
562
563 extras['hook_type'] = 'pre_receive'
563 extras['hook_type'] = 'pre_receive'
564 extras['commit_ids'] = rev_data
564 extras['commit_ids'] = rev_data
565
565
566 stdout = sys.stdout
566 stdout = sys.stdout
567 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
567 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
568
568
569 return status_code
569 return status_code
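The force-push test above rests on a simple reachability argument: any commit printed by `git rev-list <old> ^<new>` is reachable from the old tip but not from the new one, which can only happen if the push rewrote history. A standalone sketch of the same check (repository path and revisions are placeholders):

    import subprocess

    def is_force_push(repo_path: str, old_rev: str, new_rev: str) -> bool:
        # Non-empty output means old_rev contained commits that new_rev no
        # longer reaches, i.e. published history was rewritten.
        out = subprocess.run(
            ['git', 'rev-list', old_rev, f'^{new_rev}'],
            cwd=repo_path, capture_output=True, text=True, check=True,
        ).stdout
        return bool(out.strip())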
570
570
571
571
572 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
572 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
573 """
573 """
574 Post push hook.
574 Post push hook.
575
575
576 :return: status code of the hook. 0 for success.
576 :return: status code of the hook. 0 for success.
577 """
577 """
578 extras = json.loads(env['RC_SCM_DATA'])
578 extras = json.loads(env['RC_SCM_DATA'])
579 if 'push' not in extras['hooks']:
579 if 'push' not in extras['hooks']:
580 return 0
580 return 0
581
581
582 rev_data = _parse_git_ref_lines(revision_lines)
582 rev_data = _parse_git_ref_lines(revision_lines)
583
583
584 git_revs = []
584 git_revs = []
585
585
586 # N.B.(skreft): it is ok to just call git, as git before calling a
586 # N.B.(skreft): it is ok to just call git, as git before calling a
587 # subcommand sets the PATH environment variable so that it point to the
587 # subcommand sets the PATH environment variable so that it point to the
588 # correct version of the git executable.
588 # correct version of the git executable.
589 empty_commit_id = '0' * 40
589 empty_commit_id = '0' * 40
590 branches = []
590 branches = []
591 tags = []
591 tags = []
592 for push_ref in rev_data:
592 for push_ref in rev_data:
593 type_ = push_ref['type']
593 type_ = push_ref['type']
594
594
595 if type_ == 'heads':
595 if type_ == 'heads':
596 # starting new branch case
596 # starting new branch case
597 if push_ref['old_rev'] == empty_commit_id:
597 if push_ref['old_rev'] == empty_commit_id:
598 push_ref_name = push_ref['name']
598 push_ref_name = push_ref['name']
599
599
600 if push_ref_name not in branches:
600 if push_ref_name not in branches:
601 branches.append(push_ref_name)
601 branches.append(push_ref_name)
602
602
603 need_head_set = ''
603 need_head_set = ''
604 with Repository(os.getcwd()) as repo:
604 with Repository(os.getcwd()) as repo:
605 try:
605 try:
606 repo.head
606 repo.head
607 except pygit2.GitError:
607 except pygit2.GitError:
608 need_head_set = f'refs/heads/{push_ref_name}'
608 need_head_set = f'refs/heads/{push_ref_name}'
609
609
610 if need_head_set:
610 if need_head_set:
611 repo.set_head(need_head_set)
611 repo.set_head(need_head_set)
612 print(f"Setting default branch to {push_ref_name}")
612 print(f"Setting default branch to {push_ref_name}")
613
613
614 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
614 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
615 stdout, stderr = subprocessio.run_command(
615 stdout, stderr = subprocessio.run_command(
616 cmd, env=os.environ.copy())
616 cmd, env=os.environ.copy())
617 heads = safe_str(stdout)
617 heads = safe_str(stdout)
618 heads = heads.replace(push_ref['ref'], '')
618 heads = heads.replace(push_ref['ref'], '')
619 heads = ' '.join(head for head
619 heads = ' '.join(head for head
620 in heads.splitlines() if head) or '.'
620 in heads.splitlines() if head) or '.'
621 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
621 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
622 '--pretty=format:%H', '--', push_ref['new_rev'],
622 '--pretty=format:%H', '--', push_ref['new_rev'],
623 '--not', heads]
623 '--not', heads]
624 stdout, stderr = subprocessio.run_command(
624 stdout, stderr = subprocessio.run_command(
625 cmd, env=os.environ.copy())
625 cmd, env=os.environ.copy())
626 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
626 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
627
627
628 # delete branch case
628 # delete branch case
629 elif push_ref['new_rev'] == empty_commit_id:
629 elif push_ref['new_rev'] == empty_commit_id:
630 git_revs.append('delete_branch=>%s' % push_ref['name'])
630 git_revs.append('delete_branch=>%s' % push_ref['name'])
631 else:
631 else:
632 if push_ref['name'] not in branches:
632 if push_ref['name'] not in branches:
633 branches.append(push_ref['name'])
633 branches.append(push_ref['name'])
634
634
635 cmd = [settings.GIT_EXECUTABLE, 'log',
635 cmd = [settings.GIT_EXECUTABLE, 'log',
636 '{old_rev}..{new_rev}'.format(**push_ref),
636 '{old_rev}..{new_rev}'.format(**push_ref),
637 '--reverse', '--pretty=format:%H']
637 '--reverse', '--pretty=format:%H']
638 stdout, stderr = subprocessio.run_command(
638 stdout, stderr = subprocessio.run_command(
639 cmd, env=os.environ.copy())
639 cmd, env=os.environ.copy())
640 # we get bytes from stdout, we need str to be consistent
640 # we get bytes from stdout, we need str to be consistent
641 log_revs = list(map(ascii_str, stdout.splitlines()))
641 log_revs = list(map(ascii_str, stdout.splitlines()))
642 git_revs.extend(log_revs)
642 git_revs.extend(log_revs)
643
643
644 # Pure pygit2 impl. but still 2-3x slower :/
644 # Pure pygit2 impl. but still 2-3x slower :/
645 # results = []
645 # results = []
646 #
646 #
647 # with Repository(os.getcwd()) as repo:
647 # with Repository(os.getcwd()) as repo:
648 # repo_new_rev = repo[push_ref['new_rev']]
648 # repo_new_rev = repo[push_ref['new_rev']]
649 # repo_old_rev = repo[push_ref['old_rev']]
649 # repo_old_rev = repo[push_ref['old_rev']]
650 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
650 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
651 #
651 #
652 # for commit in walker:
652 # for commit in walker:
653 # if commit.id == repo_old_rev.id:
653 # if commit.id == repo_old_rev.id:
654 # break
654 # break
655 # results.append(commit.id.hex)
655 # results.append(commit.id.hex)
656 # # reverse the order, can't use GIT_SORT_REVERSE
656 # # reverse the order, can't use GIT_SORT_REVERSE
657 # log_revs = results[::-1]
657 # log_revs = results[::-1]
658
658
659 elif type_ == 'tags':
659 elif type_ == 'tags':
660 if push_ref['name'] not in tags:
660 if push_ref['name'] not in tags:
661 tags.append(push_ref['name'])
661 tags.append(push_ref['name'])
662 git_revs.append('tag=>%s' % push_ref['name'])
662 git_revs.append('tag=>%s' % push_ref['name'])
663
663
664 extras['hook_type'] = 'post_receive'
664 extras['hook_type'] = 'post_receive'
665 extras['commit_ids'] = git_revs
665 extras['commit_ids'] = git_revs
666 extras['new_refs'] = {
666 extras['new_refs'] = {
667 'branches': branches,
667 'branches': branches,
668 'bookmarks': [],
668 'bookmarks': [],
669 'tags': tags,
669 'tags': tags,
670 }
670 }
671
671
672 stdout = sys.stdout
672 stdout = sys.stdout
673
673
674 if 'repo_size' in extras['hooks']:
674 if 'repo_size' in extras['hooks']:
675 try:
675 try:
676 _call_hook('repo_size', extras, GitMessageWriter(stdout))
676 _call_hook('repo_size', extras, GitMessageWriter(stdout))
677 except Exception:
677 except Exception:
678 pass
678 pass
679
679
680 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
680 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
681 return status_code
681 return status_code
682
682
683
683
684 def _get_extras_from_txn_id(path, txn_id):
684 def _get_extras_from_txn_id(path, txn_id):
685 extras = {}
685 extras = {}
686 try:
686 try:
687 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
687 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
688 '-t', txn_id,
688 '-t', txn_id,
689 '--revprop', path, 'rc-scm-extras']
689 '--revprop', path, 'rc-scm-extras']
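# Roughly the same as running (illustrative command line):
#   svnlook pget -t <txn_id> --revprop <repo_path> rc-scm-extras
# the property value is expected to be urlsafe-base64-encoded JSON.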
690 stdout, stderr = subprocessio.run_command(
690 stdout, stderr = subprocessio.run_command(
691 cmd, env=os.environ.copy())
691 cmd, env=os.environ.copy())
692 extras = json.loads(base64.urlsafe_b64decode(stdout))
692 extras = json.loads(base64.urlsafe_b64decode(stdout))
693 except Exception:
693 except Exception:
694 log.exception('Failed to extract extras info from txn_id')
694 log.exception('Failed to extract extras info from txn_id')
695
695
696 return extras
696 return extras
697
697
698
698
699 def _get_extras_from_commit_id(commit_id, path):
699 def _get_extras_from_commit_id(commit_id, path):
700 extras = {}
700 extras = {}
701 try:
701 try:
702 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
702 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
703 '-r', commit_id,
703 '-r', commit_id,
704 '--revprop', path, 'rc-scm-extras']
704 '--revprop', path, 'rc-scm-extras']
705 stdout, stderr = subprocessio.run_command(
705 stdout, stderr = subprocessio.run_command(
706 cmd, env=os.environ.copy())
706 cmd, env=os.environ.copy())
707 extras = json.loads(base64.urlsafe_b64decode(stdout))
707 extras = json.loads(base64.urlsafe_b64decode(stdout))
708 except Exception:
708 except Exception:
709 log.exception('Failed to extract extras info from commit_id')
709 log.exception('Failed to extract extras info from commit_id')
710
710
711 return extras
711 return extras
712
712
713
713
714 def svn_pre_commit(repo_path, commit_data, env):
714 def svn_pre_commit(repo_path, commit_data, env):
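# commit_data is assumed to be the (path, txn_id) pair passed in by the SVN
# pre-commit hook script; the returned status presumably becomes the hook's
# exit code, and a non-zero pre-commit exit rejects the transaction.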
715 path, txn_id = commit_data
715 path, txn_id = commit_data
716 branches = []
716 branches = []
717 tags = []
717 tags = []
718
718
719 if env.get('RC_SCM_DATA'):
719 if env.get('RC_SCM_DATA'):
720 extras = json.loads(env['RC_SCM_DATA'])
720 extras = json.loads(env['RC_SCM_DATA'])
721 else:
721 else:
722 # fallback method to read from TXN-ID stored data
722 # fallback method to read from TXN-ID stored data
723 extras = _get_extras_from_txn_id(path, txn_id)
723 extras = _get_extras_from_txn_id(path, txn_id)
724 if not extras:
724 if not extras:
725 return 0
725 return 0
726
726
727 extras['hook_type'] = 'pre_commit'
727 extras['hook_type'] = 'pre_commit'
728 extras['commit_ids'] = [txn_id]
728 extras['commit_ids'] = [txn_id]
729 extras['txn_id'] = txn_id
729 extras['txn_id'] = txn_id
730 extras['new_refs'] = {
730 extras['new_refs'] = {
731 'total_commits': 1,
731 'total_commits': 1,
732 'branches': branches,
732 'branches': branches,
733 'bookmarks': [],
733 'bookmarks': [],
734 'tags': tags,
734 'tags': tags,
735 }
735 }
736
736
737 return _call_hook('pre_push', extras, SvnMessageWriter())
737 return _call_hook('pre_push', extras, SvnMessageWriter())
738
738
739
739
740 def svn_post_commit(repo_path, commit_data, env):
740 def svn_post_commit(repo_path, commit_data, env):
741 """
741 """
742 commit_data is path, rev, txn_id
742 commit_data is path, rev, txn_id
743 """
743 """
744 if len(commit_data) == 3:
744 if len(commit_data) == 3:
745 path, commit_id, txn_id = commit_data
745 path, commit_id, txn_id = commit_data
746 elif len(commit_data) == 2:
746 elif len(commit_data) == 2:
747 log.error('Failed to extract txn_id from commit_data using legacy method. '
747 log.error('Failed to extract txn_id from commit_data using legacy method. '
748 'Some functionality might be limited')
748 'Some functionality might be limited')
749 path, commit_id = commit_data
749 path, commit_id = commit_data
750 txn_id = None
750 txn_id = None
751
751
752 branches = []
752 branches = []
753 tags = []
753 tags = []
754
754
755 if env.get('RC_SCM_DATA'):
755 if env.get('RC_SCM_DATA'):
756 extras = json.loads(env['RC_SCM_DATA'])
756 extras = json.loads(env['RC_SCM_DATA'])
757 else:
757 else:
758 # fallback method to read from TXN-ID stored data
758 # fallback method to read from TXN-ID stored data
759 extras = _get_extras_from_commit_id(commit_id, path)
759 extras = _get_extras_from_commit_id(commit_id, path)
760 if not extras:
760 if not extras:
761 return 0
761 return 0
762
762
763 extras['hook_type'] = 'post_commit'
763 extras['hook_type'] = 'post_commit'
764 extras['commit_ids'] = [commit_id]
764 extras['commit_ids'] = [commit_id]
765 extras['txn_id'] = txn_id
765 extras['txn_id'] = txn_id
766 extras['new_refs'] = {
766 extras['new_refs'] = {
767 'branches': branches,
767 'branches': branches,
768 'bookmarks': [],
768 'bookmarks': [],
769 'tags': tags,
769 'tags': tags,
770 'total_commits': 1,
770 'total_commits': 1,
771 }
771 }
772
772
773 if 'repo_size' in extras['hooks']:
773 if 'repo_size' in extras['hooks']:
774 try:
774 try:
775 _call_hook('repo_size', extras, SvnMessageWriter())
775 _call_hook('repo_size', extras, SvnMessageWriter())
776 except Exception:
776 except Exception:
777 pass
777 pass
778
778
779 return _call_hook('post_push', extras, SvnMessageWriter())
779 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,774 +1,768 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import time
24 import time
25 import wsgiref.util
25 import wsgiref.util
26 import tempfile
26 import tempfile
27 import psutil
27 import psutil
28
28
29 from itertools import chain
29 from itertools import chain
30
30
31 import msgpack
31 import msgpack
32 import configparser
32 import configparser
33
33
34 from pyramid.config import Configurator
34 from pyramid.config import Configurator
35 from pyramid.wsgi import wsgiapp
35 from pyramid.wsgi import wsgiapp
36 from pyramid.response import Response
36 from pyramid.response import Response
37
37
38 from vcsserver.base import BytesEnvelope, BinaryEnvelope
38 from vcsserver.base import BytesEnvelope, BinaryEnvelope
39 from vcsserver.lib.rc_json import json
39 from vcsserver.lib.rc_json import json
40 from vcsserver.config.settings_maker import SettingsMaker
40 from vcsserver.config.settings_maker import SettingsMaker
41 from vcsserver.str_utils import safe_int
41 from vcsserver.str_utils import safe_int
42 from vcsserver.lib.statsd_client import StatsdClient
42 from vcsserver.lib.statsd_client import StatsdClient
43 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
43 from vcsserver.tweens.request_wrapper import get_headers_call_context
44
45 log = logging.getLogger(__name__)
46
47 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
48 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
49
50 try:
51 locale.setlocale(locale.LC_ALL, '')
52 except locale.Error as e:
53 log.error(
54 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
55 os.environ['LC_ALL'] = 'C'
56
57
44
58 import vcsserver
45 import vcsserver
59 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
46 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
60 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
47 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
61 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
48 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
62 from vcsserver.echo_stub.echo_app import EchoApp
49 from vcsserver.echo_stub.echo_app import EchoApp
63 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
50 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
64 from vcsserver.lib.exc_tracking import store_exception, format_exc
51 from vcsserver.lib.exc_tracking import store_exception, format_exc
65 from vcsserver.server import VcsServer
52 from vcsserver.server import VcsServer
66
53
67 strict_vcs = True
54 strict_vcs = True
68
55
69 git_import_err = None
56 git_import_err = None
70 try:
57 try:
71 from vcsserver.remote.git import GitFactory, GitRemote
58 from vcsserver.remote.git_remote import GitFactory, GitRemote
72 except ImportError as e:
59 except ImportError as e:
73 GitFactory = None
60 GitFactory = None
74 GitRemote = None
61 GitRemote = None
75 git_import_err = e
62 git_import_err = e
76 if strict_vcs:
63 if strict_vcs:
77 raise
64 raise
78
65
79
66
80 hg_import_err = None
67 hg_import_err = None
81 try:
68 try:
82 from vcsserver.remote.hg import MercurialFactory, HgRemote
69 from vcsserver.remote.hg_remote import MercurialFactory, HgRemote
83 except ImportError as e:
70 except ImportError as e:
84 MercurialFactory = None
71 MercurialFactory = None
85 HgRemote = None
72 HgRemote = None
86 hg_import_err = e
73 hg_import_err = e
87 if strict_vcs:
74 if strict_vcs:
88 raise
75 raise
89
76
90
77
91 svn_import_err = None
78 svn_import_err = None
92 try:
79 try:
93 from vcsserver.remote.svn import SubversionFactory, SvnRemote
80 from vcsserver.remote.svn_remote import SubversionFactory, SvnRemote
94 except ImportError as e:
81 except ImportError as e:
95 SubversionFactory = None
82 SubversionFactory = None
96 SvnRemote = None
83 SvnRemote = None
97 svn_import_err = e
84 svn_import_err = e
98 if strict_vcs:
85 if strict_vcs:
99 raise
86 raise
100
87
88 log = logging.getLogger(__name__)
89
90 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
91 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
92
93 try:
94 locale.setlocale(locale.LC_ALL, '')
95 except locale.Error as e:
96 log.error(
97 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
98 os.environ['LC_ALL'] = 'C'
99
101
100
102 def _is_request_chunked(environ):
101 def _is_request_chunked(environ):
103 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
102 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
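# The flag is only consumed by set_env_from_config(), which sets
# environ['wsgi.input_terminated'] = True so WebOb will read a chunked request
# body without a Content-Length header.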
104 return stream
103 return stream
105
104
106
105
107 def log_max_fd():
106 def log_max_fd():
108 try:
107 try:
109 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
108 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
110 log.info('Max file descriptors value: %s', maxfd)
109 log.info('Max file descriptors value: %s', maxfd)
111 except Exception:
110 except Exception:
112 pass
111 pass
113
112
114
113
115 class VCS(object):
114 class VCS(object):
116 def __init__(self, locale_conf=None, cache_config=None):
115 def __init__(self, locale_conf=None, cache_config=None):
117 self.locale = locale_conf
116 self.locale = locale_conf
118 self.cache_config = cache_config
117 self.cache_config = cache_config
119 self._configure_locale()
118 self._configure_locale()
120
119
121 log_max_fd()
120 log_max_fd()
122
121
123 if GitFactory and GitRemote:
122 if GitFactory and GitRemote:
124 git_factory = GitFactory()
123 git_factory = GitFactory()
125 self._git_remote = GitRemote(git_factory)
124 self._git_remote = GitRemote(git_factory)
126 else:
125 else:
127 log.error("Git client import failed: %s", git_import_err)
126 log.error("Git client import failed: %s", git_import_err)
128
127
129 if MercurialFactory and HgRemote:
128 if MercurialFactory and HgRemote:
130 hg_factory = MercurialFactory()
129 hg_factory = MercurialFactory()
131 self._hg_remote = HgRemote(hg_factory)
130 self._hg_remote = HgRemote(hg_factory)
132 else:
131 else:
133 log.error("Mercurial client import failed: %s", hg_import_err)
132 log.error("Mercurial client import failed: %s", hg_import_err)
134
133
135 if SubversionFactory and SvnRemote:
134 if SubversionFactory and SvnRemote:
136 svn_factory = SubversionFactory()
135 svn_factory = SubversionFactory()
137
136
138 # hg factory is used for svn url validation
137 # hg factory is used for svn url validation
139 hg_factory = MercurialFactory()
138 hg_factory = MercurialFactory()
140 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
141 else:
140 else:
142 log.error("Subversion client import failed: %s", svn_import_err)
141 log.error("Subversion client import failed: %s", svn_import_err)
143
142
144 self._vcsserver = VcsServer()
143 self._vcsserver = VcsServer()
145
144
146 def _configure_locale(self):
145 def _configure_locale(self):
147 if self.locale:
146 if self.locale:
148 log.info('Setting locale `LC_ALL` to %s', self.locale)
147 log.info('Setting locale `LC_ALL` to %s', self.locale)
149 else:
148 else:
150 log.info('Configuring locale subsystem based on environment variables')
149 log.info('Configuring locale subsystem based on environment variables')
151 try:
150 try:
152 # If self.locale is the empty string, then the locale
151 # If self.locale is the empty string, then the locale
153 # module will use the environment variables. See the
152 # module will use the environment variables. See the
154 # documentation of the package `locale`.
153 # documentation of the package `locale`.
155 locale.setlocale(locale.LC_ALL, self.locale)
154 locale.setlocale(locale.LC_ALL, self.locale)
156
155
157 language_code, encoding = locale.getlocale()
156 language_code, encoding = locale.getlocale()
158 log.info(
157 log.info(
159 'Locale set to language code "%s" with encoding "%s".',
158 'Locale set to language code "%s" with encoding "%s".',
160 language_code, encoding)
159 language_code, encoding)
161 except locale.Error:
160 except locale.Error:
162 log.exception('Cannot set locale, not configuring the locale system')
161 log.exception('Cannot set locale, not configuring the locale system')
163
162
164
163
165 class WsgiProxy(object):
164 class WsgiProxy(object):
166 def __init__(self, wsgi):
165 def __init__(self, wsgi):
167 self.wsgi = wsgi
166 self.wsgi = wsgi
168
167
169 def __call__(self, environ, start_response):
168 def __call__(self, environ, start_response):
170 input_data = environ['wsgi.input'].read()
169 input_data = environ['wsgi.input'].read()
171 input_data = msgpack.unpackb(input_data)
170 input_data = msgpack.unpackb(input_data)
172
171
173 error = None
172 error = None
174 try:
173 try:
175 data, status, headers = self.wsgi.handle(
174 data, status, headers = self.wsgi.handle(
176 input_data['environment'], input_data['input_data'],
175 input_data['environment'], input_data['input_data'],
177 *input_data['args'], **input_data['kwargs'])
176 *input_data['args'], **input_data['kwargs'])
178 except Exception as e:
177 except Exception as e:
179 data, status, headers = [], None, None
178 data, status, headers = [], None, None
180 error = {
179 error = {
181 'message': str(e),
180 'message': str(e),
182 '_vcs_kind': getattr(e, '_vcs_kind', None)
181 '_vcs_kind': getattr(e, '_vcs_kind', None)
183 }
182 }
184
183
185 start_response('200 OK', [])  # WSGI expects a status string and a list of header tuples
184 start_response('200 OK', [])  # WSGI expects a status string and a list of header tuples
186 return self._iterator(error, status, headers, data)
185 return self._iterator(error, status, headers, data)
187
186
188 def _iterator(self, error, status, headers, data):
187 def _iterator(self, error, status, headers, data):
189 initial_data = [
188 initial_data = [
190 error,
189 error,
191 status,
190 status,
192 headers,
191 headers,
193 ]
192 ]
194
193
195 for d in chain(initial_data, data):
194 for d in chain(initial_data, data):
196 yield msgpack.packb(d)
195 yield msgpack.packb(d)
197
196
198
197
199 def not_found(request):
198 def not_found(request):
200 return {'status': '404 NOT FOUND'}
199 return {'status': '404 NOT FOUND'}
201
200
202
201
203 class VCSViewPredicate(object):
202 class VCSViewPredicate(object):
204 def __init__(self, val, config):
203 def __init__(self, val, config):
205 self.remotes = val
204 self.remotes = val
206
205
207 def text(self):
206 def text(self):
208 return f'vcs view method = {list(self.remotes.keys())}'
207 return f'vcs view method = {list(self.remotes.keys())}'
209
208
210 phash = text
209 phash = text
211
210
212 def __call__(self, context, request):
211 def __call__(self, context, request):
213 """
212 """
214 View predicate that returns true if given backend is supported by
213 View predicate that returns true if given backend is supported by
215 defined remotes.
214 defined remotes.
216 """
215 """
217 backend = request.matchdict.get('backend')
216 backend = request.matchdict.get('backend')
218 return backend in self.remotes
217 return backend in self.remotes
219
218
220
219
221 class HTTPApplication(object):
220 class HTTPApplication(object):
222 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
221 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
223
222
224 remote_wsgi = remote_wsgi
223 remote_wsgi = remote_wsgi
225 _use_echo_app = False
224 _use_echo_app = False
226
225
227 def __init__(self, settings=None, global_config=None):
226 def __init__(self, settings=None, global_config=None):
228
227
229 self.config = Configurator(settings=settings)
228 self.config = Configurator(settings=settings)
230 # Init our statsd at very start
229 # Init our statsd at very start
231 self.config.registry.statsd = StatsdClient.statsd
230 self.config.registry.statsd = StatsdClient.statsd
232 self.config.registry.vcs_call_context = {}
231 self.config.registry.vcs_call_context = {}
233
232
234 self.global_config = global_config
233 self.global_config = global_config
235 self.config.include('vcsserver.lib.rc_cache')
234 self.config.include('vcsserver.lib.rc_cache')
236 self.config.include('vcsserver.lib.rc_cache.archive_cache')
235 self.config.include('vcsserver.lib.rc_cache.archive_cache')
237
236
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
237 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
238 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240 self._remotes = {
239 self._remotes = {
241 'hg': vcs._hg_remote,
240 'hg': vcs._hg_remote,
242 'git': vcs._git_remote,
241 'git': vcs._git_remote,
243 'svn': vcs._svn_remote,
242 'svn': vcs._svn_remote,
244 'server': vcs._vcsserver,
243 'server': vcs._vcsserver,
245 }
244 }
246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
245 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
247 self._use_echo_app = True
246 self._use_echo_app = True
248 log.warning("Using EchoApp for VCS operations.")
247 log.warning("Using EchoApp for VCS operations.")
249 self.remote_wsgi = remote_wsgi_stub
248 self.remote_wsgi = remote_wsgi_stub
250
249
251 self._configure_settings(global_config, settings)
250 self._configure_settings(global_config, settings)
252
251
253 self._configure()
252 self._configure()
254
253
255 def _configure_settings(self, global_config, app_settings):
254 def _configure_settings(self, global_config, app_settings):
256 """
255 """
257 Configure the settings module.
256 Configure the settings module.
258 """
257 """
259 settings_merged = global_config.copy()
258 settings_merged = global_config.copy()
260 settings_merged.update(app_settings)
259 settings_merged.update(app_settings)
261
260
262 git_path = app_settings.get('git_path', None)
261 git_path = app_settings.get('git_path', None)
263 if git_path:
262 if git_path:
264 settings.GIT_EXECUTABLE = git_path
263 settings.GIT_EXECUTABLE = git_path
265 binary_dir = app_settings.get('core.binary_dir', None)
264 binary_dir = app_settings.get('core.binary_dir', None)
266 if binary_dir:
265 if binary_dir:
267 settings.BINARY_DIR = binary_dir
266 settings.BINARY_DIR = binary_dir
268
267
269 # Store the settings to make them available to other modules.
268 # Store the settings to make them available to other modules.
270 vcsserver.PYRAMID_SETTINGS = settings_merged
269 vcsserver.PYRAMID_SETTINGS = settings_merged
271 vcsserver.CONFIG = settings_merged
270 vcsserver.CONFIG = settings_merged
272
271
273 def _configure(self):
272 def _configure(self):
274 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
273 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
275
274
276 self.config.add_route('service', '/_service')
275 self.config.add_route('service', '/_service')
277 self.config.add_route('status', '/status')
276 self.config.add_route('status', '/status')
278 self.config.add_route('hg_proxy', '/proxy/hg')
277 self.config.add_route('hg_proxy', '/proxy/hg')
279 self.config.add_route('git_proxy', '/proxy/git')
278 self.config.add_route('git_proxy', '/proxy/git')
280
279
281 # rpc methods
280 # rpc methods
282 self.config.add_route('vcs', '/{backend}')
281 self.config.add_route('vcs', '/{backend}')
283
282
284 # streaming rpc remote methods
283 # streaming rpc remote methods
285 self.config.add_route('vcs_stream', '/{backend}/stream')
284 self.config.add_route('vcs_stream', '/{backend}/stream')
286
285
287 # vcs operations clone/push as streaming
286 # vcs operations clone/push as streaming
288 self.config.add_route('stream_git', '/stream/git/*repo_name')
287 self.config.add_route('stream_git', '/stream/git/*repo_name')
289 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
288 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
290
289
291 self.config.add_view(self.status_view, route_name='status', renderer='json')
290 self.config.add_view(self.status_view, route_name='status', renderer='json')
292 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
291 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
293
292
294 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
293 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
295 self.config.add_view(self.git_proxy(), route_name='git_proxy')
294 self.config.add_view(self.git_proxy(), route_name='git_proxy')
296 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
295 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
297 vcs_view=self._remotes)
296 vcs_view=self._remotes)
298 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
297 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
299 vcs_view=self._remotes)
298 vcs_view=self._remotes)
300
299
301 self.config.add_view(self.hg_stream(), route_name='stream_hg')
300 self.config.add_view(self.hg_stream(), route_name='stream_hg')
302 self.config.add_view(self.git_stream(), route_name='stream_git')
301 self.config.add_view(self.git_stream(), route_name='stream_git')
303
302
304 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
303 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
305
304
306 self.config.add_notfound_view(not_found, renderer='json')
305 self.config.add_notfound_view(not_found, renderer='json')
307
306
308 self.config.add_view(self.handle_vcs_exception, context=Exception)
307 self.config.add_view(self.handle_vcs_exception, context=Exception)
309
308
310 self.config.add_tween(
309 self.config.add_tween(
311 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
310 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
312 )
311 )
313 self.config.add_request_method(
312 self.config.add_request_method(
314 'vcsserver.lib.request_counter.get_request_counter',
313 'vcsserver.lib.request_counter.get_request_counter',
315 'request_count')
314 'request_count')
316
315
317 def wsgi_app(self):
316 def wsgi_app(self):
318 return self.config.make_wsgi_app()
317 return self.config.make_wsgi_app()
319
318
320 def _vcs_view_params(self, request):
319 def _vcs_view_params(self, request):
321 remote = self._remotes[request.matchdict['backend']]
320 remote = self._remotes[request.matchdict['backend']]
322 payload = msgpack.unpackb(request.body, use_list=True)
321 payload = msgpack.unpackb(request.body, use_list=True)
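# The msgpack payload is expected to look roughly like this (illustrative;
# 'some_method' is a placeholder, not a real remote method name):
#   {'id': <request id>, 'method': 'some_method', '_repo_name': 'repo',
#    'params': {'wire': {...}, 'args': [...], 'kwargs': {...}}}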
323
322
324 method = payload.get('method')
323 method = payload.get('method')
325 params = payload['params']
324 params = payload['params']
326 wire = params.get('wire')
325 wire = params.get('wire')
327 args = params.get('args')
326 args = params.get('args')
328 kwargs = params.get('kwargs')
327 kwargs = params.get('kwargs')
329 context_uid = None
328 context_uid = None
330
329
331 request.registry.vcs_call_context = {
330 request.registry.vcs_call_context = {
332 'method': method,
331 'method': method,
333 'repo_name': payload.get('_repo_name'),
332 'repo_name': payload.get('_repo_name'),
334 }
333 }
335
334
336 if wire:
335 if wire:
337 try:
336 try:
338 wire['context'] = context_uid = uuid.UUID(wire['context'])
337 wire['context'] = context_uid = uuid.UUID(wire['context'])
339 except KeyError:
338 except KeyError:
340 pass
339 pass
341 args.insert(0, wire)
340 args.insert(0, wire)
342 repo_state_uid = wire.get('repo_state_uid') if wire else None
341 repo_state_uid = wire.get('repo_state_uid') if wire else None
343
342
344 # NOTE(marcink): trading complexity for a slight performance gain
343 # NOTE(marcink): trading complexity for a slight performance gain
345 if log.isEnabledFor(logging.DEBUG):
344 if log.isEnabledFor(logging.DEBUG):
346 # also we SKIP printing out any of those methods' args since they may be excessive
345 # also we SKIP printing out any of those methods' args since they may be excessive
347 just_args_methods = {
346 just_args_methods = {
348 'commitctx': ('content', 'removed', 'updated'),
347 'commitctx': ('content', 'removed', 'updated'),
349 'commit': ('content', 'removed', 'updated')
348 'commit': ('content', 'removed', 'updated')
350 }
349 }
351 if method in just_args_methods:
350 if method in just_args_methods:
352 skip_args = just_args_methods[method]
351 skip_args = just_args_methods[method]
353 call_args = ''
352 call_args = ''
354 call_kwargs = {}
353 call_kwargs = {}
355 for k in kwargs:
354 for k in kwargs:
356 if k in skip_args:
355 if k in skip_args:
357 # replace our skip key with dummy
356 # replace our skip key with dummy
358 call_kwargs[k] = f'RemovedParam({k})'
357 call_kwargs[k] = f'RemovedParam({k})'
359 else:
358 else:
360 call_kwargs[k] = kwargs[k]
359 call_kwargs[k] = kwargs[k]
361 else:
360 else:
362 call_args = args[1:]
361 call_args = args[1:]
363 call_kwargs = kwargs
362 call_kwargs = kwargs
364
363
365 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
364 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
366 method, call_args, call_kwargs, context_uid, repo_state_uid)
365 method, call_args, call_kwargs, context_uid, repo_state_uid)
367
366
368 statsd = request.registry.statsd
367 statsd = request.registry.statsd
369 if statsd:
368 if statsd:
370 statsd.incr(
369 statsd.incr(
371 'vcsserver_method_total', tags=[
370 'vcsserver_method_total', tags=[
372 f"method:{method}",
371 f"method:{method}",
373 ])
372 ])
374 return payload, remote, method, args, kwargs
373 return payload, remote, method, args, kwargs
375
374
376 def vcs_view(self, request):
375 def vcs_view(self, request):
377
376
378 payload, remote, method, args, kwargs = self._vcs_view_params(request)
377 payload, remote, method, args, kwargs = self._vcs_view_params(request)
379 payload_id = payload.get('id')
378 payload_id = payload.get('id')
380
379
381 try:
380 try:
382 resp = getattr(remote, method)(*args, **kwargs)
381 resp = getattr(remote, method)(*args, **kwargs)
383 except Exception as e:
382 except Exception as e:
384 exc_info = list(sys.exc_info())
383 exc_info = list(sys.exc_info())
385 exc_type, exc_value, exc_traceback = exc_info
384 exc_type, exc_value, exc_traceback = exc_info
386
385
387 org_exc = getattr(e, '_org_exc', None)
386 org_exc = getattr(e, '_org_exc', None)
388 org_exc_name = None
387 org_exc_name = None
389 org_exc_tb = ''
388 org_exc_tb = ''
390 if org_exc:
389 if org_exc:
391 org_exc_name = org_exc.__class__.__name__
390 org_exc_name = org_exc.__class__.__name__
392 org_exc_tb = getattr(e, '_org_exc_tb', '')
391 org_exc_tb = getattr(e, '_org_exc_tb', '')
393 # replace our "faked" exception with our org
392 # replace our "faked" exception with our org
394 exc_info[0] = org_exc.__class__
393 exc_info[0] = org_exc.__class__
395 exc_info[1] = org_exc
394 exc_info[1] = org_exc
396
395
397 should_store_exc = True
396 should_store_exc = True
398 if org_exc:
397 if org_exc:
399 def get_exc_fqn(_exc_obj):
398 def get_exc_fqn(_exc_obj):
400 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
399 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
401 return module_name + '.' + org_exc_name
400 return module_name + '.' + org_exc_name
402
401
403 exc_fqn = get_exc_fqn(org_exc)
402 exc_fqn = get_exc_fqn(org_exc)
404
403
405 if exc_fqn in ['mercurial.error.RepoLookupError',
404 if exc_fqn in ['mercurial.error.RepoLookupError',
406 'vcsserver.exceptions.RefNotFoundException']:
405 'vcsserver.exceptions.RefNotFoundException']:
407 should_store_exc = False
406 should_store_exc = False
408
407
409 if should_store_exc:
408 if should_store_exc:
410 store_exception(id(exc_info), exc_info, request_path=request.path)
409 store_exception(id(exc_info), exc_info, request_path=request.path)
411
410
412 tb_info = format_exc(exc_info)
411 tb_info = format_exc(exc_info)
413
412
414 type_ = e.__class__.__name__
413 type_ = e.__class__.__name__
415 if type_ not in self.ALLOWED_EXCEPTIONS:
414 if type_ not in self.ALLOWED_EXCEPTIONS:
416 type_ = None
415 type_ = None
417
416
418 resp = {
417 resp = {
419 'id': payload_id,
418 'id': payload_id,
420 'error': {
419 'error': {
421 'message': str(e),
420 'message': str(e),
422 'traceback': tb_info,
421 'traceback': tb_info,
423 'org_exc': org_exc_name,
422 'org_exc': org_exc_name,
424 'org_exc_tb': org_exc_tb,
423 'org_exc_tb': org_exc_tb,
425 'type': type_
424 'type': type_
426 }
425 }
427 }
426 }
428
427
429 try:
428 try:
430 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
429 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
431 except AttributeError:
430 except AttributeError:
432 pass
431 pass
433 else:
432 else:
434 resp = {
433 resp = {
435 'id': payload_id,
434 'id': payload_id,
436 'result': resp
435 'result': resp
437 }
436 }
438 log.debug('Serving data for method %s', method)
437 log.debug('Serving data for method %s', method)
439 return resp
438 return resp
440
439
441 def vcs_stream_view(self, request):
440 def vcs_stream_view(self, request):
442 payload, remote, method, args, kwargs = self._vcs_view_params(request)
441 payload, remote, method, args, kwargs = self._vcs_view_params(request)
443 # this method has a stream: marker we remove it here
442 # this method has a stream: marker we remove it here
444 method = method.split('stream:')[-1]
443 method = method.split('stream:')[-1]
445 chunk_size = safe_int(payload.get('chunk_size')) or 4096
444 chunk_size = safe_int(payload.get('chunk_size')) or 4096
446
445
447 try:
446 resp = getattr(remote, method)(*args, **kwargs)
448 resp = getattr(remote, method)(*args, **kwargs)
449 except Exception as e:
450 raise
451
447
452 def get_chunked_data(method_resp):
448 def get_chunked_data(method_resp):
453 stream = io.BytesIO(method_resp)
449 stream = io.BytesIO(method_resp)
454 while 1:
450 while 1:
455 chunk = stream.read(chunk_size)
451 chunk = stream.read(chunk_size)
456 if not chunk:
452 if not chunk:
457 break
453 break
458 yield chunk
454 yield chunk
459
455
460 response = Response(app_iter=get_chunked_data(resp))
456 response = Response(app_iter=get_chunked_data(resp))
461 response.content_type = 'application/octet-stream'
457 response.content_type = 'application/octet-stream'
462
458
463 return response
459 return response
464
460
465 def status_view(self, request):
461 def status_view(self, request):
466 import vcsserver
462 import vcsserver
467 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
463 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
468 'pid': os.getpid()}
464 'pid': os.getpid()}
469
465
470 def service_view(self, request):
466 def service_view(self, request):
471 import vcsserver
467 import vcsserver
472
468
473 payload = msgpack.unpackb(request.body, use_list=True)
469 payload = msgpack.unpackb(request.body, use_list=True)
474 server_config, app_config = {}, {}
470 server_config, app_config = {}, {}
475
471
476 try:
472 try:
477 path = self.global_config['__file__']
473 path = self.global_config['__file__']
478 config = configparser.RawConfigParser()
474 config = configparser.RawConfigParser()
479
475
480 config.read(path)
476 config.read(path)
481
477
482 if config.has_section('server:main'):
478 if config.has_section('server:main'):
483 server_config = dict(config.items('server:main'))
479 server_config = dict(config.items('server:main'))
484 if config.has_section('app:main'):
480 if config.has_section('app:main'):
485 app_config = dict(config.items('app:main'))
481 app_config = dict(config.items('app:main'))
486
482
487 except Exception:
483 except Exception:
488 log.exception('Failed to read .ini file for display')
484 log.exception('Failed to read .ini file for display')
489
485
490 environ = list(os.environ.items())
486 environ = list(os.environ.items())
491
487
492 resp = {
488 resp = {
493 'id': payload.get('id'),
489 'id': payload.get('id'),
494 'result': dict(
490 'result': dict(
495 version=vcsserver.__version__,
491 version=vcsserver.__version__,
496 config=server_config,
492 config=server_config,
497 app_config=app_config,
493 app_config=app_config,
498 environ=environ,
494 environ=environ,
499 payload=payload,
495 payload=payload,
500 )
496 )
501 }
497 }
502 return resp
498 return resp
503
499
504 def _msgpack_renderer_factory(self, info):
500 def _msgpack_renderer_factory(self, info):
505
501
506 def _render(value, system):
502 def _render(value, system):
507 bin_type = False
503 bin_type = False
508 res = value.get('result')
504 res = value.get('result')
509 if isinstance(res, BytesEnvelope):
505 if isinstance(res, BytesEnvelope):
510 log.debug('Result is wrapped in BytesEnvelope type')
506 log.debug('Result is wrapped in BytesEnvelope type')
511 bin_type = True
507 bin_type = True
512 elif isinstance(res, BinaryEnvelope):
508 elif isinstance(res, BinaryEnvelope):
513 log.debug('Result is wrapped in BinaryEnvelope type')
509 log.debug('Result is wrapped in BinaryEnvelope type')
514 value['result'] = res.val
510 value['result'] = res.val
515 bin_type = True
511 bin_type = True
516
512
517 request = system.get('request')
513 request = system.get('request')
518 if request is not None:
514 if request is not None:
519 response = request.response
515 response = request.response
520 ct = response.content_type
516 ct = response.content_type
521 if ct == response.default_content_type:
517 if ct == response.default_content_type:
522 response.content_type = 'application/x-msgpack'
518 response.content_type = 'application/x-msgpack'
523 if bin_type:
519 if bin_type:
524 response.content_type = 'application/x-msgpack-bin'
520 response.content_type = 'application/x-msgpack-bin'
525
521
526 return msgpack.packb(value, use_bin_type=bin_type)
522 return msgpack.packb(value, use_bin_type=bin_type)
527 return _render
523 return _render
528
524
529 def set_env_from_config(self, environ, config):
525 def set_env_from_config(self, environ, config):
530 dict_conf = {}
526 dict_conf = {}
531 try:
527 try:
532 for elem in config:
528 for elem in config:
533 if elem[0] == 'rhodecode':
529 if elem[0] == 'rhodecode':
534 dict_conf = json.loads(elem[2])
530 dict_conf = json.loads(elem[2])
535 break
531 break
536 except Exception:
532 except Exception:
537 log.exception('Failed to fetch SCM CONFIG')
533 log.exception('Failed to fetch SCM CONFIG')
538 return
534 return
539
535
540 username = dict_conf.get('username')
536 username = dict_conf.get('username')
541 if username:
537 if username:
542 environ['REMOTE_USER'] = username
538 environ['REMOTE_USER'] = username
543 # mercurial specific, some extension APIs rely on this
539 # mercurial specific, some extension APIs rely on this
544 environ['HGUSER'] = username
540 environ['HGUSER'] = username
545
541
546 ip = dict_conf.get('ip')
542 ip = dict_conf.get('ip')
547 if ip:
543 if ip:
548 environ['REMOTE_HOST'] = ip
544 environ['REMOTE_HOST'] = ip
549
545
550 if _is_request_chunked(environ):
546 if _is_request_chunked(environ):
551 # set the compatibility flag for webob
547 # set the compatibility flag for webob
552 environ['wsgi.input_terminated'] = True
548 environ['wsgi.input_terminated'] = True
553
549
554 def hg_proxy(self):
550 def hg_proxy(self):
555 @wsgiapp
551 @wsgiapp
556 def _hg_proxy(environ, start_response):
552 def _hg_proxy(environ, start_response):
557 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
553 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
558 return app(environ, start_response)
554 return app(environ, start_response)
559 return _hg_proxy
555 return _hg_proxy
560
556
561 def git_proxy(self):
557 def git_proxy(self):
562 @wsgiapp
558 @wsgiapp
563 def _git_proxy(environ, start_response):
559 def _git_proxy(environ, start_response):
564 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
560 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
565 return app(environ, start_response)
561 return app(environ, start_response)
566 return _git_proxy
562 return _git_proxy
567
563
568 def hg_stream(self):
564 def hg_stream(self):
569 if self._use_echo_app:
565 if self._use_echo_app:
570 @wsgiapp
566 @wsgiapp
571 def _hg_stream(environ, start_response):
567 def _hg_stream(environ, start_response):
572 app = EchoApp('fake_path', 'fake_name', None)
568 app = EchoApp('fake_path', 'fake_name', None)
573 return app(environ, start_response)
569 return app(environ, start_response)
574 return _hg_stream
570 return _hg_stream
575 else:
571 else:
576 @wsgiapp
572 @wsgiapp
577 def _hg_stream(environ, start_response):
573 def _hg_stream(environ, start_response):
578 log.debug('http-app: handling hg stream')
574 log.debug('http-app: handling hg stream')
579 call_context = get_headers_call_context(environ)
575 call_context = get_headers_call_context(environ)
580
576
581 repo_path = call_context['repo_path']
577 repo_path = call_context['repo_path']
582 repo_name = call_context['repo_name']
578 repo_name = call_context['repo_name']
583 config = call_context['repo_config']
579 config = call_context['repo_config']
584
580
585 app = scm_app.create_hg_wsgi_app(
581 app = scm_app.create_hg_wsgi_app(
586 repo_path, repo_name, config)
582 repo_path, repo_name, config)
587
583
588 # Consistent path information for hgweb
584 # Consistent path information for hgweb
589 environ['PATH_INFO'] = call_context['path_info']
585 environ['PATH_INFO'] = call_context['path_info']
590 environ['REPO_NAME'] = repo_name
586 environ['REPO_NAME'] = repo_name
591 self.set_env_from_config(environ, config)
587 self.set_env_from_config(environ, config)
592
588
593 log.debug('http-app: starting app handler '
589 log.debug('http-app: starting app handler '
594 'with %s and process request', app)
590 'with %s and process request', app)
595 return app(environ, ResponseFilter(start_response))
591 return app(environ, ResponseFilter(start_response))
596 return _hg_stream
592 return _hg_stream
597
593
598 def git_stream(self):
594 def git_stream(self):
599 if self._use_echo_app:
595 if self._use_echo_app:
600 @wsgiapp
596 @wsgiapp
601 def _git_stream(environ, start_response):
597 def _git_stream(environ, start_response):
602 app = EchoApp('fake_path', 'fake_name', None)
598 app = EchoApp('fake_path', 'fake_name', None)
603 return app(environ, start_response)
599 return app(environ, start_response)
604 return _git_stream
600 return _git_stream
605 else:
601 else:
606 @wsgiapp
602 @wsgiapp
607 def _git_stream(environ, start_response):
603 def _git_stream(environ, start_response):
608 log.debug('http-app: handling git stream')
604 log.debug('http-app: handling git stream')
609
605
610 call_context = get_headers_call_context(environ)
606 call_context = get_headers_call_context(environ)
611
607
612 repo_path = call_context['repo_path']
608 repo_path = call_context['repo_path']
613 repo_name = call_context['repo_name']
609 repo_name = call_context['repo_name']
614 config = call_context['repo_config']
610 config = call_context['repo_config']
615
611
616 environ['PATH_INFO'] = call_context['path_info']
612 environ['PATH_INFO'] = call_context['path_info']
617 self.set_env_from_config(environ, config)
613 self.set_env_from_config(environ, config)
618
614
619 content_type = environ.get('CONTENT_TYPE', '')
615 content_type = environ.get('CONTENT_TYPE', '')
620
616
621 path = environ['PATH_INFO']
617 path = environ['PATH_INFO']
622 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
618 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
623 log.debug(
619 log.debug(
624 'LFS: Detecting if request `%s` is LFS server path based '
620 'LFS: Detecting if request `%s` is LFS server path based '
625 'on content type:`%s`, is_lfs:%s',
621 'on content type:`%s`, is_lfs:%s',
626 path, content_type, is_lfs_request)
622 path, content_type, is_lfs_request)
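# GIT_LFS_CONTENT_TYPE / GIT_LFS_PROTO_PAT are assumed to follow the standard
# Git LFS batch API (content type application/vnd.git-lfs+json, paths under
# <repo>/info/lfs/); the exact values are defined in vcsserver.git_lfs.app.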
627
623
628 if not is_lfs_request:
624 if not is_lfs_request:
629 # fallback detection by path
625 # fallback detection by path
630 if GIT_LFS_PROTO_PAT.match(path):
626 if GIT_LFS_PROTO_PAT.match(path):
631 is_lfs_request = True
627 is_lfs_request = True
632 log.debug(
628 log.debug(
633 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
629 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
634 path, is_lfs_request)
630 path, is_lfs_request)
635
631
636 if is_lfs_request:
632 if is_lfs_request:
637 app = scm_app.create_git_lfs_wsgi_app(
633 app = scm_app.create_git_lfs_wsgi_app(
638 repo_path, repo_name, config)
634 repo_path, repo_name, config)
639 else:
635 else:
640 app = scm_app.create_git_wsgi_app(
636 app = scm_app.create_git_wsgi_app(
641 repo_path, repo_name, config)
637 repo_path, repo_name, config)
642
638
643 log.debug('http-app: starting app handler '
639 log.debug('http-app: starting app handler '
644 'with %s and process request', app)
640 'with %s and process request', app)
645
641
646 return app(environ, start_response)
642 return app(environ, start_response)
647
643
648 return _git_stream
644 return _git_stream
649
645
650 def handle_vcs_exception(self, exception, request):
646 def handle_vcs_exception(self, exception, request):
651 _vcs_kind = getattr(exception, '_vcs_kind', '')
647 _vcs_kind = getattr(exception, '_vcs_kind', '')
652
648
653 if _vcs_kind == 'repo_locked':
649 if _vcs_kind == 'repo_locked':
654 headers_call_context = get_headers_call_context(request.environ)
650 headers_call_context = get_headers_call_context(request.environ)
655 status_code = safe_int(headers_call_context['locked_status_code'])
651 status_code = safe_int(headers_call_context['locked_status_code'])
656
652
657 return HTTPRepoLocked(
653 return HTTPRepoLocked(
658 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
654 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
659
655
660 elif _vcs_kind == 'repo_branch_protected':
656 elif _vcs_kind == 'repo_branch_protected':
661 # Get custom repo-branch-protected status code if present.
657 # Get custom repo-branch-protected status code if present.
662 return HTTPRepoBranchProtected(
658 return HTTPRepoBranchProtected(
663 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
659 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
664
660
665 exc_info = request.exc_info
661 exc_info = request.exc_info
666 store_exception(id(exc_info), exc_info)
662 store_exception(id(exc_info), exc_info)
667
663
668 traceback_info = 'unavailable'
664 traceback_info = 'unavailable'
669 if request.exc_info:
665 if request.exc_info:
670 traceback_info = format_exc(request.exc_info)
666 traceback_info = format_exc(request.exc_info)
671
667
672 log.error(
668 log.error(
673 'error occurred handling this request for path: %s, \n%s',
669 'error occurred handling this request for path: %s, \n%s',
674 request.path, traceback_info)
670 request.path, traceback_info)
675
671
676 statsd = request.registry.statsd
672 statsd = request.registry.statsd
677 if statsd:
673 if statsd:
678 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
674 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
679 statsd.incr('vcsserver_exception_total',
675 statsd.incr('vcsserver_exception_total',
680 tags=[f"type:{exc_type}"])
676 tags=[f"type:{exc_type}"])
681 raise exception
677 raise exception
682
678
683
679
684 class ResponseFilter(object):
680 class ResponseFilter(object):
685
681
686 def __init__(self, start_response):
682 def __init__(self, start_response):
687 self._start_response = start_response
683 self._start_response = start_response
688
684
689 def __call__(self, status, response_headers, exc_info=None):
685 def __call__(self, status, response_headers, exc_info=None):
690 headers = tuple(
686 headers = tuple(
691 (h, v) for h, v in response_headers
687 (h, v) for h, v in response_headers
692 if not wsgiref.util.is_hop_by_hop(h))
688 if not wsgiref.util.is_hop_by_hop(h))
693 return self._start_response(status, headers, exc_info)
689 return self._start_response(status, headers, exc_info)
694
690
695
691
696 def sanitize_settings_and_apply_defaults(global_config, settings):
692 def sanitize_settings_and_apply_defaults(global_config, settings):
697 global_settings_maker = SettingsMaker(global_config)
693 _global_settings_maker = SettingsMaker(global_config)
698 settings_maker = SettingsMaker(settings)
694 settings_maker = SettingsMaker(settings)
699
695
700 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
696 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
701
697
702 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
698 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
703 settings_maker.enable_logging(logging_conf)
699 settings_maker.enable_logging(logging_conf)
704
700
705 # Default includes, possible to change as a user
701 # Default includes, possible to change as a user
706 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
702 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
707 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
703 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
708
704
709 settings_maker.make_setting('__file__', global_config.get('__file__'))
705 settings_maker.make_setting('__file__', global_config.get('__file__'))
710
706
711 settings_maker.make_setting('pyramid.default_locale_name', 'en')
707 settings_maker.make_setting('pyramid.default_locale_name', 'en')
712 settings_maker.make_setting('locale', 'en_US.UTF-8')
708 settings_maker.make_setting('locale', 'en_US.UTF-8')
713
709
714 settings_maker.make_setting('core.binary_dir', '')
710 settings_maker.make_setting('core.binary_dir', '')
715
711
716 temp_store = tempfile.gettempdir()
712 temp_store = tempfile.gettempdir()
717 default_cache_dir = os.path.join(temp_store, 'rc_cache')
713 default_cache_dir = os.path.join(temp_store, 'rc_cache')
718 # save the default cache dir and use it for all backends later.
714 # save the default cache dir and use it for all backends later.
719 default_cache_dir = settings_maker.make_setting(
715 default_cache_dir = settings_maker.make_setting(
720 'cache_dir',
716 'cache_dir',
721 default=default_cache_dir, default_when_empty=True,
717 default=default_cache_dir, default_when_empty=True,
722 parser='dir:ensured')
718 parser='dir:ensured')
723
719
724 # exception store cache
720 # exception store cache
725 settings_maker.make_setting(
721 settings_maker.make_setting(
726 'exception_tracker.store_path',
722 'exception_tracker.store_path',
727 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
723 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
728 parser='dir:ensured'
724 parser='dir:ensured'
729 )
725 )
730
726
731 # repo_object cache defaults
727 # repo_object cache defaults
732 settings_maker.make_setting(
728 settings_maker.make_setting(
733 'rc_cache.repo_object.backend',
729 'rc_cache.repo_object.backend',
734 default='dogpile.cache.rc.file_namespace',
730 default='dogpile.cache.rc.file_namespace',
735 parser='string')
731 parser='string')
736 settings_maker.make_setting(
732 settings_maker.make_setting(
737 'rc_cache.repo_object.expiration_time',
733 'rc_cache.repo_object.expiration_time',
738 default=30 * 24 * 60 * 60, # 30days
734 default=30 * 24 * 60 * 60, # 30days
739 parser='int')
735 parser='int')
740 settings_maker.make_setting(
736 settings_maker.make_setting(
741 'rc_cache.repo_object.arguments.filename',
737 'rc_cache.repo_object.arguments.filename',
742 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
738 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
743 parser='string')
739 parser='string')
744
740
745 # statsd
741 # statsd
746 settings_maker.make_setting('statsd.enabled', False, parser='bool')
742 settings_maker.make_setting('statsd.enabled', False, parser='bool')
747 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
743 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
748 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
744 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
749 settings_maker.make_setting('statsd.statsd_prefix', '')
745 settings_maker.make_setting('statsd.statsd_prefix', '')
750 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
746 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
751
747
752 settings_maker.env_expand()
748 settings_maker.env_expand()
753
749
754
750
755 def main(global_config, **settings):
751 def main(global_config, **settings):
756 start_time = time.time()
752 start_time = time.time()
757 log.info('Pyramid app config starting')
753 log.info('Pyramid app config starting')
758
754
759 if MercurialFactory:
755 if MercurialFactory:
760 hgpatches.patch_largefiles_capabilities()
756 hgpatches.patch_largefiles_capabilities()
761 hgpatches.patch_subrepo_type_mapping()
757 hgpatches.patch_subrepo_type_mapping()
762
758
763 # Fill in and sanitize the defaults & do ENV expansion
759 # Fill in and sanitize the defaults & do ENV expansion
764 sanitize_settings_and_apply_defaults(global_config, settings)
760 sanitize_settings_and_apply_defaults(global_config, settings)
765
761
766 # init and bootstrap StatsdClient
762 # init and bootstrap StatsdClient
767 StatsdClient.setup(settings)
763 StatsdClient.setup(settings)
768
764
769 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
765 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
770 total_time = time.time() - start_time
766 total_time = time.time() - start_time
771 log.info('Pyramid app created and configured in %.2fs', total_time)
767 log.info('Pyramid app created and configured in %.2fs', total_time)
772 return pyramid_app
768 return pyramid_app
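# Illustrative deployment note: `main` is the PasteDeploy entry point and is
# typically wired up from an ini file, e.g. (egg name is an assumption):
#   [app:main]
#   use = egg:vcsserver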
773
774
NO CONTENT: file renamed from vcsserver/remote/git.py to vcsserver/remote/git_remote.py
NO CONTENT: file renamed from vcsserver/remote/hg.py to vcsserver/remote/hg_remote.py
@@ -1,935 +1,943 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
31
32 import svn.client # noqa
32 import svn.client # noqa
33 import svn.core # noqa
33 import svn.core # noqa
34 import svn.delta # noqa
34 import svn.delta # noqa
35 import svn.diff # noqa
35 import svn.diff # noqa
36 import svn.fs # noqa
36 import svn.fs # noqa
37 import svn.repos # noqa
37 import svn.repos # noqa
38
38
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
40 from vcsserver.base import (
41 RepoFactory,
42 raise_from_original,
43 ArchiveNode,
44 store_archive_in_cache,
45 BytesEnvelope,
46 BinaryEnvelope,
47 )
41 from vcsserver.exceptions import NoContentException
48 from vcsserver.exceptions import NoContentException
42 from vcsserver.str_utils import safe_str, safe_bytes
49 from vcsserver.str_utils import safe_str, safe_bytes
43 from vcsserver.type_utils import assert_bytes
50 from vcsserver.type_utils import assert_bytes
44 from vcsserver.vcs_base import RemoteBase
51 from vcsserver.vcs_base import RemoteBase
45 from vcsserver.lib.svnremoterepo import svnremoterepo
52 from vcsserver.lib.svnremoterepo import svnremoterepo
53
46 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
47
55
48
56
49 svn_compatible_versions_map = {
57 svn_compatible_versions_map = {
50 'pre-1.4-compatible': '1.3',
58 'pre-1.4-compatible': '1.3',
51 'pre-1.5-compatible': '1.4',
59 'pre-1.5-compatible': '1.4',
52 'pre-1.6-compatible': '1.5',
60 'pre-1.6-compatible': '1.5',
53 'pre-1.8-compatible': '1.7',
61 'pre-1.8-compatible': '1.7',
54 'pre-1.9-compatible': '1.8',
62 'pre-1.9-compatible': '1.8',
55 }
63 }
56
64
57 current_compatible_version = '1.14'
65 current_compatible_version = '1.14'
58
66
59
67
60 def reraise_safe_exceptions(func):
68 def reraise_safe_exceptions(func):
61 """Decorator for converting svn exceptions to something neutral."""
69 """Decorator for converting svn exceptions to something neutral."""
62 def wrapper(*args, **kwargs):
70 def wrapper(*args, **kwargs):
63 try:
71 try:
64 return func(*args, **kwargs)
72 return func(*args, **kwargs)
65 except Exception as e:
73 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
74 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in svn remote call")
75 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException(e), e)
76 raise_from_original(exceptions.UnhandledException(e), e)
69 raise
77 raise
70 return wrapper
78 return wrapper
71
79
72
80
73 class SubversionFactory(RepoFactory):
81 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
82 repo_type = 'svn'
75
83
76 def _create_repo(self, wire, create, compatible_version):
84 def _create_repo(self, wire, create, compatible_version):
77 path = svn.core.svn_path_canonicalize(wire['path'])
85 path = svn.core.svn_path_canonicalize(wire['path'])
78 if create:
86 if create:
79 fs_config = {'compatible-version': current_compatible_version}
87 fs_config = {'compatible-version': current_compatible_version}
80 if compatible_version:
88 if compatible_version:
81
89
82 compatible_version_string = \
90 compatible_version_string = \
83 svn_compatible_versions_map.get(compatible_version) \
91 svn_compatible_versions_map.get(compatible_version) \
84 or compatible_version
92 or compatible_version
85 fs_config['compatible-version'] = compatible_version_string
93 fs_config['compatible-version'] = compatible_version_string
86
94
87 log.debug('Create SVN repo with config `%s`', fs_config)
95 log.debug('Create SVN repo with config `%s`', fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
96 repo = svn.repos.create(path, "", "", None, fs_config)
89 else:
97 else:
90 repo = svn.repos.open(path)
98 repo = svn.repos.open(path)
91
99
92 log.debug('repository created: got SVN object: %s', repo)
100 log.debug('repository created: got SVN object: %s', repo)
93 return repo
101 return repo
94
102
95 def repo(self, wire, create=False, compatible_version=None):
103 def repo(self, wire, create=False, compatible_version=None):
96 """
104 """
97 Get a repository instance for the given path.
105 Get a repository instance for the given path.
98 """
106 """
99 return self._create_repo(wire, create, compatible_version)
107 return self._create_repo(wire, create, compatible_version)
100
108
101
109
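The interplay between svn_compatible_versions_map and the raw value passed to SubversionFactory._create_repo can be summarised with a small sketch. The helper name below is hypothetical and the assertions are only examples; it relies on the map and default defined just above.

    # Hypothetical helper mirroring the lookup in _create_repo: legacy
    # 'pre-X-compatible' keys map to a concrete version, unknown values pass
    # through unchanged, and an empty value falls back to the default.
    def resolve_compatible_version(compatible_version, default=current_compatible_version):
        if not compatible_version:
            return default
        return svn_compatible_versions_map.get(compatible_version) or compatible_version

    assert resolve_compatible_version(None) == '1.14'
    assert resolve_compatible_version('pre-1.6-compatible') == '1.5'
    assert resolve_compatible_version('1.9') == '1.9'  # unmapped values are used as-is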
102 NODE_TYPE_MAPPING = {
110 NODE_TYPE_MAPPING = {
103 svn.core.svn_node_file: 'file',
111 svn.core.svn_node_file: 'file',
104 svn.core.svn_node_dir: 'dir',
112 svn.core.svn_node_dir: 'dir',
105 }
113 }
106
114
107
115
108 class SvnRemote(RemoteBase):
116 class SvnRemote(RemoteBase):
109
117
110 def __init__(self, factory, hg_factory=None):
118 def __init__(self, factory, hg_factory=None):
111 self._factory = factory
119 self._factory = factory
112
120
113 self._bulk_methods = {
121 self._bulk_methods = {
114 # NOT supported in SVN ATM...
122 # NOT supported in SVN ATM...
115 }
123 }
116 self._bulk_file_methods = {
124 self._bulk_file_methods = {
117 "size": self.get_file_size,
125 "size": self.get_file_size,
118 "data": self.get_file_content,
126 "data": self.get_file_content,
119 "flags": self.get_node_type,
127 "flags": self.get_node_type,
120 "is_binary": self.is_binary,
128 "is_binary": self.is_binary,
121 "md5": self.md5_hash
129 "md5": self.md5_hash
122 }
130 }
123
131
124 @reraise_safe_exceptions
132 @reraise_safe_exceptions
125 def bulk_file_request(self, wire, commit_id, path, pre_load):
133 def bulk_file_request(self, wire, commit_id, path, pre_load):
126 cache_on, context_uid, repo_id = self._cache_on(wire)
134 cache_on, context_uid, repo_id = self._cache_on(wire)
127 region = self._region(wire)
135 region = self._region(wire)
128
136
129 # since we use a unified API, we need to cast from str to int for SVN
137 # since we use a unified API, we need to cast from str to int for SVN
130 commit_id = int(commit_id)
138 commit_id = int(commit_id)
131
139
132 @region.conditional_cache_on_arguments(condition=cache_on)
140 @region.conditional_cache_on_arguments(condition=cache_on)
133 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
141 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
134 result = {}
142 result = {}
135 for attr in pre_load:
143 for attr in pre_load:
136 try:
144 try:
137 method = self._bulk_file_methods[attr]
145 method = self._bulk_file_methods[attr]
138 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
146 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
139 result[attr] = method(wire, _commit_id, _path)
147 result[attr] = method(wire, _commit_id, _path)
140 except KeyError as e:
148 except KeyError as e:
141 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
149 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
142 return result
150 return result
143
151
144 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
152 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
145
153
146 @reraise_safe_exceptions
154 @reraise_safe_exceptions
147 def discover_svn_version(self):
155 def discover_svn_version(self):
148 try:
156 try:
149 import svn.core
157 import svn.core
150 svn_ver = svn.core.SVN_VERSION
158 svn_ver = svn.core.SVN_VERSION
151 except ImportError:
159 except ImportError:
152 svn_ver = None
160 svn_ver = None
153 return safe_str(svn_ver)
161 return safe_str(svn_ver)
154
162
155 @reraise_safe_exceptions
163 @reraise_safe_exceptions
156 def is_empty(self, wire):
164 def is_empty(self, wire):
157 try:
165 try:
158 return self.lookup(wire, -1) == 0
166 return self.lookup(wire, -1) == 0
159 except Exception:
167 except Exception:
160 log.exception("failed to read object_store")
168 log.exception("failed to read object_store")
161 return False
169 return False
162
170
163 def check_url(self, url, config):
171 def check_url(self, url, config):
164
172
165 # the uuid call returns a valid UUID only for a proper repo,
173 # the uuid call returns a valid UUID only for a proper repo,
166 # otherwise it raises an exception
174 # otherwise it raises an exception
167 username, password, src_url = self.get_url_and_credentials(url)
175 username, password, src_url = self.get_url_and_credentials(url)
168 try:
176 try:
169 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
177 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
170 except Exception:
178 except Exception:
171 tb = traceback.format_exc()
179 tb = traceback.format_exc()
172 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
180 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
173 raise URLError(f'"{url}" is not a valid Subversion source url.')
181 raise URLError(f'"{url}" is not a valid Subversion source url.')
174 return True
182 return True
175
183
176 def is_path_valid_repository(self, wire, path):
184 def is_path_valid_repository(self, wire, path):
177
185
178 # NOTE(marcink): short-circuit the check for an SVN repo;
186 # NOTE(marcink): short-circuit the check for an SVN repo;
179 # repos.open might be expensive to call, but we have one cheap
187 # repos.open might be expensive to call, but we have one cheap
180 # precondition we can use: check for the 'format' file
188 # precondition we can use: check for the 'format' file
181
189
182 if not os.path.isfile(os.path.join(path, 'format')):
190 if not os.path.isfile(os.path.join(path, 'format')):
183 return False
191 return False
184
192
185 try:
193 try:
186 svn.repos.open(path)
194 svn.repos.open(path)
187 except svn.core.SubversionException:
195 except svn.core.SubversionException:
188 tb = traceback.format_exc()
196 tb = traceback.format_exc()
189 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
197 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
190 return False
198 return False
191 return True
199 return True
192
200
193 @reraise_safe_exceptions
201 @reraise_safe_exceptions
194 def verify(self, wire,):
202 def verify(self, wire,):
195 repo_path = wire['path']
203 repo_path = wire['path']
196 if not self.is_path_valid_repository(wire, repo_path):
204 if not self.is_path_valid_repository(wire, repo_path):
197 raise Exception(
205 raise Exception(
198 "Path %s is not a valid Subversion repository." % repo_path)
206 "Path %s is not a valid Subversion repository." % repo_path)
199
207
200 cmd = ['svnadmin', 'info', repo_path]
208 cmd = ['svnadmin', 'info', repo_path]
201 stdout, stderr = subprocessio.run_command(cmd)
209 stdout, stderr = subprocessio.run_command(cmd)
202 return stdout
210 return stdout
203
211
204 @reraise_safe_exceptions
212 @reraise_safe_exceptions
205 def lookup(self, wire, revision):
213 def lookup(self, wire, revision):
206 if revision not in [-1, None, 'HEAD']:
214 if revision not in [-1, None, 'HEAD']:
207 raise NotImplementedError
215 raise NotImplementedError
208 repo = self._factory.repo(wire)
216 repo = self._factory.repo(wire)
209 fs_ptr = svn.repos.fs(repo)
217 fs_ptr = svn.repos.fs(repo)
210 head = svn.fs.youngest_rev(fs_ptr)
218 head = svn.fs.youngest_rev(fs_ptr)
211 return head
219 return head
212
220
213 @reraise_safe_exceptions
221 @reraise_safe_exceptions
214 def lookup_interval(self, wire, start_ts, end_ts):
222 def lookup_interval(self, wire, start_ts, end_ts):
215 repo = self._factory.repo(wire)
223 repo = self._factory.repo(wire)
216 fsobj = svn.repos.fs(repo)
224 fsobj = svn.repos.fs(repo)
217 start_rev = None
225 start_rev = None
218 end_rev = None
226 end_rev = None
219 if start_ts:
227 if start_ts:
220 start_ts_svn = apr_time_t(start_ts)
228 start_ts_svn = apr_time_t(start_ts)
221 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
229 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
222 else:
230 else:
223 start_rev = 1
231 start_rev = 1
224 if end_ts:
232 if end_ts:
225 end_ts_svn = apr_time_t(end_ts)
233 end_ts_svn = apr_time_t(end_ts)
226 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
234 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
227 else:
235 else:
228 end_rev = svn.fs.youngest_rev(fsobj)
236 end_rev = svn.fs.youngest_rev(fsobj)
229 return start_rev, end_rev
237 return start_rev, end_rev
230
238
231 @reraise_safe_exceptions
239 @reraise_safe_exceptions
232 def revision_properties(self, wire, revision):
240 def revision_properties(self, wire, revision):
233
241
234 cache_on, context_uid, repo_id = self._cache_on(wire)
242 cache_on, context_uid, repo_id = self._cache_on(wire)
235 region = self._region(wire)
243 region = self._region(wire)
236
244
237 @region.conditional_cache_on_arguments(condition=cache_on)
245 @region.conditional_cache_on_arguments(condition=cache_on)
238 def _revision_properties(_repo_id, _revision):
246 def _revision_properties(_repo_id, _revision):
239 repo = self._factory.repo(wire)
247 repo = self._factory.repo(wire)
240 fs_ptr = svn.repos.fs(repo)
248 fs_ptr = svn.repos.fs(repo)
241 return svn.fs.revision_proplist(fs_ptr, revision)
249 return svn.fs.revision_proplist(fs_ptr, revision)
242 return _revision_properties(repo_id, revision)
250 return _revision_properties(repo_id, revision)
243
251
244 def revision_changes(self, wire, revision):
252 def revision_changes(self, wire, revision):
245
253
246 repo = self._factory.repo(wire)
254 repo = self._factory.repo(wire)
247 fsobj = svn.repos.fs(repo)
255 fsobj = svn.repos.fs(repo)
248 rev_root = svn.fs.revision_root(fsobj, revision)
256 rev_root = svn.fs.revision_root(fsobj, revision)
249
257
250 editor = svn.repos.ChangeCollector(fsobj, rev_root)
258 editor = svn.repos.ChangeCollector(fsobj, rev_root)
251 editor_ptr, editor_baton = svn.delta.make_editor(editor)
259 editor_ptr, editor_baton = svn.delta.make_editor(editor)
252 base_dir = ""
260 base_dir = ""
253 send_deltas = False
261 send_deltas = False
254 svn.repos.replay2(
262 svn.repos.replay2(
255 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
263 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
256 editor_ptr, editor_baton, None)
264 editor_ptr, editor_baton, None)
257
265
258 added = []
266 added = []
259 changed = []
267 changed = []
260 removed = []
268 removed = []
261
269
262 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
270 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
263 for path, change in editor.changes.items():
271 for path, change in editor.changes.items():
264 # TODO: Decide what to do with directory nodes. Subversion can add
272 # TODO: Decide what to do with directory nodes. Subversion can add
265 # empty directories.
273 # empty directories.
266
274
267 if change.item_kind == svn.core.svn_node_dir:
275 if change.item_kind == svn.core.svn_node_dir:
268 continue
276 continue
269 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
277 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
270 added.append(path)
278 added.append(path)
271 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
279 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
272 svn.repos.CHANGE_ACTION_REPLACE]:
280 svn.repos.CHANGE_ACTION_REPLACE]:
273 changed.append(path)
281 changed.append(path)
274 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
282 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
275 removed.append(path)
283 removed.append(path)
276 else:
284 else:
277 raise NotImplementedError(
285 raise NotImplementedError(
278 "Action {} not supported on path {}".format(
286 "Action {} not supported on path {}".format(
279 change.action, path))
287 change.action, path))
280
288
281 changes = {
289 changes = {
282 'added': added,
290 'added': added,
283 'changed': changed,
291 'changed': changed,
284 'removed': removed,
292 'removed': removed,
285 }
293 }
286 return changes
294 return changes
287
295
288 @reraise_safe_exceptions
296 @reraise_safe_exceptions
289 def node_history(self, wire, path, revision, limit):
297 def node_history(self, wire, path, revision, limit):
290 cache_on, context_uid, repo_id = self._cache_on(wire)
298 cache_on, context_uid, repo_id = self._cache_on(wire)
291 region = self._region(wire)
299 region = self._region(wire)
292
300
293 @region.conditional_cache_on_arguments(condition=cache_on)
301 @region.conditional_cache_on_arguments(condition=cache_on)
294 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
302 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
295 cross_copies = False
303 cross_copies = False
296 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
297 fsobj = svn.repos.fs(repo)
305 fsobj = svn.repos.fs(repo)
298 rev_root = svn.fs.revision_root(fsobj, revision)
306 rev_root = svn.fs.revision_root(fsobj, revision)
299
307
300 history_revisions = []
308 history_revisions = []
301 history = svn.fs.node_history(rev_root, path)
309 history = svn.fs.node_history(rev_root, path)
302 history = svn.fs.history_prev(history, cross_copies)
310 history = svn.fs.history_prev(history, cross_copies)
303 while history:
311 while history:
304 __, node_revision = svn.fs.history_location(history)
312 __, node_revision = svn.fs.history_location(history)
305 history_revisions.append(node_revision)
313 history_revisions.append(node_revision)
306 if limit and len(history_revisions) >= limit:
314 if limit and len(history_revisions) >= limit:
307 break
315 break
308 history = svn.fs.history_prev(history, cross_copies)
316 history = svn.fs.history_prev(history, cross_copies)
309 return history_revisions
317 return history_revisions
310 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
318 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
311
319
312 @reraise_safe_exceptions
320 @reraise_safe_exceptions
313 def node_properties(self, wire, path, revision):
321 def node_properties(self, wire, path, revision):
314 cache_on, context_uid, repo_id = self._cache_on(wire)
322 cache_on, context_uid, repo_id = self._cache_on(wire)
315 region = self._region(wire)
323 region = self._region(wire)
316
324
317 @region.conditional_cache_on_arguments(condition=cache_on)
325 @region.conditional_cache_on_arguments(condition=cache_on)
318 def _node_properties(_repo_id, _path, _revision):
326 def _node_properties(_repo_id, _path, _revision):
319 repo = self._factory.repo(wire)
327 repo = self._factory.repo(wire)
320 fsobj = svn.repos.fs(repo)
328 fsobj = svn.repos.fs(repo)
321 rev_root = svn.fs.revision_root(fsobj, revision)
329 rev_root = svn.fs.revision_root(fsobj, revision)
322 return svn.fs.node_proplist(rev_root, path)
330 return svn.fs.node_proplist(rev_root, path)
323 return _node_properties(repo_id, path, revision)
331 return _node_properties(repo_id, path, revision)
324
332
325 def file_annotate(self, wire, path, revision):
333 def file_annotate(self, wire, path, revision):
326 abs_path = 'file://' + urllib.request.pathname2url(
334 abs_path = 'file://' + urllib.request.pathname2url(
327 vcspath.join(wire['path'], path))
335 vcspath.join(wire['path'], path))
328 file_uri = svn.core.svn_path_canonicalize(abs_path)
336 file_uri = svn.core.svn_path_canonicalize(abs_path)
329
337
330 start_rev = svn_opt_revision_value_t(0)
338 start_rev = svn_opt_revision_value_t(0)
331 peg_rev = svn_opt_revision_value_t(revision)
339 peg_rev = svn_opt_revision_value_t(revision)
332 end_rev = peg_rev
340 end_rev = peg_rev
333
341
334 annotations = []
342 annotations = []
335
343
336 def receiver(line_no, revision, author, date, line, pool):
344 def receiver(line_no, revision, author, date, line, pool):
337 annotations.append((line_no, revision, line))
345 annotations.append((line_no, revision, line))
338
346
339 # TODO: Cannot use blame5, missing typemap function in the swig code
347 # TODO: Cannot use blame5, missing typemap function in the swig code
340 try:
348 try:
341 svn.client.blame2(
349 svn.client.blame2(
342 file_uri, peg_rev, start_rev, end_rev,
350 file_uri, peg_rev, start_rev, end_rev,
343 receiver, svn.client.create_context())
351 receiver, svn.client.create_context())
344 except svn.core.SubversionException as exc:
352 except svn.core.SubversionException as exc:
345 log.exception("Error during blame operation.")
353 log.exception("Error during blame operation.")
346 raise Exception(
354 raise Exception(
347 f"Blame not supported or file does not exist at path {path}. "
355 f"Blame not supported or file does not exist at path {path}. "
348 f"Error {exc}.")
356 f"Error {exc}.")
349
357
350 return BinaryEnvelope(annotations)
358 return BinaryEnvelope(annotations)
351
359
352 @reraise_safe_exceptions
360 @reraise_safe_exceptions
353 def get_node_type(self, wire, revision=None, path=''):
361 def get_node_type(self, wire, revision=None, path=''):
354
362
355 cache_on, context_uid, repo_id = self._cache_on(wire)
363 cache_on, context_uid, repo_id = self._cache_on(wire)
356 region = self._region(wire)
364 region = self._region(wire)
357
365
358 @region.conditional_cache_on_arguments(condition=cache_on)
366 @region.conditional_cache_on_arguments(condition=cache_on)
359 def _get_node_type(_repo_id, _revision, _path):
367 def _get_node_type(_repo_id, _revision, _path):
360 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
361 fs_ptr = svn.repos.fs(repo)
369 fs_ptr = svn.repos.fs(repo)
362 if _revision is None:
370 if _revision is None:
363 _revision = svn.fs.youngest_rev(fs_ptr)
371 _revision = svn.fs.youngest_rev(fs_ptr)
364 root = svn.fs.revision_root(fs_ptr, _revision)
372 root = svn.fs.revision_root(fs_ptr, _revision)
365 node = svn.fs.check_path(root, path)
373 node = svn.fs.check_path(root, path)
366 return NODE_TYPE_MAPPING.get(node, None)
374 return NODE_TYPE_MAPPING.get(node, None)
367 return _get_node_type(repo_id, revision, path)
375 return _get_node_type(repo_id, revision, path)
368
376
369 @reraise_safe_exceptions
377 @reraise_safe_exceptions
370 def get_nodes(self, wire, revision=None, path=''):
378 def get_nodes(self, wire, revision=None, path=''):
371
379
372 cache_on, context_uid, repo_id = self._cache_on(wire)
380 cache_on, context_uid, repo_id = self._cache_on(wire)
373 region = self._region(wire)
381 region = self._region(wire)
374
382
375 @region.conditional_cache_on_arguments(condition=cache_on)
383 @region.conditional_cache_on_arguments(condition=cache_on)
376 def _get_nodes(_repo_id, _path, _revision):
384 def _get_nodes(_repo_id, _path, _revision):
377 repo = self._factory.repo(wire)
385 repo = self._factory.repo(wire)
378 fsobj = svn.repos.fs(repo)
386 fsobj = svn.repos.fs(repo)
379 if _revision is None:
387 if _revision is None:
380 _revision = svn.fs.youngest_rev(fsobj)
388 _revision = svn.fs.youngest_rev(fsobj)
381 root = svn.fs.revision_root(fsobj, _revision)
389 root = svn.fs.revision_root(fsobj, _revision)
382 entries = svn.fs.dir_entries(root, path)
390 entries = svn.fs.dir_entries(root, path)
383 result = []
391 result = []
384 for entry_path, entry_info in entries.items():
392 for entry_path, entry_info in entries.items():
385 result.append(
393 result.append(
386 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
394 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
387 return result
395 return result
388 return _get_nodes(repo_id, path, revision)
396 return _get_nodes(repo_id, path, revision)
389
397
390 @reraise_safe_exceptions
398 @reraise_safe_exceptions
391 def get_file_content(self, wire, rev=None, path=''):
399 def get_file_content(self, wire, rev=None, path=''):
392 repo = self._factory.repo(wire)
400 repo = self._factory.repo(wire)
393 fsobj = svn.repos.fs(repo)
401 fsobj = svn.repos.fs(repo)
394
402
395 if rev is None:
403 if rev is None:
396 rev = svn.fs.youngest_rev(fsobj)
404 rev = svn.fs.youngest_rev(fsobj)
397
405
398 root = svn.fs.revision_root(fsobj, rev)
406 root = svn.fs.revision_root(fsobj, rev)
399 content = svn.core.Stream(svn.fs.file_contents(root, path))
407 content = svn.core.Stream(svn.fs.file_contents(root, path))
400 return BytesEnvelope(content.read())
408 return BytesEnvelope(content.read())
401
409
402 @reraise_safe_exceptions
410 @reraise_safe_exceptions
403 def get_file_size(self, wire, revision=None, path=''):
411 def get_file_size(self, wire, revision=None, path=''):
404
412
405 cache_on, context_uid, repo_id = self._cache_on(wire)
413 cache_on, context_uid, repo_id = self._cache_on(wire)
406 region = self._region(wire)
414 region = self._region(wire)
407
415
408 @region.conditional_cache_on_arguments(condition=cache_on)
416 @region.conditional_cache_on_arguments(condition=cache_on)
409 def _get_file_size(_repo_id, _revision, _path):
417 def _get_file_size(_repo_id, _revision, _path):
410 repo = self._factory.repo(wire)
418 repo = self._factory.repo(wire)
411 fsobj = svn.repos.fs(repo)
419 fsobj = svn.repos.fs(repo)
412 if _revision is None:
420 if _revision is None:
413 _revision = svn.fs.youngest_revision(fsobj)
421 _revision = svn.fs.youngest_revision(fsobj)
414 root = svn.fs.revision_root(fsobj, _revision)
422 root = svn.fs.revision_root(fsobj, _revision)
415 size = svn.fs.file_length(root, path)
423 size = svn.fs.file_length(root, path)
416 return size
424 return size
417 return _get_file_size(repo_id, revision, path)
425 return _get_file_size(repo_id, revision, path)
418
426
419 def create_repository(self, wire, compatible_version=None):
427 def create_repository(self, wire, compatible_version=None):
420 log.info('Creating Subversion repository in path "%s"', wire['path'])
428 log.info('Creating Subversion repository in path "%s"', wire['path'])
421 self._factory.repo(wire, create=True,
429 self._factory.repo(wire, create=True,
422 compatible_version=compatible_version)
430 compatible_version=compatible_version)
423
431
424 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
432 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
425 obj = urllib.parse.urlparse(src_url)
433 obj = urllib.parse.urlparse(src_url)
426 username = obj.username or ''
434 username = obj.username or ''
427 password = obj.password or ''
435 password = obj.password or ''
428 return username, password, src_url
436 return username, password, src_url
429
437
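For reference, the credential extraction above is plain urllib.parse behaviour; a stdlib-only example with a made-up URL:

    import urllib.parse

    # Made-up URL for illustration; urlparse exposes the userinfo part directly.
    obj = urllib.parse.urlparse('svn+https://deploy:s3cret@svn.example.com/repo/trunk')
    assert (obj.username or '') == 'deploy'
    assert (obj.password or '') == 's3cret'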
430 def import_remote_repository(self, wire, src_url):
438 def import_remote_repository(self, wire, src_url):
431 repo_path = wire['path']
439 repo_path = wire['path']
432 if not self.is_path_valid_repository(wire, repo_path):
440 if not self.is_path_valid_repository(wire, repo_path):
433 raise Exception(
441 raise Exception(
434 "Path %s is not a valid Subversion repository." % repo_path)
442 "Path %s is not a valid Subversion repository." % repo_path)
435
443
436 username, password, src_url = self.get_url_and_credentials(src_url)
444 username, password, src_url = self.get_url_and_credentials(src_url)
437 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
445 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
438 '--trust-server-cert-failures=unknown-ca']
446 '--trust-server-cert-failures=unknown-ca']
439 if username and password:
447 if username and password:
440 rdump_cmd += ['--username', username, '--password', password]
448 rdump_cmd += ['--username', username, '--password', password]
441 rdump_cmd += [src_url]
449 rdump_cmd += [src_url]
442
450
443 rdump = subprocess.Popen(
451 rdump = subprocess.Popen(
444 rdump_cmd,
452 rdump_cmd,
445 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
453 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
446 load = subprocess.Popen(
454 load = subprocess.Popen(
447 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
455 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
448
456
449 # TODO: johbo: This can be a very long operation, might be better
457 # TODO: johbo: This can be a very long operation, might be better
450 # to track some kind of status and provide an api to check if the
458 # to track some kind of status and provide an api to check if the
451 # import is done.
459 # import is done.
452 rdump.wait()
460 rdump.wait()
453 load.wait()
461 load.wait()
454
462
455 log.debug('svnrdump process ended with code: %s', rdump.returncode)
463 log.debug('svnrdump process ended with code: %s', rdump.returncode)
456 if rdump.returncode != 0:
464 if rdump.returncode != 0:
457 errors = rdump.stderr.read()
465 errors = rdump.stderr.read()
458 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
466 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
459
467
460 reason = 'UNKNOWN'
468 reason = 'UNKNOWN'
461 if b'svnrdump: E230001:' in errors:
469 if b'svnrdump: E230001:' in errors:
462 reason = 'INVALID_CERTIFICATE'
470 reason = 'INVALID_CERTIFICATE'
463
471
464 if reason == 'UNKNOWN':
472 if reason == 'UNKNOWN':
465 reason = f'UNKNOWN:{safe_str(errors)}'
473 reason = f'UNKNOWN:{safe_str(errors)}'
466
474
467 raise Exception(
475 raise Exception(
468 'Failed to dump the remote repository from {}. Reason:{}'.format(
476 'Failed to dump the remote repository from {}. Reason:{}'.format(
469 src_url, reason))
477 src_url, reason))
470 if load.returncode != 0:
478 if load.returncode != 0:
471 raise Exception(
479 raise Exception(
472 'Failed to load the dump of remote repository from %s.' %
480 'Failed to load the dump of remote repository from %s.' %
473 (src_url, ))
481 (src_url, ))
474
482
475 def commit(self, wire, message, author, timestamp, updated, removed):
483 def commit(self, wire, message, author, timestamp, updated, removed):
476
484
477 message = safe_bytes(message)
485 message = safe_bytes(message)
478 author = safe_bytes(author)
486 author = safe_bytes(author)
479
487
480 repo = self._factory.repo(wire)
488 repo = self._factory.repo(wire)
481 fsobj = svn.repos.fs(repo)
489 fsobj = svn.repos.fs(repo)
482
490
483 rev = svn.fs.youngest_rev(fsobj)
491 rev = svn.fs.youngest_rev(fsobj)
484 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
492 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
485 txn_root = svn.fs.txn_root(txn)
493 txn_root = svn.fs.txn_root(txn)
486
494
487 for node in updated:
495 for node in updated:
488 TxnNodeProcessor(node, txn_root).update()
496 TxnNodeProcessor(node, txn_root).update()
489 for node in removed:
497 for node in removed:
490 TxnNodeProcessor(node, txn_root).remove()
498 TxnNodeProcessor(node, txn_root).remove()
491
499
492 commit_id = svn.repos.fs_commit_txn(repo, txn)
500 commit_id = svn.repos.fs_commit_txn(repo, txn)
493
501
494 if timestamp:
502 if timestamp:
495 apr_time = apr_time_t(timestamp)
503 apr_time = apr_time_t(timestamp)
496 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
504 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
497 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
505 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
498
506
499 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
507 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
500 return commit_id
508 return commit_id
501
509
502 @reraise_safe_exceptions
510 @reraise_safe_exceptions
503 def diff(self, wire, rev1, rev2, path1=None, path2=None,
511 def diff(self, wire, rev1, rev2, path1=None, path2=None,
504 ignore_whitespace=False, context=3):
512 ignore_whitespace=False, context=3):
505
513
506 wire.update(cache=False)
514 wire.update(cache=False)
507 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
508 diff_creator = SvnDiffer(
516 diff_creator = SvnDiffer(
509 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
517 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
510 try:
518 try:
511 return BytesEnvelope(diff_creator.generate_diff())
519 return BytesEnvelope(diff_creator.generate_diff())
512 except svn.core.SubversionException as e:
520 except svn.core.SubversionException as e:
513 log.exception(
521 log.exception(
514 "Error during diff operation operation. "
522 "Error during diff operation operation. "
515 "Path might not exist %s, %s", path1, path2)
523 "Path might not exist %s, %s", path1, path2)
516 return BytesEnvelope(b'')
524 return BytesEnvelope(b'')
517
525
518 @reraise_safe_exceptions
526 @reraise_safe_exceptions
519 def is_large_file(self, wire, path):
527 def is_large_file(self, wire, path):
520 return False
528 return False
521
529
522 @reraise_safe_exceptions
530 @reraise_safe_exceptions
523 def is_binary(self, wire, rev, path):
531 def is_binary(self, wire, rev, path):
524 cache_on, context_uid, repo_id = self._cache_on(wire)
532 cache_on, context_uid, repo_id = self._cache_on(wire)
525 region = self._region(wire)
533 region = self._region(wire)
526
534
527 @region.conditional_cache_on_arguments(condition=cache_on)
535 @region.conditional_cache_on_arguments(condition=cache_on)
528 def _is_binary(_repo_id, _rev, _path):
536 def _is_binary(_repo_id, _rev, _path):
529 raw_bytes = self.get_file_content(wire, rev, path)
537 raw_bytes = self.get_file_content(wire, rev, path)
530 if not raw_bytes:
538 if not raw_bytes:
531 return False
539 return False
532 return b'\0' in raw_bytes
540 return b'\0' in raw_bytes
533
541
534 return _is_binary(repo_id, rev, path)
542 return _is_binary(repo_id, rev, path)
535
543
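The check above is the usual null-byte heuristic for binary detection; two quick examples with made-up inputs:

    # A PNG header contains NUL bytes, plain text does not.
    assert b'\0' in b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR'
    assert b'\0' not in b'plain text readme\n'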
536 @reraise_safe_exceptions
544 @reraise_safe_exceptions
537 def md5_hash(self, wire, rev, path):
545 def md5_hash(self, wire, rev, path):
538 cache_on, context_uid, repo_id = self._cache_on(wire)
546 cache_on, context_uid, repo_id = self._cache_on(wire)
539 region = self._region(wire)
547 region = self._region(wire)
540
548
541 @region.conditional_cache_on_arguments(condition=cache_on)
549 @region.conditional_cache_on_arguments(condition=cache_on)
542 def _md5_hash(_repo_id, _rev, _path):
550 def _md5_hash(_repo_id, _rev, _path):
543 return ''
551 return ''
544
552
545 return _md5_hash(repo_id, rev, path)
553 return _md5_hash(repo_id, rev, path)
546
554
547 @reraise_safe_exceptions
555 @reraise_safe_exceptions
548 def run_svn_command(self, wire, cmd, **opts):
556 def run_svn_command(self, wire, cmd, **opts):
549 path = wire.get('path', None)
557 path = wire.get('path', None)
550
558
551 if path and os.path.isdir(path):
559 if path and os.path.isdir(path):
552 opts['cwd'] = path
560 opts['cwd'] = path
553
561
554 safe_call = opts.pop('_safe', False)
562 safe_call = opts.pop('_safe', False)
555
563
556 svnenv = os.environ.copy()
564 svnenv = os.environ.copy()
557 svnenv.update(opts.pop('extra_env', {}))
565 svnenv.update(opts.pop('extra_env', {}))
558
566
559 _opts = {'env': svnenv, 'shell': False}
567 _opts = {'env': svnenv, 'shell': False}
560
568
561 try:
569 try:
562 _opts.update(opts)
570 _opts.update(opts)
563 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
571 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
564
572
565 return b''.join(proc), b''.join(proc.stderr)
573 return b''.join(proc), b''.join(proc.stderr)
566 except OSError as err:
574 except OSError as err:
567 if safe_call:
575 if safe_call:
568 return '', safe_str(err).strip()
576 return '', safe_str(err).strip()
569 else:
577 else:
570 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
578 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
571 tb_err = ("Couldn't run svn command (%s).\n"
579 tb_err = ("Couldn't run svn command (%s).\n"
572 "Original error was:%s\n"
580 "Original error was:%s\n"
573 "Call options:%s\n"
581 "Call options:%s\n"
574 % (cmd, err, _opts))
582 % (cmd, err, _opts))
575 log.exception(tb_err)
583 log.exception(tb_err)
576 raise exceptions.VcsException()(tb_err)
584 raise exceptions.VcsException()(tb_err)
577
585
578 @reraise_safe_exceptions
586 @reraise_safe_exceptions
579 def install_hooks(self, wire, force=False):
587 def install_hooks(self, wire, force=False):
580 from vcsserver.hook_utils import install_svn_hooks
588 from vcsserver.hook_utils import install_svn_hooks
581 repo_path = wire['path']
589 repo_path = wire['path']
582 binary_dir = settings.BINARY_DIR
590 binary_dir = settings.BINARY_DIR
583 executable = None
591 executable = None
584 if binary_dir:
592 if binary_dir:
585 executable = os.path.join(binary_dir, 'python3')
593 executable = os.path.join(binary_dir, 'python3')
586 return install_svn_hooks(repo_path, force_create=force)
594 return install_svn_hooks(repo_path, force_create=force)
587
595
588 @reraise_safe_exceptions
596 @reraise_safe_exceptions
589 def get_hooks_info(self, wire):
597 def get_hooks_info(self, wire):
590 from vcsserver.hook_utils import (
598 from vcsserver.hook_utils import (
591 get_svn_pre_hook_version, get_svn_post_hook_version)
599 get_svn_pre_hook_version, get_svn_post_hook_version)
592 repo_path = wire['path']
600 repo_path = wire['path']
593 return {
601 return {
594 'pre_version': get_svn_pre_hook_version(repo_path),
602 'pre_version': get_svn_pre_hook_version(repo_path),
595 'post_version': get_svn_post_hook_version(repo_path),
603 'post_version': get_svn_post_hook_version(repo_path),
596 }
604 }
597
605
598 @reraise_safe_exceptions
606 @reraise_safe_exceptions
599 def set_head_ref(self, wire, head_name):
607 def set_head_ref(self, wire, head_name):
600 pass
608 pass
601
609
602 @reraise_safe_exceptions
610 @reraise_safe_exceptions
603 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
611 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
604 archive_dir_name, commit_id, cache_config):
612 archive_dir_name, commit_id, cache_config):
605
613
606 def walk_tree(root, root_dir, _commit_id):
614 def walk_tree(root, root_dir, _commit_id):
607 """
615 """
608 Special recursive svn repo walker
616 Special recursive svn repo walker
609 """
617 """
610 root_dir = safe_bytes(root_dir)
618 root_dir = safe_bytes(root_dir)
611
619
612 filemode_default = 0o100644
620 filemode_default = 0o100644
613 filemode_executable = 0o100755
621 filemode_executable = 0o100755
614
622
615 file_iter = svn.fs.dir_entries(root, root_dir)
623 file_iter = svn.fs.dir_entries(root, root_dir)
616 for f_name in file_iter:
624 for f_name in file_iter:
617 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
625 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
618
626
619 if f_type == 'dir':
627 if f_type == 'dir':
620 # return only DIR, and then all entries in that dir
628 # return only DIR, and then all entries in that dir
621 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
629 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
622 new_root = os.path.join(root_dir, f_name)
630 new_root = os.path.join(root_dir, f_name)
623 yield from walk_tree(root, new_root, _commit_id)
631 yield from walk_tree(root, new_root, _commit_id)
624 else:
632 else:
625
633
626 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
634 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
627 prop_list = svn.fs.node_proplist(root, f_path)
635 prop_list = svn.fs.node_proplist(root, f_path)
628
636
629 f_mode = filemode_default
637 f_mode = filemode_default
630 if prop_list.get('svn:executable'):
638 if prop_list.get('svn:executable'):
631 f_mode = filemode_executable
639 f_mode = filemode_executable
632
640
633 f_is_link = False
641 f_is_link = False
634 if prop_list.get('svn:special'):
642 if prop_list.get('svn:special'):
635 f_is_link = True
643 f_is_link = True
636
644
637 data = {
645 data = {
638 'is_link': f_is_link,
646 'is_link': f_is_link,
639 'mode': f_mode,
647 'mode': f_mode,
640 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
648 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
641 }
649 }
642
650
643 yield f_path, data, f_type
651 yield f_path, data, f_type
644
652
645 def file_walker(_commit_id, path):
653 def file_walker(_commit_id, path):
646 repo = self._factory.repo(wire)
654 repo = self._factory.repo(wire)
647 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
655 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
648
656
649 def no_content():
657 def no_content():
650 raise NoContentException()
658 raise NoContentException()
651
659
652 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
660 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
653 file_path = f_name
661 file_path = f_name
654
662
655 if f_type == 'dir':
663 if f_type == 'dir':
656 mode = f_data['mode']
664 mode = f_data['mode']
657 yield ArchiveNode(file_path, mode, False, no_content)
665 yield ArchiveNode(file_path, mode, False, no_content)
658 else:
666 else:
659 mode = f_data['mode']
667 mode = f_data['mode']
660 is_link = f_data['is_link']
668 is_link = f_data['is_link']
661 data_stream = f_data['content_stream']
669 data_stream = f_data['content_stream']
662 yield ArchiveNode(file_path, mode, is_link, data_stream)
670 yield ArchiveNode(file_path, mode, is_link, data_stream)
663
671
664 return store_archive_in_cache(
672 return store_archive_in_cache(
665 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
673 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
666
674
667
675
668 class SvnDiffer(object):
676 class SvnDiffer(object):
669 """
677 """
670 Utility to create diffs based on difflib and the Subversion api
678 Utility to create diffs based on difflib and the Subversion api
671 """
679 """
672
680
673 binary_content = False
681 binary_content = False
674
682
675 def __init__(
683 def __init__(
676 self, repo, src_rev, src_path, tgt_rev, tgt_path,
684 self, repo, src_rev, src_path, tgt_rev, tgt_path,
677 ignore_whitespace, context):
685 ignore_whitespace, context):
678 self.repo = repo
686 self.repo = repo
679 self.ignore_whitespace = ignore_whitespace
687 self.ignore_whitespace = ignore_whitespace
680 self.context = context
688 self.context = context
681
689
682 fsobj = svn.repos.fs(repo)
690 fsobj = svn.repos.fs(repo)
683
691
684 self.tgt_rev = tgt_rev
692 self.tgt_rev = tgt_rev
685 self.tgt_path = tgt_path or ''
693 self.tgt_path = tgt_path or ''
686 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
694 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
687 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
695 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
688
696
689 self.src_rev = src_rev
697 self.src_rev = src_rev
690 self.src_path = src_path or self.tgt_path
698 self.src_path = src_path or self.tgt_path
691 self.src_root = svn.fs.revision_root(fsobj, src_rev)
699 self.src_root = svn.fs.revision_root(fsobj, src_rev)
692 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
700 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
693
701
694 self._validate()
702 self._validate()
695
703
696 def _validate(self):
704 def _validate(self):
697 if (self.tgt_kind != svn.core.svn_node_none and
705 if (self.tgt_kind != svn.core.svn_node_none and
698 self.src_kind != svn.core.svn_node_none and
706 self.src_kind != svn.core.svn_node_none and
699 self.src_kind != self.tgt_kind):
707 self.src_kind != self.tgt_kind):
700 # TODO: johbo: proper error handling
708 # TODO: johbo: proper error handling
701 raise Exception(
709 raise Exception(
702 "Source and target are not compatible for diff generation. "
710 "Source and target are not compatible for diff generation. "
703 "Source type: %s, target type: %s" %
711 "Source type: %s, target type: %s" %
704 (self.src_kind, self.tgt_kind))
712 (self.src_kind, self.tgt_kind))
705
713
706 def generate_diff(self) -> bytes:
714 def generate_diff(self) -> bytes:
707 buf = io.BytesIO()
715 buf = io.BytesIO()
708 if self.tgt_kind == svn.core.svn_node_dir:
716 if self.tgt_kind == svn.core.svn_node_dir:
709 self._generate_dir_diff(buf)
717 self._generate_dir_diff(buf)
710 else:
718 else:
711 self._generate_file_diff(buf)
719 self._generate_file_diff(buf)
712 return buf.getvalue()
720 return buf.getvalue()
713
721
714 def _generate_dir_diff(self, buf: io.BytesIO):
722 def _generate_dir_diff(self, buf: io.BytesIO):
715 editor = DiffChangeEditor()
723 editor = DiffChangeEditor()
716 editor_ptr, editor_baton = svn.delta.make_editor(editor)
724 editor_ptr, editor_baton = svn.delta.make_editor(editor)
717 svn.repos.dir_delta2(
725 svn.repos.dir_delta2(
718 self.src_root,
726 self.src_root,
719 self.src_path,
727 self.src_path,
720 '', # src_entry
728 '', # src_entry
721 self.tgt_root,
729 self.tgt_root,
722 self.tgt_path,
730 self.tgt_path,
723 editor_ptr, editor_baton,
731 editor_ptr, editor_baton,
724 authorization_callback_allow_all,
732 authorization_callback_allow_all,
725 False, # text_deltas
733 False, # text_deltas
726 svn.core.svn_depth_infinity, # depth
734 svn.core.svn_depth_infinity, # depth
727 False, # entry_props
735 False, # entry_props
728 False, # ignore_ancestry
736 False, # ignore_ancestry
729 )
737 )
730
738
731 for path, __, change in sorted(editor.changes):
739 for path, __, change in sorted(editor.changes):
732 self._generate_node_diff(
740 self._generate_node_diff(
733 buf, change, path, self.tgt_path, path, self.src_path)
741 buf, change, path, self.tgt_path, path, self.src_path)
734
742
735 def _generate_file_diff(self, buf: io.BytesIO):
743 def _generate_file_diff(self, buf: io.BytesIO):
736 change = None
744 change = None
737 if self.src_kind == svn.core.svn_node_none:
745 if self.src_kind == svn.core.svn_node_none:
738 change = "add"
746 change = "add"
739 elif self.tgt_kind == svn.core.svn_node_none:
747 elif self.tgt_kind == svn.core.svn_node_none:
740 change = "delete"
748 change = "delete"
741 tgt_base, tgt_path = vcspath.split(self.tgt_path)
749 tgt_base, tgt_path = vcspath.split(self.tgt_path)
742 src_base, src_path = vcspath.split(self.src_path)
750 src_base, src_path = vcspath.split(self.src_path)
743 self._generate_node_diff(
751 self._generate_node_diff(
744 buf, change, tgt_path, tgt_base, src_path, src_base)
752 buf, change, tgt_path, tgt_base, src_path, src_base)
745
753
746 def _generate_node_diff(
754 def _generate_node_diff(
747 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
755 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
748
756
749 tgt_path_bytes = safe_bytes(tgt_path)
757 tgt_path_bytes = safe_bytes(tgt_path)
750 tgt_path = safe_str(tgt_path)
758 tgt_path = safe_str(tgt_path)
751
759
752 src_path_bytes = safe_bytes(src_path)
760 src_path_bytes = safe_bytes(src_path)
753 src_path = safe_str(src_path)
761 src_path = safe_str(src_path)
754
762
755 if self.src_rev == self.tgt_rev and tgt_base == src_base:
763 if self.src_rev == self.tgt_rev and tgt_base == src_base:
756 # for consistency with git/hg, return an empty diff when
764 # for consistency with git/hg, return an empty diff when
757 # comparing the same revisions
765 # comparing the same revisions
758 return
766 return
759
767
760 tgt_full_path = vcspath.join(tgt_base, tgt_path)
768 tgt_full_path = vcspath.join(tgt_base, tgt_path)
761 src_full_path = vcspath.join(src_base, src_path)
769 src_full_path = vcspath.join(src_base, src_path)
762
770
763 self.binary_content = False
771 self.binary_content = False
764 mime_type = self._get_mime_type(tgt_full_path)
772 mime_type = self._get_mime_type(tgt_full_path)
765
773
766 if mime_type and not mime_type.startswith(b'text'):
774 if mime_type and not mime_type.startswith(b'text'):
767 self.binary_content = True
775 self.binary_content = True
768 buf.write(b"=" * 67 + b'\n')
776 buf.write(b"=" * 67 + b'\n')
769 buf.write(b"Cannot display: file marked as a binary type.\n")
777 buf.write(b"Cannot display: file marked as a binary type.\n")
770 buf.write(b"svn:mime-type = %s\n" % mime_type)
778 buf.write(b"svn:mime-type = %s\n" % mime_type)
771 buf.write(b"Index: %b\n" % tgt_path_bytes)
779 buf.write(b"Index: %b\n" % tgt_path_bytes)
772 buf.write(b"=" * 67 + b'\n')
780 buf.write(b"=" * 67 + b'\n')
773 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
781 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
774
782
775 if change == 'add':
783 if change == 'add':
776 # TODO: johbo: SVN is missing a zero here compared to git
784 # TODO: johbo: SVN is missing a zero here compared to git
777 buf.write(b"new file mode 10644\n")
785 buf.write(b"new file mode 10644\n")
778
786
779 # TODO(marcink): intro to binary detection of svn patches
787 # TODO(marcink): intro to binary detection of svn patches
780 # if self.binary_content:
788 # if self.binary_content:
781 # buf.write(b'GIT binary patch\n')
789 # buf.write(b'GIT binary patch\n')
782
790
783 buf.write(b"--- /dev/null\t(revision 0)\n")
791 buf.write(b"--- /dev/null\t(revision 0)\n")
784 src_lines = []
792 src_lines = []
785 else:
793 else:
786 if change == 'delete':
794 if change == 'delete':
787 buf.write(b"deleted file mode 10644\n")
795 buf.write(b"deleted file mode 10644\n")
788
796
789 # TODO(marcink): intro to binary detection of svn patches
797 # TODO(marcink): intro to binary detection of svn patches
790 # if self.binary_content:
798 # if self.binary_content:
791 # buf.write('GIT binary patch\n')
799 # buf.write('GIT binary patch\n')
792
800
793 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
801 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
794 src_lines = self._svn_readlines(self.src_root, src_full_path)
802 src_lines = self._svn_readlines(self.src_root, src_full_path)
795
803
796 if change == 'delete':
804 if change == 'delete':
797 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
805 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
798 tgt_lines = []
806 tgt_lines = []
799 else:
807 else:
800 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
808 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
801 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
809 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
802
810
803 # we made our diff header, time to generate the diff content into our buffer
811 # we made our diff header, time to generate the diff content into our buffer
804
812
805 if not self.binary_content:
813 if not self.binary_content:
806 udiff = svn_diff.unified_diff(
814 udiff = svn_diff.unified_diff(
807 src_lines, tgt_lines, context=self.context,
815 src_lines, tgt_lines, context=self.context,
808 ignore_blank_lines=self.ignore_whitespace,
816 ignore_blank_lines=self.ignore_whitespace,
809 ignore_case=False,
817 ignore_case=False,
810 ignore_space_changes=self.ignore_whitespace)
818 ignore_space_changes=self.ignore_whitespace)
811
819
812 buf.writelines(udiff)
820 buf.writelines(udiff)
813
821
814 def _get_mime_type(self, path) -> bytes:
822 def _get_mime_type(self, path) -> bytes:
815 try:
823 try:
816 mime_type = svn.fs.node_prop(
824 mime_type = svn.fs.node_prop(
817 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
825 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
818 except svn.core.SubversionException:
826 except svn.core.SubversionException:
819 mime_type = svn.fs.node_prop(
827 mime_type = svn.fs.node_prop(
820 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
828 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
821 return mime_type
829 return mime_type
822
830
823 def _svn_readlines(self, fs_root, node_path):
831 def _svn_readlines(self, fs_root, node_path):
824 if self.binary_content:
832 if self.binary_content:
825 return []
833 return []
826 node_kind = svn.fs.check_path(fs_root, node_path)
834 node_kind = svn.fs.check_path(fs_root, node_path)
827 if node_kind not in (
835 if node_kind not in (
828 svn.core.svn_node_file, svn.core.svn_node_symlink):
836 svn.core.svn_node_file, svn.core.svn_node_symlink):
829 return []
837 return []
830 content = svn.core.Stream(
838 content = svn.core.Stream(
831 svn.fs.file_contents(fs_root, node_path)).read()
839 svn.fs.file_contents(fs_root, node_path)).read()
832
840
833 return content.splitlines(True)
841 return content.splitlines(True)
834
842
835
843
836 class DiffChangeEditor(svn.delta.Editor):
844 class DiffChangeEditor(svn.delta.Editor):
837 """
845 """
838 Records changes between two given revisions
846 Records changes between two given revisions
839 """
847 """
840
848
841 def __init__(self):
849 def __init__(self):
842 self.changes = []
850 self.changes = []
843
851
844 def delete_entry(self, path, revision, parent_baton, pool=None):
852 def delete_entry(self, path, revision, parent_baton, pool=None):
845 self.changes.append((path, None, 'delete'))
853 self.changes.append((path, None, 'delete'))
846
854
847 def add_file(
855 def add_file(
848 self, path, parent_baton, copyfrom_path, copyfrom_revision,
856 self, path, parent_baton, copyfrom_path, copyfrom_revision,
849 file_pool=None):
857 file_pool=None):
850 self.changes.append((path, 'file', 'add'))
858 self.changes.append((path, 'file', 'add'))
851
859
852 def open_file(self, path, parent_baton, base_revision, file_pool=None):
860 def open_file(self, path, parent_baton, base_revision, file_pool=None):
853 self.changes.append((path, 'file', 'change'))
861 self.changes.append((path, 'file', 'change'))
854
862
855
863
856 def authorization_callback_allow_all(root, path, pool):
864 def authorization_callback_allow_all(root, path, pool):
857 return True
865 return True
858
866
859
867
860 class TxnNodeProcessor(object):
868 class TxnNodeProcessor(object):
861 """
869 """
862 Utility to process the change of one node within a transaction root.
870 Utility to process the change of one node within a transaction root.
863
871
864 It encapsulates the knowledge of how to add, update or remove
872 It encapsulates the knowledge of how to add, update or remove
865 a node for a given transaction root. The purpose is to support the method
873 a node for a given transaction root. The purpose is to support the method
866 `SvnRemote.commit`.
874 `SvnRemote.commit`.
867 """
875 """
868
876
869 def __init__(self, node, txn_root):
877 def __init__(self, node, txn_root):
870 assert_bytes(node['path'])
878 assert_bytes(node['path'])
871
879
872 self.node = node
880 self.node = node
873 self.txn_root = txn_root
881 self.txn_root = txn_root
874
882
875 def update(self):
883 def update(self):
876 self._ensure_parent_dirs()
884 self._ensure_parent_dirs()
877 self._add_file_if_node_does_not_exist()
885 self._add_file_if_node_does_not_exist()
878 self._update_file_content()
886 self._update_file_content()
879 self._update_file_properties()
887 self._update_file_properties()
880
888
881 def remove(self):
889 def remove(self):
882 svn.fs.delete(self.txn_root, self.node['path'])
890 svn.fs.delete(self.txn_root, self.node['path'])
883 # TODO: Clean up directory if empty
891 # TODO: Clean up directory if empty
884
892
885 def _ensure_parent_dirs(self):
893 def _ensure_parent_dirs(self):
886 curdir = vcspath.dirname(self.node['path'])
894 curdir = vcspath.dirname(self.node['path'])
887 dirs_to_create = []
895 dirs_to_create = []
888 while not self._svn_path_exists(curdir):
896 while not self._svn_path_exists(curdir):
889 dirs_to_create.append(curdir)
897 dirs_to_create.append(curdir)
890 curdir = vcspath.dirname(curdir)
898 curdir = vcspath.dirname(curdir)
891
899
892 for curdir in reversed(dirs_to_create):
900 for curdir in reversed(dirs_to_create):
893 log.debug('Creating missing directory "%s"', curdir)
901 log.debug('Creating missing directory "%s"', curdir)
894 svn.fs.make_dir(self.txn_root, curdir)
902 svn.fs.make_dir(self.txn_root, curdir)
895
903
896 def _svn_path_exists(self, path):
904 def _svn_path_exists(self, path):
897 path_status = svn.fs.check_path(self.txn_root, path)
905 path_status = svn.fs.check_path(self.txn_root, path)
898 return path_status != svn.core.svn_node_none
906 return path_status != svn.core.svn_node_none
899
907
900 def _add_file_if_node_does_not_exist(self):
908 def _add_file_if_node_does_not_exist(self):
901 kind = svn.fs.check_path(self.txn_root, self.node['path'])
909 kind = svn.fs.check_path(self.txn_root, self.node['path'])
902 if kind == svn.core.svn_node_none:
910 if kind == svn.core.svn_node_none:
903 svn.fs.make_file(self.txn_root, self.node['path'])
911 svn.fs.make_file(self.txn_root, self.node['path'])
904
912
905 def _update_file_content(self):
913 def _update_file_content(self):
906 assert_bytes(self.node['content'])
914 assert_bytes(self.node['content'])
907
915
908 handler, baton = svn.fs.apply_textdelta(
916 handler, baton = svn.fs.apply_textdelta(
909 self.txn_root, self.node['path'], None, None)
917 self.txn_root, self.node['path'], None, None)
910 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
918 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
911
919
912 def _update_file_properties(self):
920 def _update_file_properties(self):
913 properties = self.node.get('properties', {})
921 properties = self.node.get('properties', {})
914 for key, value in properties.items():
922 for key, value in properties.items():
915 svn.fs.change_node_prop(
923 svn.fs.change_node_prop(
916 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
924 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
917
925
918
926
919 def apr_time_t(timestamp):
927 def apr_time_t(timestamp):
920 """
928 """
921 Convert a Python timestamp into APR timestamp type apr_time_t
929 Convert a Python timestamp into APR timestamp type apr_time_t
922 """
930 """
923 return int(timestamp * 1E6)
931 return int(timestamp * 1E6)
924
932
925
933
926 def svn_opt_revision_value_t(num):
934 def svn_opt_revision_value_t(num):
927 """
935 """
928 Put `num` into a `svn_opt_revision_value_t` structure.
936 Put `num` into a `svn_opt_revision_value_t` structure.
929 """
937 """
930 value = svn.core.svn_opt_revision_value_t()
938 value = svn.core.svn_opt_revision_value_t()
931 value.number = num
939 value.number = num
932 revision = svn.core.svn_opt_revision_t()
940 revision = svn.core.svn_opt_revision_t()
933 revision.kind = svn.core.svn_opt_revision_number
941 revision.kind = svn.core.svn_opt_revision_number
934 revision.value = value
942 revision.value = value
935 return revision
943 return revision
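The two helpers closing this hunk convert plain Python values into the types the Subversion bindings expect. A minimal sketch of how they might be exercised follows, assuming they live in the renamed vcsserver.remote.svn_remote module (the module path is inferred from this changeset) and that the svn SWIG bindings are importable:

    # Hypothetical usage of the helpers defined in the hunk above (module path assumed).
    import svn.core  # Subversion SWIG bindings, as used throughout the hunk
    from vcsserver.remote.svn_remote import apr_time_t, svn_opt_revision_value_t

    # apr_time_t expresses a Unix timestamp as integer microseconds (APR convention).
    assert apr_time_t(1700000000.5) == 1_700_000_000_500_000

    # svn_opt_revision_value_t pins a numeric revision inside the structure
    # that the option-parsing APIs expect (kind == svn_opt_revision_number).
    rev = svn_opt_revision_value_t(42)
    assert rev.kind == svn.core.svn_opt_revision_number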
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver.remote import git
24 from vcsserver.remote import git_remote
25
25
26 SAMPLE_REFS = {
26 SAMPLE_REFS = {
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 }
32 }
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture
36 def git_remote():
36 def git_remote_fix():
37 """
37 """
38 A GitRemote instance with a mock factory.
38 A GitRemote instance with a mock factory.
39 """
39 """
40 factory = Mock()
40 factory = Mock()
41 remote = git.GitRemote(factory)
41 remote = git_remote.GitRemote(factory)
42 return remote
42 return remote
43
43
44
44
45 def test_discover_git_version(git_remote):
45 def test_discover_git_version(git_remote_fix):
46 version = git_remote.discover_git_version()
46 version = git_remote_fix.discover_git_version()
47 assert version
47 assert version
48
48
49
49
50 class TestGitFetch(object):
50 class TestGitFetch(object):
51 def setup_method(self):
51 def setup_method(self):
52 self.mock_repo = Mock()
52 self.mock_repo = Mock()
53 factory = Mock()
53 factory = Mock()
54 factory.repo = Mock(return_value=self.mock_repo)
54 factory.repo = Mock(return_value=self.mock_repo)
55 self.remote_git = git.GitRemote(factory)
55 self.remote_git = git_remote.GitRemote(factory)
56
56
57 def test_fetches_all_when_no_commit_ids_specified(self):
57 def test_fetches_all_when_no_commit_ids_specified(self):
58 def side_effect(determine_wants, *args, **kwargs):
58 def side_effect(determine_wants, *args, **kwargs):
59 determine_wants(SAMPLE_REFS)
59 determine_wants(SAMPLE_REFS)
60
60
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 mock_fetch.side_effect = side_effect
62 mock_fetch.side_effect = side_effect
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 determine_wants = self.mock_repo.object_store.determine_wants_all
64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66
66
67 def test_fetches_specified_commits(self):
67 def test_fetches_specified_commits(self):
68 selected_refs = {
68 selected_refs = {
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 }
71 }
72
72
73 def side_effect(determine_wants, *args, **kwargs):
73 def side_effect(determine_wants, *args, **kwargs):
74 result = determine_wants(SAMPLE_REFS)
74 result = determine_wants(SAMPLE_REFS)
75 assert sorted(result) == sorted(selected_refs.values())
75 assert sorted(result) == sorted(selected_refs.values())
76 return result
76 return result
77
77
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 mock_fetch.side_effect = side_effect
79 mock_fetch.side_effect = side_effect
80 self.remote_git.pull(
80 self.remote_git.pull(
81 wire={}, url='/tmp/', apply_refs=False,
81 wire={}, url='/tmp/', apply_refs=False,
82 refs=list(selected_refs.keys()))
82 refs=list(selected_refs.keys()))
83 determine_wants = self.mock_repo.object_store.determine_wants_all
83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 assert determine_wants.call_count == 0
84 assert determine_wants.call_count == 0
85
85
86 def test_get_remote_refs(self):
86 def test_get_remote_refs(self):
87 factory = Mock()
87 factory = Mock()
88 remote_git = git.GitRemote(factory)
88 remote_git = git_remote.GitRemote(factory)
89 url = 'http://example.com/test/test.git'
89 url = 'https://example.com/test/test.git'
90 sample_refs = {
90 sample_refs = {
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 }
93 }
94
94
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git_remote.Repo', create=False) as mock_repo:
96 mock_repo().get_refs.return_value = sample_refs
96 mock_repo().get_refs.return_value = sample_refs
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 mock_repo().get_refs.assert_called_once_with()
98 mock_repo().get_refs.assert_called_once_with()
99 assert remote_refs == sample_refs
99 assert remote_refs == sample_refs
100
100
101
101
102 class TestReraiseSafeExceptions(object):
102 class TestReraiseSafeExceptions(object):
103
103
104 def test_method_decorated_with_reraise_safe_exceptions(self):
104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 factory = Mock()
105 factory = Mock()
106 git_remote = git.GitRemote(factory)
106 git_remote_instance = git_remote.GitRemote(factory)
107
107
108 def fake_function():
108 def fake_function():
109 return None
109 return None
110
110
111 decorator = git.reraise_safe_exceptions(fake_function)
111 decorator = git_remote.reraise_safe_exceptions(fake_function)
112
112
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote_instance, predicate=inspect.ismethod)
114 for method_name, method in methods:
114 for method_name, method in methods:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 assert method.__func__.__code__ == decorator.__code__
116 assert method.__func__.__code__ == decorator.__code__
117
117
118 @pytest.mark.parametrize('side_effect, expected_type', [
118 @pytest.mark.parametrize('side_effect, expected_type', [
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.HangupException(), 'error'),
123 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 ])
125 ])
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 @git.reraise_safe_exceptions
127 @git_remote.reraise_safe_exceptions
128 def fake_method():
128 def fake_method():
129 raise side_effect
129 raise side_effect
130
130
131 with pytest.raises(Exception) as exc_info:
131 with pytest.raises(Exception) as exc_info:
132 fake_method()
132 fake_method()
133 assert type(exc_info.value) == Exception
133 assert type(exc_info.value) == Exception
134 assert exc_info.value._vcs_kind == expected_type
134 assert exc_info.value._vcs_kind == expected_type
135
135
136
136
137 class TestDulwichRepoWrapper(object):
137 class TestDulwichRepoWrapper(object):
138 def test_calls_close_on_delete(self):
138 def test_calls_close_on_delete(self):
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
140 with patch.object(git_remote.Repo, 'close') as close_mock:
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git_remote.Repo('/tmp/abcde')
143 assert repo is not None
143 assert repo is not None
144 repo.__del__()
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
146
147 close_mock.assert_called_once_with()
147 close_mock.assert_called_once_with()
148
148
149
149
150 class TestGitFactory(object):
150 class TestGitFactory(object):
151 def test_create_repo_returns_dulwich_wrapper(self):
151 def test_create_repo_returns_dulwich_wrapper(self):
152
152
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 mock.side_effect = {'repo_objects': ''}
154 mock.side_effect = {'repo_objects': ''}
155 factory = git.GitFactory()
155 factory = git_remote.GitFactory()
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git_remote.Repo)
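The test module above now imports the Git backend under its renamed git_remote name. A minimal sketch of constructing a GitRemote the same way the updated git_remote_fix fixture does, with a Mock standing in for a real repository factory:

    from mock import Mock
    from vcsserver.remote import git_remote

    def make_stub_git_remote():
        # A Mock factory is enough for unit tests that never open a real
        # repository, mirroring the git_remote_fix fixture above.
        return git_remote.GitRemote(Mock())

    remote = make_stub_git_remote()

Calling remote.discover_git_version() on such an instance, as test_discover_git_version does, additionally requires a git binary on the test machine.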
@@ -1,108 +1,112 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, patch
24 from mock import Mock, patch
25
25
26 from vcsserver import exceptions, hgcompat
26 from vcsserver import exceptions, hgcompat
27 from vcsserver.remote import hg
27 from vcsserver.remote import hg_remote
28
28
29
29
30 class TestDiff(object):
30 class TestDiff(object):
31 def test_raising_safe_exception_when_lookup_failed(self):
31 def test_raising_safe_exception_when_lookup_failed(self):
32
32
33 factory = Mock()
33 factory = Mock()
34 hg_remote = hg.HgRemote(factory)
34 hg_remote_instance = hg_remote.HgRemote(factory)
35 with patch('mercurial.patch.diff') as diff_mock:
35 with patch('mercurial.patch.diff') as diff_mock:
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
37
37
38 with pytest.raises(Exception) as exc_info:
38 with pytest.raises(Exception) as exc_info:
39 hg_remote.diff(
39 hg_remote_instance.diff(
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
41 file_filter=None, opt_git=True, opt_ignorews=True,
41 file_filter=None, opt_git=True, opt_ignorews=True,
42 context=3)
42 context=3)
43 assert type(exc_info.value) == Exception
43 assert type(exc_info.value) == Exception
44 assert exc_info.value._vcs_kind == 'lookup'
44 assert exc_info.value._vcs_kind == 'lookup'
45
45
46
46
47 class TestReraiseSafeExceptions(object):
47 class TestReraiseSafeExceptions(object):
48 original_traceback = None
49
48 def test_method_decorated_with_reraise_safe_exceptions(self):
50 def test_method_decorated_with_reraise_safe_exceptions(self):
49 factory = Mock()
51 factory = Mock()
50 hg_remote = hg.HgRemote(factory)
52 hg_remote_instance = hg_remote.HgRemote(factory)
51 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
53 methods = inspect.getmembers(hg_remote_instance, predicate=inspect.ismethod)
52 decorator = hg.reraise_safe_exceptions(None)
54 decorator = hg_remote.reraise_safe_exceptions(None)
53 for method_name, method in methods:
55 for method_name, method in methods:
54 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
56 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
55 assert method.__func__.__code__ == decorator.__code__
57 assert method.__func__.__code__ == decorator.__code__
56
58
57 @pytest.mark.parametrize('side_effect, expected_type', [
59 @pytest.mark.parametrize('side_effect, expected_type', [
58 (hgcompat.Abort('failed-abort'), 'abort'),
60 (hgcompat.Abort(b'failed-abort'), 'abort'),
59 (hgcompat.InterventionRequired('intervention-required'), 'abort'),
61 (hgcompat.InterventionRequired(b'intervention-required'), 'abort'),
60 (hgcompat.RepoLookupError(), 'lookup'),
62 (hgcompat.RepoLookupError(), 'lookup'),
61 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
63 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
62 (hgcompat.RepoError(), 'error'),
64 (hgcompat.RepoError(), 'error'),
63 (hgcompat.RequirementError(), 'requirement'),
65 (hgcompat.RequirementError(), 'requirement'),
64 ])
66 ])
65 def test_safe_exceptions_reraised(self, side_effect, expected_type):
67 def test_safe_exceptions_reraised(self, side_effect, expected_type):
66 @hg.reraise_safe_exceptions
68 @hg_remote.reraise_safe_exceptions
67 def fake_method():
69 def fake_method():
68 raise side_effect
70 raise side_effect
69
71
70 with pytest.raises(Exception) as exc_info:
72 with pytest.raises(Exception) as exc_info:
71 fake_method()
73 fake_method()
72 assert type(exc_info.value) == Exception
74 assert type(exc_info.value) == Exception
73 assert exc_info.value._vcs_kind == expected_type
75 assert exc_info.value._vcs_kind == expected_type
74
76
75 def test_keeps_original_traceback(self):
77 def test_keeps_original_traceback(self):
76 @hg.reraise_safe_exceptions
78
79 @hg_remote.reraise_safe_exceptions
77 def fake_method():
80 def fake_method():
78 try:
81 try:
79 raise hgcompat.Abort('test-abort')
82 raise hgcompat.Abort(b'test-abort')
80 except:
83 except:
81 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
84 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
82 raise
85 raise
83
86
87 new_traceback = None
84 try:
88 try:
85 fake_method()
89 fake_method()
86 except Exception:
90 except Exception:
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
91 new_traceback = traceback.format_tb(sys.exc_info()[2])
88
92
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
93 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 assert new_traceback_tail == self.original_traceback
94 assert new_traceback_tail == self.original_traceback
91
95
92 def test_maps_unknow_exceptions_to_unhandled(self):
96 def test_maps_unknown_exceptions_to_unhandled(self):
93 @hg.reraise_safe_exceptions
97 @hg_remote.reraise_safe_exceptions
94 def stub_method():
98 def stub_method():
95 raise ValueError('stub')
99 raise ValueError('stub')
96
100
97 with pytest.raises(Exception) as exc_info:
101 with pytest.raises(Exception) as exc_info:
98 stub_method()
102 stub_method()
99 assert exc_info.value._vcs_kind == 'unhandled'
103 assert exc_info.value._vcs_kind == 'unhandled'
100
104
101 def test_does_not_map_known_exceptions(self):
105 def test_does_not_map_known_exceptions(self):
102 @hg.reraise_safe_exceptions
106 @hg_remote.reraise_safe_exceptions
103 def stub_method():
107 def stub_method():
104 raise exceptions.LookupException()('stub')
108 raise exceptions.LookupException()('stub')
105
109
106 with pytest.raises(Exception) as exc_info:
110 with pytest.raises(Exception) as exc_info:
107 stub_method()
111 stub_method()
108 assert exc_info.value._vcs_kind == 'lookup'
112 assert exc_info.value._vcs_kind == 'lookup'
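A condensed sketch of the behaviour these Mercurial tests pin down, assuming the same renamed hg_remote module: a known Mercurial error raised inside a decorated callable resurfaces as a plain Exception tagged with a _vcs_kind marker.

    import pytest
    from vcsserver import hgcompat
    from vcsserver.remote import hg_remote

    @hg_remote.reraise_safe_exceptions
    def failing_lookup():
        # RepoLookupError maps to the 'lookup' kind in the parametrized cases above.
        raise hgcompat.RepoLookupError()

    def test_lookup_is_translated():
        with pytest.raises(Exception) as exc_info:
            failing_lookup()
        assert exc_info.value._vcs_kind == 'lookup'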
@@ -1,103 +1,103 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import mock
19 import mock
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23 from vcsserver.str_utils import ascii_bytes
23 from vcsserver.str_utils import ascii_bytes
24
24
25
25
26 class MockPopen(object):
26 class MockPopen(object):
27 def __init__(self, stderr):
27 def __init__(self, stderr):
28 self.stdout = io.BytesIO(b'')
28 self.stdout = io.BytesIO(b'')
29 self.stderr = io.BytesIO(stderr)
29 self.stderr = io.BytesIO(stderr)
30 self.returncode = 1
30 self.returncode = 1
31
31
32 def wait(self):
32 def wait(self):
33 pass
33 pass
34
34
35
35
36 INVALID_CERTIFICATE_STDERR = '\n'.join([
36 INVALID_CERTIFICATE_STDERR = '\n'.join([
37 'svnrdump: E230001: Unable to connect to a repository at URL url',
37 'svnrdump: E230001: Unable to connect to a repository at URL url',
38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
39 ])
39 ])
40
40
41
41
42 @pytest.mark.parametrize('stderr,expected_reason', [
42 @pytest.mark.parametrize('stderr,expected_reason', [
43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
46 @pytest.mark.xfail(sys.platform == "cygwin",
46 @pytest.mark.xfail(sys.platform == "cygwin",
47 reason="SVN not packaged for Cygwin")
47 reason="SVN not packaged for Cygwin")
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
49 from vcsserver.remote import svn
49 from vcsserver.remote import svn_remote
50 factory = mock.Mock()
50 factory = mock.Mock()
51 factory.repo = mock.Mock(return_value=mock.Mock())
51 factory.repo = mock.Mock(return_value=mock.Mock())
52
52
53 remote = svn.SvnRemote(factory)
53 remote = svn_remote.SvnRemote(factory)
54 remote.is_path_valid_repository = lambda wire, path: True
54 remote.is_path_valid_repository = lambda wire, path: True
55
55
56 with mock.patch('subprocess.Popen',
56 with mock.patch('subprocess.Popen',
57 return_value=MockPopen(ascii_bytes(stderr))):
57 return_value=MockPopen(ascii_bytes(stderr))):
58 with pytest.raises(Exception) as excinfo:
58 with pytest.raises(Exception) as excinfo:
59 remote.import_remote_repository({'path': 'path'}, 'url')
59 remote.import_remote_repository({'path': 'path'}, 'url')
60
60
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
62
62
63 assert excinfo.value.args[0] == expected_error_args
63 assert excinfo.value.args[0] == expected_error_args
64
64
65
65
66 def test_svn_libraries_can_be_imported():
66 def test_svn_libraries_can_be_imported():
67 import svn.client
67 import svn.client # noqa
68 assert svn.client is not None
68 assert svn.client is not None
69
69
70
70
71 @pytest.mark.parametrize('example_url, parts', [
71 @pytest.mark.parametrize('example_url, parts', [
72 ('http://server.com', ('', '', 'http://server.com')),
72 ('http://server.com', ('', '', 'http://server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 ('<script>', ('', '', '<script>')),
75 ('<script>', ('', '', '<script>')),
76 ('http://', ('', '', 'http://')),
76 ('http://', ('', '', 'http://')),
77 ])
77 ])
78 def test_username_password_extraction_from_url(example_url, parts):
78 def test_username_password_extraction_from_url(example_url, parts):
79 from vcsserver.remote import svn
79 from vcsserver.remote import svn_remote
80
80
81 factory = mock.Mock()
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
83
84 remote = svn.SvnRemote(factory)
84 remote = svn_remote.SvnRemote(factory)
85 remote.is_path_valid_repository = lambda wire, path: True
85 remote.is_path_valid_repository = lambda wire, path: True
86
86
87 assert remote.get_url_and_credentials(example_url) == parts
87 assert remote.get_url_and_credentials(example_url) == parts
88
88
89
89
90 @pytest.mark.parametrize('call_url', [
90 @pytest.mark.parametrize('call_url', [
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
94 ])
94 ])
95 def test_check_url(call_url):
95 def test_check_url(call_url):
96 from vcsserver.remote import svn
96 from vcsserver.remote import svn_remote
97 factory = mock.Mock()
97 factory = mock.Mock()
98 factory.repo = mock.Mock(return_value=mock.Mock())
98 factory.repo = mock.Mock(return_value=mock.Mock())
99
99
100 remote = svn.SvnRemote(factory)
100 remote = svn_remote.SvnRemote(factory)
101 remote.is_path_valid_repository = lambda wire, path: True
101 remote.is_path_valid_repository = lambda wire, path: True
102 assert remote.check_url(call_url, {'dummy': 'config'})
102 assert remote.check_url(call_url, {'dummy': 'config'})
103
103
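A small sketch of the credential parsing covered by test_username_password_extraction_from_url above, assuming the renamed svn_remote module and importable svn bindings; the factory and the repository-validity check are mocked exactly as in the test.

    import mock
    from vcsserver.remote import svn_remote

    remote = svn_remote.SvnRemote(mock.Mock())
    remote.is_path_valid_repository = lambda wire, path: True

    # Mirrors one of the parametrized cases above: credentials are split out
    # of the URL and returned alongside the original URL string.
    assert remote.get_url_and_credentials('http://user:pass@server.com') == (
        'user', 'pass', 'http://user:pass@server.com')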
@@ -1,123 +1,123 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import base64
17 import base64
18 import logging
18 import time
19 import time
19 import logging
20
20
21 import msgpack
21 import msgpack
22
22
23 import vcsserver
23 import vcsserver
24 from vcsserver.str_utils import safe_str, ascii_str
24 from vcsserver.str_utils import safe_str
25
25
26 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
27
27
28
28
29 def get_access_path(environ):
29 def get_access_path(environ):
30 path = environ.get('PATH_INFO')
30 path = environ.get('PATH_INFO')
31 return path
31 return path
32
32
33
33
34 def get_user_agent(environ):
34 def get_user_agent(environ):
35 return environ.get('HTTP_USER_AGENT')
35 return environ.get('HTTP_USER_AGENT')
36
36
37
37
38 def get_call_context(request) -> dict:
38 def get_call_context(request) -> dict:
39 cc = {}
39 cc = {}
40 registry = request.registry
40 registry = request.registry
41 if hasattr(registry, 'vcs_call_context'):
41 if hasattr(registry, 'vcs_call_context'):
42 cc.update({
42 cc.update({
43 'X-RC-Method': registry.vcs_call_context.get('method'),
43 'X-RC-Method': registry.vcs_call_context.get('method'),
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
45 })
45 })
46
46
47 return cc
47 return cc
48
48
49
49
50 def get_headers_call_context(environ, strict=True):
50 def get_headers_call_context(environ, strict=True):
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
53 return msgpack.unpackb(packed_cc)
53 return msgpack.unpackb(packed_cc)
54 elif strict:
54 elif strict:
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
56
56
57
57
58 class RequestWrapperTween(object):
58 class RequestWrapperTween(object):
59 def __init__(self, handler, registry):
59 def __init__(self, handler, registry):
60 self.handler = handler
60 self.handler = handler
61 self.registry = registry
61 self.registry = registry
62
62
63 # one-time configuration code goes here
63 # one-time configuration code goes here
64
64
65 def __call__(self, request):
65 def __call__(self, request):
66 start = time.time()
66 start = time.time()
67 log.debug('Starting request time measurement')
67 log.debug('Starting request time measurement')
68 response = None
68 response = None
69
69
70 try:
70 try:
71 response = self.handler(request)
71 response = self.handler(request)
72 finally:
72 finally:
73 ua = get_user_agent(request.environ)
73 ua = get_user_agent(request.environ)
74 call_context = get_call_context(request)
74 call_context = get_call_context(request)
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
76 repo_name = call_context.get('X-RC-Repo-Name', '')
77
77
78 count = request.request_count()
78 count = request.request_count()
79 _ver_ = vcsserver.__version__
79 _ver_ = vcsserver.__version__
80 _path = safe_str(get_access_path(request.environ))
80 _path = safe_str(get_access_path(request.environ))
81
81
82 ip = '127.0.0.1'
82 ip = '127.0.0.1'
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
85
85
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
87
87
88 total = time.time() - start
88 total = time.time() - start
89
89
90 log.info(
90 log.info(
91 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
91 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
92 count, ip, request.environ.get('REQUEST_METHOD'),
92 count, ip, request.environ.get('REQUEST_METHOD'),
93 _view_path, total, ua, _ver_,
93 _view_path, total, ua, _ver_,
94 extra={"time": total, "ver": _ver_, "code": resp_code,
94 extra={"time": total, "ver": _ver_, "code": resp_code,
95 "path": _path, "view_name": match_route, "user_agent": ua,
95 "path": _path, "view_name": match_route, "user_agent": ua,
96 "vcs_method": vcs_method, "repo_name": repo_name}
96 "vcs_method": vcs_method, "repo_name": repo_name}
97 )
97 )
98
98
99 statsd = request.registry.statsd
99 statsd = request.registry.statsd
100 if statsd:
100 if statsd:
101 match_route = request.matched_route.name if request.matched_route else _path
101 match_route = request.matched_route.name if request.matched_route else _path
102 elapsed_time_ms = round(1000.0 * total) # use ms only
102 elapsed_time_ms = round(1000.0 * total) # use ms only
103 statsd.timing(
103 statsd.timing(
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
104 "vcsserver_req_timing.histogram", elapsed_time_ms,
105 tags=[
105 tags=[
106 f"view_name:{match_route}",
106 f"view_name:{match_route}",
107 f"code:{resp_code}"
107 f"code:{resp_code}"
108 ],
108 ],
109 use_decimals=False
109 use_decimals=False
110 )
110 )
111 statsd.incr(
111 statsd.incr(
112 "vcsserver_req_total", tags=[
112 "vcsserver_req_total", tags=[
113 f"view_name:{match_route}",
113 f"view_name:{match_route}",
114 f"code:{resp_code}"
114 f"code:{resp_code}"
115 ])
115 ])
116
116
117 return response
117 return response
118
118
119
119
120 def includeme(config):
120 def includeme(config):
121 config.add_tween(
121 config.add_tween(
122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
123 )
123 )
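The tween above registers itself through includeme(). A minimal sketch of wiring it into a Pyramid application, assuming a bare Configurator (the empty settings dict is illustrative; in the real vcsserver app the registry also provides the statsd client and request.request_count() that the tween reads at request time):

    from pyramid.config import Configurator

    def make_app():
        config = Configurator(settings={})
        # config.include() calls the module's includeme(), which registers
        # RequestWrapperTween via config.add_tween() as shown above.
        config.include('vcsserver.tweens.request_wrapper')
        return config.make_wsgi_app()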