##// END OF EJS Templates
core: renamed remote packages to prevent conflicts with built-in libraries; e.g. the svn remote module shared its name with the svn core library.
super-admin -
r1145:a44d603f default
parent child Browse files
Show More
@@ -1,19 +1,19 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 from .app import create_app
19 from .app import create_app # noqa
@@ -1,292 +1,291 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import re
19 19 import logging
20 from wsgiref.util import FileWrapper
21 20
22 21 from pyramid.config import Configurator
23 22 from pyramid.response import Response, FileIter
24 23 from pyramid.httpexceptions import (
25 24 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 25 HTTPUnprocessableEntity)
27 26
28 27 from vcsserver.lib.rc_json import json
29 28 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 29 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 30 from vcsserver.str_utils import safe_int
32 31
33 32 log = logging.getLogger(__name__)
34 33
35 34
# Base git-lfs media type; handlers append '+json' where required.
GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'  # +json ?
# Matches '/<repo>/info/lfs/<subpath>' request paths.
GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38 37
39 38
def write_response_error(http_exception, text=None):
    """
    Build an HTTP error response carrying the git-lfs JSON content type.

    :param http_exception: pyramid HTTPException class to instantiate
    :param text: optional message, serialized into the body as
        ``{"message": text}``
    :return: the configured HTTPException instance
    """
    content_type = GIT_LFS_CONTENT_TYPE + '+json'
    error = http_exception(content_type=content_type)
    error.content_type = content_type
    if text:
        error.body = json.dumps({'message': text})
    log.debug('LFS: writing response of type %s to client with text:%s',
              http_exception, text)
    return error
49 48
50 49
class AuthHeaderRequired(object):
    """
    Decorator rejecting requests that carry no Authorization header.
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # fargs[0] is the decorated callable, fargs[1] the pyramid request
        request = fargs[1]
        if not request.authorization:
            return write_response_error(HTTPForbidden)
        return func(*fargs[1:], **fkwargs)
65 64
66 65
67 66 # views
68 67
def lfs_objects(request):
    """Legacy v1 objects endpoint: always reports 'not implemented'."""
    log.warning('LFS: v1 api not supported, reporting it back to client')
    return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 72
74 73
@AuthHeaderRequired()
def lfs_objects_batch(request):
    """
    Batch API (v2) endpoint: compute transfer actions for LFS objects.

    The client sends the following information to the Batch endpoint:
      operation - should be download or upload.
      transfers - optional array of transfer adapter identifiers; the basic
                  adapter is assumed when omitted.
      objects   - array of objects, each with:
        oid  - string OID of the LFS object.
        size - integer byte size of the LFS object, at least zero.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    auth = request.authorization
    repo = request.matchdict.get('repo')
    data = request.json
    operation = data.get('operation')
    http_scheme = request.registry.git_lfs_http_scheme

    if operation not in ('download', 'upload'):
        log.debug('LFS: unsupported operation:%s', operation)
        return write_response_error(
            HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)

    if 'objects' not in data:
        log.debug('LFS: missing objects data')
        return write_response_error(
            HTTPBadRequest, 'missing objects data')

    log.debug('LFS: handling operation of type: %s', operation)

    objects = []
    for obj_entry in data['objects']:
        try:
            oid = obj_entry['oid']
            obj_size = obj_entry['size']
        except KeyError:
            log.exception('LFS, failed to extract data')
            return write_response_error(
                HTTPBadRequest, 'unsupported data in objects')

        obj_data = {'oid': oid}
        obj_href = request.route_url(
            'lfs_objects_oid', repo=repo, oid=oid, _scheme=http_scheme)
        obj_verify_href = request.route_url(
            'lfs_objects_verify', repo=repo, _scheme=http_scheme)

        store = LFSOidStore(
            oid, repo, store_location=request.registry.git_lfs_store_path)
        handler = OidHandler(
            store, repo, auth, oid, obj_size, obj_data,
            obj_href, obj_verify_href)

        # this verifies also OIDs
        actions, errors = handler.exec_operation(operation)
        if errors:
            log.warning('LFS: got following errors: %s', errors)
            obj_data['errors'] = errors

        if actions:
            obj_data['actions'] = actions

        obj_data['size'] = obj_size
        obj_data['authenticated'] = True
        objects.append(obj_data)

    result = {'objects': objects, 'transfer': 'basic'}
    log.debug('LFS Response %s', safe_result(result))

    return result
146 145
147 146
def lfs_objects_oid_upload(request):
    """
    Stream the request body into the LFS store under the given oid.

    Reads the raw WSGI input in fixed-size chunks so arbitrarily large
    objects never have to fit in memory.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')
    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    engine = store.get_engine(mode='wb')
    log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)

    body = request.environ['wsgi.input']

    chunk_size = 64 * 1024  # 64kb
    with engine as destination:
        # read in chunks as stream comes in from Gunicorn
        # this is a specific Gunicorn support function.
        # might work differently on waitress
        while chunk := body.read(chunk_size):
            destination.write(chunk)

    return {'upload': 'ok'}
171 170
172 171
def lfs_objects_oid_download(request):
    """Return the raw object content for *oid* as an octet-stream."""
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)

    # TODO(marcink): support range header ?
    # Range: bytes=0-, `bytes=(\d+)\-.*`

    # NOTE(review): the handle is intentionally left open; FileIter streams
    # it as the response body -- confirm the server closes it when done.
    file_obj = open(store.oid_path, 'rb')
    response = Response(
        content_type='application/octet-stream', app_iter=FileIter(file_obj))
    response.headers.add('X-RC-LFS-Response-Oid', str(oid))
    return response
192 191
193 192
def lfs_objects_verify(request):
    """
    Verify an uploaded object: it must exist in the store and match the
    size the client reports.

    Expects a JSON body with ``oid`` and ``size``; responds 404 when the
    oid is missing from the store, 422 on a size mismatch.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    repo = request.matchdict.get('repo')

    data = request.json
    oid = data.get('oid')
    size = safe_int(data.get('size'))

    # BUGFIX: the previous `if not (oid and size)` also rejected size == 0,
    # but the LFS spec allows zero-byte objects ("size must be at least
    # zero"); only a missing oid or an unparsable/missing size is an error.
    if not oid or size is None:
        return write_response_error(
            HTTPBadRequest, 'missing oid and size in request data')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'oid `%s` does not exists in store' % oid)

    store_size = store.size_oid()
    if store_size != size:
        msg = 'requested file size mismatch store size:{} requested:{}'.format(
            store_size, size)
        return write_response_error(
            HTTPUnprocessableEntity, msg)

    return {'message': {'size': 'ok', 'in_store': 'ok'}}
221 220
222 221
def lfs_objects_lock(request):
    """Locking API is not implemented; report that back to the client."""
    return write_response_error(
        HTTPNotImplemented, 'GIT LFS locking api not supported')
226 225
227 226
def not_found(request):
    """Fallback view for unmatched LFS API paths."""
    return write_response_error(
        HTTPNotFound, 'request path not found')
231 230
232 231
def lfs_disabled(request):
    """Catch-all view used when LFS support is turned off for the repo."""
    return write_response_error(
        HTTPNotImplemented, 'GIT LFS disabled for this repo')
236 235
237 236
def git_lfs_app(config):
    """Register every git-lfs route and view on the pyramid *config*."""

    # v1 API deprecation endpoint
    config.add_route('lfs_objects', '/{repo:.*?[^/]}/info/lfs/objects')
    config.add_view(lfs_objects, route_name='lfs_objects',
                    request_method='POST', renderer='json')

    # locking API
    config.add_route('lfs_objects_lock', '/{repo:.*?[^/]}/info/lfs/locks')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
                    request_method=('POST', 'GET'), renderer='json')

    config.add_route('lfs_objects_lock_verify',
                     '/{repo:.*?[^/]}/info/lfs/locks/verify')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
                    request_method=('POST', 'GET'), renderer='json')

    # batch API
    config.add_route('lfs_objects_batch',
                     '/{repo:.*?[^/]}/info/lfs/objects/batch')
    config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
                    request_method='POST', renderer='json')

    # oid upload/download API
    config.add_route('lfs_objects_oid',
                     '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
    config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
                    request_method='PUT', renderer='json')
    config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
                    request_method='GET', renderer='json')

    # verification API
    config.add_route('lfs_objects_verify', '/{repo:.*?[^/]}/info/lfs/verify')
    config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
                    request_method='POST', renderer='json')

    # not found handler for API
    config.add_notfound_view(not_found, renderer='json')
279 278
280 279
def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
    """
    Build the WSGI application serving the git-lfs API.

    When *git_lfs_enabled* is false, every request is answered by the
    `lfs_disabled` view instead.
    """
    config = Configurator()
    if git_lfs_enabled:
        config.include(git_lfs_app)
        config.registry.git_lfs_store_path = git_lfs_store_path
        config.registry.git_lfs_http_scheme = git_lfs_http_scheme
    else:
        # not found handler for API, reporting disabled LFS support
        config.add_notfound_view(lfs_disabled, renderer='json')

    return config.make_wsgi_app()
@@ -1,779 +1,779 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21 import logging
22 22 import collections
23 23 import importlib
24 24 import base64
25 25 import msgpack
26 26 import dataclasses
27 27 import pygit2
28 28
29 29 import http.client
30 30
31 31
32 32 import mercurial.scmutil
33 33 import mercurial.node
34 34
35 35 from vcsserver.lib.rc_json import json
36 36 from vcsserver import exceptions, subprocessio, settings
37 37 from vcsserver.str_utils import ascii_str, safe_str
38 from vcsserver.remote.git import Repository
38 from vcsserver.remote.git_remote import Repository
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
class HooksHttpClient(object):
    """Invokes RhodeCode hooks over HTTP with a msgpack-encoded body."""
    proto = 'msgpack.v1'
    connection = None

    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __repr__(self):
        return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'

    def __call__(self, method, extras):
        connection = http.client.HTTPConnection(self.hooks_uri)
        # binary msgpack body
        headers, body = self._serialize(method, extras)
        log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)

        try:
            try:
                connection.request('POST', '/', body, headers)
            except Exception as error:
                log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
                raise

            response = connection.getresponse()
            try:
                return msgpack.load(response)
            except Exception:
                response_data = response.read()
                log.exception('Failed to decode hook response json data. '
                              'response_code:%s, raw_data:%s',
                              response.status, response_data)
                raise
        finally:
            # always release the connection, even on request/decode failure
            connection.close()

    @classmethod
    def _serialize(cls, hook_name, extras):
        """Return (headers, body) for a hook call named *hook_name*."""
        payload = {
            'method': hook_name,
            'extras': extras
        }
        headers = {
            "rc-hooks-protocol": cls.proto,
            "Connection": "keep-alive"
        }
        return headers, msgpack.packb(payload)
89 89
90 90
class HooksDummyClient(object):
    """Runs hooks in-process from a configured python module."""

    def __init__(self, hooks_module):
        # hooks_module is a dotted import path; it must expose a Hooks
        # context manager with one method per hook name.
        self._hooks_module = importlib.import_module(hooks_module)

    def __call__(self, hook_name, extras):
        with self._hooks_module.Hooks() as hooks:
            return getattr(hooks, hook_name)(extras)
98 98
99 99
class HooksShadowRepoClient(object):
    """No-op hooks client for shadow repositories: always succeeds."""

    def __call__(self, hook_name, extras):
        # shadow repos never execute real hooks
        return {'output': '', 'status': 0}
104 104
105 105
class RemoteMessageWriter(object):
    """Writer base class: subclasses deliver messages to VCS clients."""

    def write(self, message):
        # subclasses must provide the client-specific transport
        raise NotImplementedError()
110 110
111 111
class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message: str):
        # ui is quiet by default; lift it temporarily so the client sees us.
        # TODO: Check why the quiet flag is set by default.
        previous_quiet = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = previous_quiet
124 124
125 125
class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        # defaults to process stdout when no stream is supplied
        self.stdout = stdout or sys.stdout

    def write(self, message: str):
        self.stdout.write(message)
134 134
135 135
class SvnMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        # NOTE(review): writes encoded bytes; sys.stderr is a text stream in
        # py3, so the default target may require .buffer -- confirm callers
        # always pass a binary stream.
        self.stderr.write(message.encode('utf-8'))
145 145
146 146
def _handle_exception(result):
    """Re-raise any exception reported in a hook-call *result* dict."""
    exception_class = result.get('exception')
    exception_traceback = result.get('exception_traceback')
    log.debug('Handling hook-call exception: %s', exception_class)

    if exception_traceback:
        log.error('Got traceback from remote call:%s', exception_traceback)

    # map known remote exception names to local exception factories
    known = {
        'HTTPLockedRC': exceptions.RepositoryLockedException,
        'HTTPBranchProtected': exceptions.RepositoryBranchProtectedException,
        'RepositoryError': exceptions.VcsException,
    }
    factory = known.get(exception_class)
    if factory is not None:
        raise factory()(*result['exception_args'])
    if exception_class:
        raise Exception(
            f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
        )
165 165
166 166
def _get_hooks_client(extras):
    """Pick the hooks transport implied by the *extras* configuration."""
    if extras.get('hooks_uri'):
        # remote HTTP endpoint configured
        return HooksHttpClient(extras['hooks_uri'])
    if extras.get('is_shadow_repo'):
        # shadow repos get the no-op client
        return HooksShadowRepoClient()
    return HooksDummyClient(extras['hooks_module'])
177 177
178 178
def _call_hook(hook_name, extras, writer):
    """
    Invoke *hook_name* through the configured client, forward its output
    to *writer*, and return its integer status.
    """
    client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', client)
    outcome = client(hook_name, extras)
    log.debug('Hooks got result: %s', outcome)
    # raises when the remote side reported an exception
    _handle_exception(outcome)
    writer.write(outcome['output'])
    return outcome['status']
188 188
189 189
190 190 def _extras_from_ui(ui):
191 191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
192 192 if not hook_data:
193 193 # maybe it's inside environ ?
194 194 env_hook_data = os.environ.get('RC_SCM_DATA')
195 195 if env_hook_data:
196 196 hook_data = env_hook_data
197 197
198 198 extras = {}
199 199 if hook_data:
200 200 extras = json.loads(hook_data)
201 201 return extras
202 202
203 203
def _rev_range_hash(repo, node, check_heads=False):
    """
    Collect (commit_id, branch) pairs from *node* up to the repo tip;
    optionally also detect multiple-head situations among their parents.
    """
    from vcsserver.hgcompat import get_ctx

    start = get_ctx(repo, node).rev()
    end = len(repo)

    commits = []
    revs = []
    for rev in range(start, end):
        revs.append(rev)
        ctx = get_ctx(repo, rev)
        commit_id = ascii_str(mercurial.node.hex(ctx.node()))
        commits.append((commit_id, safe_str(ctx.branch())))

    parent_heads = _check_heads(repo, start, end, revs) if check_heads else []
    return commits, parent_heads
222 222
223 223
def _check_heads(repo, start, end, commits):
    """
    Return the head revisions if the pushed *commits* would leave more than
    one head on some branch (a merge suggestion), otherwise [].
    """
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    # collect parents of the incoming revs that lie before the pushed range
    for new_rev in commits:
        for parent_rev in changelog.parentrevs(new_rev):
            if parent_rev == mercurial.node.nullrev:
                continue
            if parent_rev < start:
                parents.add(parent_rev)

    for parent_rev in parents:
        branch = get_ctx(repo, parent_rev).branch()
        # the heads descending from that parent, on the same branch
        parent_heads = {parent_rev}
        reachable = {parent_rev}
        for candidate in range(parent_rev + 1, end):
            if get_ctx(repo, candidate).branch() != branch:
                continue
            for candidate_parent in changelog.parentrevs(candidate):
                if candidate_parent in reachable:
                    reachable.add(candidate)
                    parent_heads.discard(candidate_parent)
                    parent_heads.add(candidate)
        # more than one head? suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []
254 254
255 255
256 256 def _get_git_env():
257 257 env = {}
258 258 for k, v in os.environ.items():
259 259 if k.startswith('GIT'):
260 260 env[k] = v
261 261
262 262 # serialized version
263 263 return [(k, v) for k, v in env.items()]
264 264
265 265
266 266 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
267 267 env = {}
268 268 for k, v in os.environ.items():
269 269 if k.startswith('HG'):
270 270 env[k] = v
271 271
272 272 env['HG_NODE'] = old_rev
273 273 env['HG_NODE_LAST'] = new_rev
274 274 env['HG_TXNID'] = txnid
275 275 env['HG_PENDING'] = repo_path
276 276
277 277 return [(k, v) for k, v in env.items()]
278 278
279 279
def repo_size(ui, repo, **kwargs):
    """Mercurial repo_size hook: delegate to the configured hooks client."""
    return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
283 283
284 284
def pre_pull(ui, repo, **kwargs):
    """Mercurial pre_pull hook: delegate to the configured hooks client."""
    return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
288 288
289 289
def pre_pull_ssh(ui, repo, **kwargs):
    """SSH variant of pre_pull; only active when extras flag SSH mode."""
    if _extras_from_ui(ui).get('SSH'):
        return pre_pull(ui, repo, **kwargs)
    return 0
295 295
296 296
def post_pull(ui, repo, **kwargs):
    """Mercurial post_pull hook: delegate to the configured hooks client."""
    return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
300 300
301 301
def post_pull_ssh(ui, repo, **kwargs):
    """SSH variant of post_pull; only active when extras flag SSH mode."""
    if _extras_from_ui(ui).get('SSH'):
        return post_pull(ui, repo, **kwargs)
    return 0
307 307
308 308
def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    hook_type: str = safe_str(kwargs.get('hooktype'))

    if node and hook_type == 'pretxnchangegroup':
        # group the incoming commit ids by branch
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, branch_commits in branches.items():
            old_rev = ascii_str(kwargs.get('node_last')) or branch_commits[0]
            rev_data.append({
                'total_commits': len(branch_commits),
                'old_rev': old_rev,
                'new_rev': branch_commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
                repo_path=repo_path)

    extras['hook_type'] = hook_type or 'pre_push'
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))
350 350
351 351
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """SSH variant of pre_push; only active when extras flag SSH mode."""
    if _extras_from_ui(ui).get('SSH'):
        return pre_push(ui, repo, node, **kwargs)
    return 0
358 358
359 359
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH: enforce write/admin permission.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code rejects the push
    return 1
375 375
376 376
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook
    """
    extras = _extras_from_ui(ui)
    hook_type: str = safe_str(kwargs.get('hooktype'))

    commit_ids = []
    branches = []
    bookmarks = []
    tags = []

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # bookmarks are stashed on the ui object by the key_push hook
    if hasattr(ui, '_rc_pushkey_bookmarks'):
        bookmarks = ui._rc_pushkey_bookmarks

    extras['hook_type'] = hook_type or 'post_push'
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
408 408
409 409
def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH
    """
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        return post_push(ui, repo, node, **kwargs)
    return 0
417 417
418 418
def key_push(ui, repo, **kwargs):
    """Capture bookmark pushkey updates for later use by post_push."""
    from vcsserver.hgcompat import get_ctx

    if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
    return
426 426
427 427
# backward compat: older installed hook scripts still reference log_pull_action
log_pull_action = post_pull

# backward compat: older installed hook scripts still reference log_push_action
log_push_action = post_push
433 433
434 434
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    # intentionally a no-op
442 442
443 443
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    # intentionally a no-op
451 451
452 452
@dataclasses.dataclass
class HookResponse:
    """Status code and captured output of a hook invocation."""
    status: int
    output: str
457 457
458 458
def git_pre_pull(extras) -> HookResponse:
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        # hook not enabled for this repo
        return HookResponse(0, '')

    buffer = io.StringIO()
    try:
        code = _call_hook('pre_pull', extras, GitMessageWriter(buffer))
    except Exception as error:
        log.exception('Failed to call pre_pull hook')
        code = 128
        buffer.write(f'ERROR: {error}\n')

    return HookResponse(code, buffer.getvalue())
483 483
484 484
def git_post_pull(extras) -> HookResponse:
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        # hook not enabled for this repo
        return HookResponse(0, '')

    buffer = io.StringIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(buffer))
    except Exception as error:
        status = 128
        buffer.write(f'ERROR: {error}\n')

    return HookResponse(status, buffer.getvalue())
506 506
507 507
508 508 def _parse_git_ref_lines(revision_lines):
509 509 rev_data = []
510 510 for revision_line in revision_lines or []:
511 511 old_rev, new_rev, ref = revision_line.strip().split(' ')
512 512 ref_data = ref.split('/', 2)
513 513 if ref_data[1] in ('tags', 'heads'):
514 514 rev_data.append({
515 515 # NOTE(marcink):
516 516 # we're unable to tell total_commits for git at this point
517 517 # but we set the variable for consistency with GIT
518 518 'total_commits': -1,
519 519 'old_rev': old_rev,
520 520 'new_rev': new_rev,
521 521 'ref': ref,
522 522 'type': ref_data[1],
523 523 'name': ref_data[2],
524 524 })
525 525 return rev_data
526 526
527 527
def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Pre push hook.

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0

    empty_commit_id = '0' * 40
    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        if push_ref['type'] != 'heads':
            continue
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if new_branch or delete_branch:
            continue

        old_rev = push_ref['old_rev']
        new_rev = push_ref['new_rev']
        # list commits reachable from the old tip but not from the new one;
        # any output means history was rewritten, i.e. a forced push
        cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, f'^{new_rev}']
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        if stdout:
            push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, GitMessageWriter(sys.stdout))
570 570
571 571
def git_post_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Post push hook for git.

    Parses the ref lines received on stdin of the post-receive hook,
    collects the pushed commit ids / branch names / tag names, and then
    invokes the RhodeCode ``post_push`` (and optionally ``repo_size``)
    hooks with that data.

    :param unused_repo_path: repository path (unused; cwd is the repo).
    :param revision_lines: raw ``<old> <new> <ref>`` lines from git.
    :param env: process environment; ``RC_SCM_DATA`` carries the JSON
        encoded extras prepared by the server.
    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        # push hook not enabled for this repo -- nothing to do
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            # starting new branch case
            if push_ref['old_rev'] == empty_commit_id:
                push_ref_name = push_ref['name']

                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                # if the repository has no HEAD yet (first push into an empty
                # repo), point HEAD at the newly pushed branch
                need_head_set = ''
                with Repository(os.getcwd()) as repo:
                    try:
                        repo.head
                    except pygit2.GitError:
                        need_head_set = f'refs/heads/{push_ref_name}'

                    if need_head_set:
                        repo.set_head(need_head_set)
                        # NOTE(review): printed output goes back to the
                        # pushing client's terminal -- presumably intentional
                        print(f"Setting default branch to {push_ref_name}")

                # list all other branch heads so we can exclude their history
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = safe_str(stdout)
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                # commits reachable from the new ref but from no other head
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(list(map(ascii_str, stdout.splitlines())))

            # delete branch case
            elif push_ref['new_rev'] == empty_commit_id:
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                # update of an existing branch
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                # we get bytes from stdout, we need str to be consistent
                log_revs = list(map(ascii_str, stdout.splitlines()))
                git_revs.extend(log_revs)

                # Pure pygit2 impl. but still 2-3x slower :/
                # results = []
                #
                # with Repository(os.getcwd()) as repo:
                #     repo_new_rev = repo[push_ref['new_rev']]
                #     repo_old_rev = repo[push_ref['old_rev']]
                #     walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
                #
                #     for commit in walker:
                #         if commit.id == repo_old_rev.id:
                #             break
                #         results.append(commit.id.hex)
                #     # reverse the order, can't use GIT_SORT_REVERSE
                #     log_revs = results[::-1]

        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],  # git has no bookmarks; kept for a uniform schema
        'tags': tags,
    }

    stdout = sys.stdout

    # repo_size hook is best-effort: failures must not fail the push
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter(stdout))
        except Exception:
            pass

    status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
    return status_code
682 682
683 683
def _get_extras_from_txn_id(path, txn_id):
    """Read the ``rc-scm-extras`` revprop of an open SVN transaction.

    Uses ``svnlook pget -t`` to fetch the property, which is stored as
    urlsafe-base64 encoded JSON. Returns an empty dict on any failure.
    """
    result = {}
    try:
        svnlook_cmd = [
            settings.SVNLOOK_EXECUTABLE, 'pget',
            '-t', txn_id,
            '--revprop', path, 'rc-scm-extras',
        ]
        out, _err = subprocessio.run_command(svnlook_cmd, env=os.environ.copy())
        result = json.loads(base64.urlsafe_b64decode(out))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return result
697 697
698 698
def _get_extras_from_commit_id(commit_id, path):
    """Read the ``rc-scm-extras`` revprop of a committed SVN revision.

    Uses ``svnlook pget -r`` to fetch the property, which is stored as
    urlsafe-base64 encoded JSON. Returns an empty dict on any failure.
    """
    result = {}
    try:
        svnlook_cmd = [
            settings.SVNLOOK_EXECUTABLE, 'pget',
            '-r', commit_id,
            '--revprop', path, 'rc-scm-extras',
        ]
        out, _err = subprocessio.run_command(svnlook_cmd, env=os.environ.copy())
        result = json.loads(base64.urlsafe_b64decode(out))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return result
712 712
713 713
def svn_pre_commit(repo_path, commit_data, env):
    """Pre-commit hook for SVN: dispatch the RhodeCode ``pre_push`` hook.

    ``commit_data`` is a ``(path, txn_id)`` pair identifying the open
    transaction. Extras come from the ``RC_SCM_DATA`` environment variable,
    with a fallback to the data stored on the transaction itself.
    Returns the hook status code (0 means success / allow the commit).
    """
    path, txn_id = commit_data

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
        if not extras:
            # without extras there is nothing to dispatch -- allow commit
            return 0

    extras.update({
        'hook_type': 'pre_commit',
        'commit_ids': [txn_id],
        'txn_id': txn_id,
        'new_refs': {
            'total_commits': 1,
            'branches': [],
            'bookmarks': [],
            'tags': [],
        },
    })

    return _call_hook('pre_push', extras, SvnMessageWriter())
738 738
739 739
def svn_post_commit(repo_path, commit_data, env):
    """
    Post-commit hook for SVN: dispatch the RhodeCode ``post_push`` hook.

    commit_data is path, rev, txn_id (or a legacy 2-tuple of path, rev).
    Extras come from ``RC_SCM_DATA`` in the environment, falling back to
    the revprop stored on the committed revision.

    :return: status code of the hook. 0 for success.
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    # NOTE(review): a commit_data of any other length leaves path/commit_id
    # unbound and raises NameError below -- confirm callers always pass 2 or 3
    # elements, or add an explicit error branch.

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
        if not extras:
            # without extras there is nothing to dispatch
            return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    # repo_size hook is best-effort: failures must not fail the commit
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,774 +1,768 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import time
25 25 import wsgiref.util
26 26 import tempfile
27 27 import psutil
28 28
29 29 from itertools import chain
30 30
31 31 import msgpack
32 32 import configparser
33 33
34 34 from pyramid.config import Configurator
35 35 from pyramid.wsgi import wsgiapp
36 36 from pyramid.response import Response
37 37
38 38 from vcsserver.base import BytesEnvelope, BinaryEnvelope
39 39 from vcsserver.lib.rc_json import json
40 40 from vcsserver.config.settings_maker import SettingsMaker
41 41 from vcsserver.str_utils import safe_int
42 42 from vcsserver.lib.statsd_client import StatsdClient
43 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
44
45 log = logging.getLogger(__name__)
46
47 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
48 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
49
50 try:
51 locale.setlocale(locale.LC_ALL, '')
52 except locale.Error as e:
53 log.error(
54 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
55 os.environ['LC_ALL'] = 'C'
56
43 from vcsserver.tweens.request_wrapper import get_headers_call_context
57 44
58 45 import vcsserver
59 46 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
60 47 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
61 48 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
62 49 from vcsserver.echo_stub.echo_app import EchoApp
63 50 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
64 51 from vcsserver.lib.exc_tracking import store_exception, format_exc
65 52 from vcsserver.server import VcsServer
66 53
67 54 strict_vcs = True
68 55
69 56 git_import_err = None
70 57 try:
71 from vcsserver.remote.git import GitFactory, GitRemote
58 from vcsserver.remote.git_remote import GitFactory, GitRemote
72 59 except ImportError as e:
73 60 GitFactory = None
74 61 GitRemote = None
75 62 git_import_err = e
76 63 if strict_vcs:
77 64 raise
78 65
79 66
80 67 hg_import_err = None
81 68 try:
82 from vcsserver.remote.hg import MercurialFactory, HgRemote
69 from vcsserver.remote.hg_remote import MercurialFactory, HgRemote
83 70 except ImportError as e:
84 71 MercurialFactory = None
85 72 HgRemote = None
86 73 hg_import_err = e
87 74 if strict_vcs:
88 75 raise
89 76
90 77
91 78 svn_import_err = None
92 79 try:
93 from vcsserver.remote.svn import SubversionFactory, SvnRemote
80 from vcsserver.remote.svn_remote import SubversionFactory, SvnRemote
94 81 except ImportError as e:
95 82 SubversionFactory = None
96 83 SvnRemote = None
97 84 svn_import_err = e
98 85 if strict_vcs:
99 86 raise
100 87
88 log = logging.getLogger(__name__)
89
90 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
91 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
92
93 try:
94 locale.setlocale(locale.LC_ALL, '')
95 except locale.Error as e:
96 log.error(
97 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
98 os.environ['LC_ALL'] = 'C'
99
101 100
102 101 def _is_request_chunked(environ):
103 102 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
104 103 return stream
105 104
106 105
def log_max_fd():
    """Best-effort logging of the file-descriptor hard limit of this process.

    Any failure (e.g. platform without rlimit support) is silently ignored;
    this is purely informational.
    """
    try:
        _soft, hard_limit = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)
        log.info('Max file descriptors value: %s', hard_limit)
    except Exception:
        pass
113 112
114 113
class VCS(object):
    """Container wiring up one remote facade per supported VCS backend.

    Instantiates the git/hg/svn remotes (when their imports succeeded at
    module load time) plus the generic VcsServer, and configures the
    process locale first.
    """

    def __init__(self, locale_conf=None, cache_config=None):
        # locale_conf: locale string to apply via LC_ALL ('' -> use env vars)
        self.locale = locale_conf
        # cache_config: settings dict; stored for later use by remotes
        self.cache_config = cache_config
        self._configure_locale()

        log_max_fd()

        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.error("Git client import failed: %s", git_import_err)

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.error("Mercurial client import failed: %s", hg_import_err)

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.error("Subversion client import failed: %s", svn_import_err)

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply ``self.locale`` (or the environment locale) via LC_ALL."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info('Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception('Cannot set locale, not configuring the locale system')
163 162
164 163
class WsgiProxy(object):
    """Bridge feeding a msgpack-encoded request body into a wrapped
    remote-wsgi handler and streaming the msgpack-encoded reply back.

    The response is a sequence of msgpack frames: error, status, headers,
    followed by the payload chunks.
    """

    def __init__(self, wsgi):
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        payload = msgpack.unpackb(environ['wsgi.input'].read())

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                payload['environment'], payload['input_data'],
                *payload['args'], **payload['kwargs'])
        except Exception as e:
            # report the failure in-band; the caller inspects the error frame
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        # three metadata frames first, then the body chunks
        for frame in chain([error, status, headers], data):
            yield msgpack.packb(frame)
197 196
198 197
def not_found(request):
    """Fallback JSON view returned for any unmatched route."""
    return {'status': '404 NOT FOUND'}
201 200
202 201
class VCSViewPredicate(object):
    """Pyramid view predicate selecting views by supported VCS backend."""

    def __init__(self, val, config):
        # `val` maps backend names ('hg', 'git', 'svn', ...) to remote objects
        self.remotes = val

    def text(self):
        backend_names = list(self.remotes.keys())
        return f'vcs view method = {backend_names}'

    phash = text

    def __call__(self, context, request):
        """
        View predicate that returns true if given backend is supported by
        defined remotes.
        """
        return request.matchdict.get('backend') in self.remotes
219 218
220 219
class HTTPApplication(object):
    """Pyramid application exposing the VCS remotes over HTTP.

    Wires up routing, the msgpack renderer, streaming endpoints for the
    hg/git wire protocols, and centralised exception handling.
    """

    # exception type names that may be reported back to the client verbatim
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    remote_wsgi = remote_wsgi
    _use_echo_app = False

    def __init__(self, settings=None, global_config=None):

        self.config = Configurator(settings=settings)
        # Init our statsd at very start
        self.config.registry.statsd = StatsdClient.statsd
        self.config.registry.vcs_call_context = {}

        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')
        self.config.include('vcsserver.lib.rc_cache.archive_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        # backend name -> remote object used to dispatch RPC calls
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        self._configure_settings(global_config, settings)

        self._configure()

    def _configure_settings(self, global_config, app_settings):
        """
        Configure the settings module.
        """
        settings_merged = global_config.copy()
        settings_merged.update(app_settings)

        git_path = app_settings.get('git_path', None)
        if git_path:
            settings.GIT_EXECUTABLE = git_path
        binary_dir = app_settings.get('core.binary_dir', None)
        if binary_dir:
            settings.BINARY_DIR = binary_dir

        # Store the settings to make them available to other modules.
        vcsserver.PYRAMID_SETTINGS = settings_merged
        vcsserver.CONFIG = settings_merged

    def _configure(self):
        """Register routes, views, renderers, predicates and tweens."""
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # catch-all exception view translating errors to client responses
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

    def wsgi_app(self):
        """Return the configured WSGI application."""
        return self.config.make_wsgi_app()

    def _vcs_view_params(self, request):
        """Decode a msgpack RPC request into (payload, remote, method, args, kwargs)."""
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)

        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        request.registry.vcs_call_context = {
            'method': method,
            'repo_name': payload.get('_repo_name'),
        }

        if wire:
            try:
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                pass
            # the wire dict is always the first positional argument of a remote
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            # also we SKIP printing out any of those methods args since they maybe excessive
            just_args_methods = {
                'commitctx': ('content', 'removed', 'updated'),
                'commit': ('content', 'removed', 'updated')
            }
            if method in just_args_methods:
                skip_args = just_args_methods[method]
                call_args = ''
                call_kwargs = {}
                for k in kwargs:
                    if k in skip_args:
                        # replace our skip key with dummy
                        call_kwargs[k] = f'RemovedParam({k})'
                    else:
                        call_kwargs[k] = kwargs[k]
            else:
                call_args = args[1:]
                call_kwargs = kwargs

            log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, call_kwargs, context_uid, repo_state_uid)

        statsd = request.registry.statsd
        if statsd:
            statsd.incr(
                'vcsserver_method_total', tags=[
                    f"method:{method}",
                ])
        return payload, remote, method, args, kwargs

    def vcs_view(self, request):
        """Dispatch a single RPC call; errors are serialized into the response."""
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # expected lookup failures are not worth persisting
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = format_exc(exc_info)

            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': str(e),
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }

            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }
        log.debug('Serving data for method %s', method)
        return resp

    def vcs_stream_view(self, request):
        """Dispatch a streaming RPC call and chunk its byte result to the client."""
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        # this method has a stream: marker we remove it here
        method = method.split('stream:')[-1]
        chunk_size = safe_int(payload.get('chunk_size')) or 4096

        resp = getattr(remote, method)(*args, **kwargs)

        def get_chunked_data(method_resp):
            stream = io.BytesIO(method_resp)
            while 1:
                chunk = stream.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        response = Response(app_iter=get_chunked_data(resp))
        response.content_type = 'application/octet-stream'

        return response

    def status_view(self, request):
        """Lightweight health-check endpoint."""
        import vcsserver
        return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
                'pid': os.getpid()}

    def service_view(self, request):
        """Diagnostics endpoint: version, parsed .ini sections and environment."""
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            log.exception('Failed to read .ini file for display')

        environ = list(os.environ.items())

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp

    def _msgpack_renderer_factory(self, info):
        """Pyramid renderer factory serializing view results with msgpack."""

        def _render(value, system):
            bin_type = False
            res = value.get('result')
            if isinstance(res, BytesEnvelope):
                log.debug('Result is wrapped in BytesEnvelope type')
                bin_type = True
            elif isinstance(res, BinaryEnvelope):
                log.debug('Result is wrapped in BinaryEnvelope type')
                value['result'] = res.val
                bin_type = True

            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # only override content type if the view did not set one
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
                    if bin_type:
                        response.content_type = 'application/x-msgpack-bin'

            return msgpack.packb(value, use_bin_type=bin_type)
        return _render

    def set_env_from_config(self, environ, config):
        """Copy user/ip details from the repo config into the WSGI environ."""
        dict_conf = {}
        try:
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True

    def hg_proxy(self):
        """Build the proxy view forwarding to the hg remote-wsgi handler."""
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy

    def git_proxy(self):
        """Build the proxy view forwarding to the git remote-wsgi handler."""
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy

    def hg_stream(self):
        """Build the streaming view serving the mercurial wire protocol."""
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                call_context = get_headers_call_context(environ)

                repo_path = call_context['repo_path']
                repo_name = call_context['repo_name']
                config = call_context['repo_config']

                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = call_context['path_info']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                return app(environ, ResponseFilter(start_response))
            return _hg_stream

    def git_stream(self):
        """Build the streaming view serving the git (and git-lfs) protocols."""
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')

                call_context = get_headers_call_context(environ)

                repo_path = call_context['repo_path']
                repo_name = call_context['repo_name']
                config = call_context['repo_config']

                environ['PATH_INFO'] = call_context['path_info']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                    log.debug(
                        'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                        path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

        return _git_stream

    def handle_vcs_exception(self, exception, request):
        """Translate known VCS exceptions into HTTP responses; re-raise the rest."""
        _vcs_kind = getattr(exception, '_vcs_kind', '')

        if _vcs_kind == 'repo_locked':
            headers_call_context = get_headers_call_context(request.environ)
            status_code = safe_int(headers_call_context['locked_status_code'])

            return HTTPRepoLocked(
                title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])

        elif _vcs_kind == 'repo_branch_protected':
            # Get custom repo-branch-protected status code if present.
            return HTTPRepoBranchProtected(
                title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])

        exc_info = request.exc_info
        store_exception(id(exc_info), exc_info)

        traceback_info = 'unavailable'
        if request.exc_info:
            traceback_info = format_exc(request.exc_info)

        log.error(
            'error occurred handling this request for path: %s, \n%s',
            request.path, traceback_info)

        statsd = request.registry.statsd
        if statsd:
            exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
            statsd.incr('vcsserver_exception_total',
                        tags=[f"type:{exc_type}"])
        raise exception
682 678
683 679
class ResponseFilter(object):
    """``start_response`` wrapper that drops hop-by-hop headers.

    Hop-by-hop headers (Connection, Transfer-Encoding, ...) are meaningful
    only for a single transport link and must not be forwarded by a
    WSGI gateway (PEP 3333).
    """

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        end_to_end = []
        for name, value in response_headers:
            if wsgiref.util.is_hop_by_hop(name):
                continue
            end_to_end.append((name, value))
        return self._start_response(status, tuple(end_to_end), exc_info)
694 690
695 691
def sanitize_settings_and_apply_defaults(global_config, settings):
    """Fill in defaults for all known settings and expand env references.

    Mutates ``settings`` in place via SettingsMaker; also configures logging
    from a sibling ``logging.ini`` next to the main config file.
    """
    # NOTE(review): appears unused beyond construction -- TODO confirm
    # SettingsMaker() has no required side effects before removing.
    _global_settings_maker = SettingsMaker(global_config)
    settings_maker = SettingsMaker(settings)

    settings_maker.make_setting('logging.autoconfigure', False, parser='bool')

    logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
    settings_maker.enable_logging(logging_conf)

    # Default includes, possible to change as a user
    pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
    log.debug("Using the following pyramid.includes: %s", pyramid_includes)

    settings_maker.make_setting('__file__', global_config.get('__file__'))

    settings_maker.make_setting('pyramid.default_locale_name', 'en')
    settings_maker.make_setting('locale', 'en_US.UTF-8')

    settings_maker.make_setting('core.binary_dir', '')

    temp_store = tempfile.gettempdir()
    default_cache_dir = os.path.join(temp_store, 'rc_cache')
    # save default, cache dir, and use it for all backends later.
    default_cache_dir = settings_maker.make_setting(
        'cache_dir',
        default=default_cache_dir, default_when_empty=True,
        parser='dir:ensured')

    # exception store cache
    settings_maker.make_setting(
        'exception_tracker.store_path',
        default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
        parser='dir:ensured'
    )

    # repo_object cache defaults
    settings_maker.make_setting(
        'rc_cache.repo_object.backend',
        default='dogpile.cache.rc.file_namespace',
        parser='string')
    settings_maker.make_setting(
        'rc_cache.repo_object.expiration_time',
        default=30 * 24 * 60 * 60,  # 30days
        parser='int')
    settings_maker.make_setting(
        'rc_cache.repo_object.arguments.filename',
        default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
        parser='string')

    # statsd
    settings_maker.make_setting('statsd.enabled', False, parser='bool')
    settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
    settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
    settings_maker.make_setting('statsd.statsd_prefix', '')
    settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')

    # expand ${ENV_VAR} style references now that all defaults are in place
    settings_maker.env_expand()
753 749
754 750
def main(global_config, **settings):
    """WSGI entry point: sanitize settings, bootstrap statsd, build the app."""
    started_at = time.time()
    log.info('Pyramid app config starting')

    if MercurialFactory:
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

    # Fill in and sanitize the defaults & do ENV expansion
    sanitize_settings_and_apply_defaults(global_config, settings)

    # init and bootstrap StatsdClient
    StatsdClient.setup(settings)

    http_app = HTTPApplication(settings=settings, global_config=global_config)
    pyramid_app = http_app.wsgi_app()
    log.info('Pyramid app created and configured in %.2fs', time.time() - started_at)
    return pyramid_app
773
774
1 NO CONTENT: file renamed from vcsserver/remote/git.py to vcsserver/remote/git_remote.py
1 NO CONTENT: file renamed from vcsserver/remote/hg.py to vcsserver/remote/hg_remote.py
@@ -1,935 +1,943 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 import os
20 20 import subprocess
21 21 from urllib.error import URLError
22 22 import urllib.parse
23 23 import logging
24 24 import posixpath as vcspath
25 25 import io
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29 import traceback
30 30
31 31
32 32 import svn.client # noqa
33 33 import svn.core # noqa
34 34 import svn.delta # noqa
35 35 import svn.diff # noqa
36 36 import svn.fs # noqa
37 37 import svn.repos # noqa
38 38
39 39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
40 from vcsserver.base import (
41 RepoFactory,
42 raise_from_original,
43 ArchiveNode,
44 store_archive_in_cache,
45 BytesEnvelope,
46 BinaryEnvelope,
47 )
41 48 from vcsserver.exceptions import NoContentException
42 49 from vcsserver.str_utils import safe_str, safe_bytes
43 50 from vcsserver.type_utils import assert_bytes
44 51 from vcsserver.vcs_base import RemoteBase
45 52 from vcsserver.lib.svnremoterepo import svnremoterepo
53
46 54 log = logging.getLogger(__name__)
47 55
48 56
# Maps historical fs-config aliases for the 'compatible-version' option to
# the concrete Subversion version string each alias stands for.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Default 'compatible-version' fs-config used when creating new repositories.
current_compatible_version = '1.14'
58 66
59 67
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Exceptions already tagged with a ``_vcs_kind`` marker are re-raised
    unchanged; anything else is wrapped in ``exceptions.UnhandledException``
    so remote callers never see raw SVN binding errors.
    """
    from functools import wraps

    @wraps(func)  # fix: preserve __name__/__doc__ of the wrapped remote call
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e), e)
            raise
    return wrapper
71 79
72 80
class SubversionFactory(RepoFactory):
    # Identifies this factory within the shared RepoFactory machinery.
    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        """Open (or create) the repository at ``wire['path']``.

        :param wire: connection dict; only ``path`` is used here.
        :param create: when True a fresh repository is created on disk.
        :param compatible_version: optional alias or plain version string
            written into the new repo's ``compatible-version`` fs-config.
        """
        path = svn.core.svn_path_canonicalize(wire['path'])
        if create:
            fs_config = {'compatible-version': current_compatible_version}
            if compatible_version:
                # Accept either a legacy alias (svn_compatible_versions_map)
                # or a plain version string such as '1.8'.
                compatible_version_string = \
                    svn_compatible_versions_map.get(compatible_version) \
                    or compatible_version
                fs_config['compatible-version'] = compatible_version_string

            log.debug('Create SVN repo with config `%s`', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)
        else:
            repo = svn.repos.open(path)

        log.debug('repository created: got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
100 108
101 109
# Translates svn node-kind constants into the neutral strings used by the
# RhodeCode API ('file' / 'dir'); unknown kinds map to None via .get().
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
106 114
107 115
108 116 class SvnRemote(RemoteBase):
109 117
    def __init__(self, factory, hg_factory=None):
        # Factory producing low-level svn repo objects (SubversionFactory).
        # hg_factory is accepted for API symmetry with other remotes but is
        # not used by this backend.
        self._factory = factory

        self._bulk_methods = {
            # NOT supported in SVN ATM...
        }
        # Per-file attribute resolvers consumed by bulk_file_request().
        self._bulk_file_methods = {
            "size": self.get_file_size,
            "data": self.get_file_content,
            "flags": self.get_node_type,
            "is_binary": self.is_binary,
            "md5": self.md5_hash
        }
123 131
    @reraise_safe_exceptions
    def bulk_file_request(self, wire, commit_id, path, pre_load):
        """Resolve several file attributes (``pre_load``) in one cached call.

        :param commit_id: revision given as str; cast to int for SVN.
        :param pre_load: iterable of attribute names; each must be a key of
            ``self._bulk_file_methods``, otherwise VcsException is raised.
        :returns: BinaryEnvelope wrapping an attr -> value dict.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        # since we use unified API, we need to cast from str to in for SVN
        commit_id = int(commit_id)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_file_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, _commit_id, _path)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
            return result

        return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
145 153
146 154 @reraise_safe_exceptions
147 155 def discover_svn_version(self):
148 156 try:
149 157 import svn.core
150 158 svn_ver = svn.core.SVN_VERSION
151 159 except ImportError:
152 160 svn_ver = None
153 161 return safe_str(svn_ver)
154 162
155 163 @reraise_safe_exceptions
156 164 def is_empty(self, wire):
157 165 try:
158 166 return self.lookup(wire, -1) == 0
159 167 except Exception:
160 168 log.exception("failed to read object_store")
161 169 return False
162 170
    def check_url(self, url, config):
        """Validate that *url* points at a reachable Subversion repository.

        Fetches the remote repo UUID as a liveness probe; any failure is
        translated into URLError. ``config`` is accepted for interface
        symmetry with other backends but unused here.
        """
        # uuid function gets only valid UUID from proper repo, else
        # throws exception
        username, password, src_url = self.get_url_and_credentials(url)
        try:
            svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
        except Exception:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
            raise URLError(f'"{url}" is not a valid Subversion source url.')
        return True
175 183
176 184 def is_path_valid_repository(self, wire, path):
177 185
178 186 # NOTE(marcink): short circuit the check for SVN repo
179 187 # the repos.open might be expensive to check, but we have one cheap
180 188 # pre condition that we can use, to check for 'format' file
181 189
182 190 if not os.path.isfile(os.path.join(path, 'format')):
183 191 return False
184 192
185 193 try:
186 194 svn.repos.open(path)
187 195 except svn.core.SubversionException:
188 196 tb = traceback.format_exc()
189 197 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
190 198 return False
191 199 return True
192 200
193 201 @reraise_safe_exceptions
194 202 def verify(self, wire,):
195 203 repo_path = wire['path']
196 204 if not self.is_path_valid_repository(wire, repo_path):
197 205 raise Exception(
198 206 "Path %s is not a valid Subversion repository." % repo_path)
199 207
200 208 cmd = ['svnadmin', 'info', repo_path]
201 209 stdout, stderr = subprocessio.run_command(cmd)
202 210 return stdout
203 211
    @reraise_safe_exceptions
    def lookup(self, wire, revision):
        """Resolve *revision* to a revision number.

        Only HEAD-style lookups are supported: *revision* must be -1, None
        or 'HEAD'; anything else raises NotImplementedError.
        """
        if revision not in [-1, None, 'HEAD']:
            raise NotImplementedError
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        head = svn.fs.youngest_rev(fs_ptr)
        return head
212 220
213 221 @reraise_safe_exceptions
214 222 def lookup_interval(self, wire, start_ts, end_ts):
215 223 repo = self._factory.repo(wire)
216 224 fsobj = svn.repos.fs(repo)
217 225 start_rev = None
218 226 end_rev = None
219 227 if start_ts:
220 228 start_ts_svn = apr_time_t(start_ts)
221 229 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
222 230 else:
223 231 start_rev = 1
224 232 if end_ts:
225 233 end_ts_svn = apr_time_t(end_ts)
226 234 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
227 235 else:
228 236 end_rev = svn.fs.youngest_rev(fsobj)
229 237 return start_rev, end_rev
230 238
    @reraise_safe_exceptions
    def revision_properties(self, wire, revision):
        """Return the revision-property dict (svn:author, svn:date, ...) of
        *revision*, cached per repository."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision_properties(_repo_id, _revision):
            repo = self._factory.repo(wire)
            fs_ptr = svn.repos.fs(repo)
            return svn.fs.revision_proplist(fs_ptr, revision)
        return _revision_properties(repo_id, revision)
243 251
    def revision_changes(self, wire, revision):
        """Collect the paths added/changed/removed in *revision*.

        Replays the revision through a ChangeCollector editor; directory
        nodes are skipped entirely.

        :returns: dict with 'added', 'changed' and 'removed' path lists.
        """
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.items():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.

            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action {} not supported on path {}".format(
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes
287 295
    @reraise_safe_exceptions
    def node_history(self, wire, path, revision, limit):
        """Return the revisions that touched *path*, newest first, up to
        *limit* entries. Copy history is not followed (cross_copies=False)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
            cross_copies = False
            repo = self._factory.repo(wire)
            fsobj = svn.repos.fs(repo)
            rev_root = svn.fs.revision_root(fsobj, revision)

            history_revisions = []
            history = svn.fs.node_history(rev_root, path)
            history = svn.fs.history_prev(history, cross_copies)
            while history:
                __, node_revision = svn.fs.history_location(history)
                history_revisions.append(node_revision)
                if limit and len(history_revisions) >= limit:
                    break
                history = svn.fs.history_prev(history, cross_copies)
            return history_revisions
        return _assert_correct_path(context_uid, repo_id, path, revision, limit)
311 319
    @reraise_safe_exceptions
    def node_properties(self, wire, path, revision):
        """Return the svn property dict of *path* at *revision* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_properties(_repo_id, _path, _revision):
            repo = self._factory.repo(wire)
            fsobj = svn.repos.fs(repo)
            rev_root = svn.fs.revision_root(fsobj, revision)
            return svn.fs.node_proplist(rev_root, path)
        return _node_properties(repo_id, path, revision)
324 332
    def file_annotate(self, wire, path, revision):
        """Annotate (blame) *path* at *revision*.

        :returns: BinaryEnvelope of (line_no, revision, line) tuples.

        NOTE(review): unlike the sibling methods this one is not wrapped in
        @reraise_safe_exceptions -- confirm whether that is intentional.
        """
        abs_path = 'file://' + urllib.request.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        # 'revision' below shadows the outer parameter on purpose: it is the
        # revision the blamed line originates from.
        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                f"Blame not supported or file does not exist at path {path}. "
                f"Error {exc}.")

        return BinaryEnvelope(annotations)
351 359
352 360 @reraise_safe_exceptions
353 361 def get_node_type(self, wire, revision=None, path=''):
354 362
355 363 cache_on, context_uid, repo_id = self._cache_on(wire)
356 364 region = self._region(wire)
357 365
358 366 @region.conditional_cache_on_arguments(condition=cache_on)
359 367 def _get_node_type(_repo_id, _revision, _path):
360 368 repo = self._factory.repo(wire)
361 369 fs_ptr = svn.repos.fs(repo)
362 370 if _revision is None:
363 371 _revision = svn.fs.youngest_rev(fs_ptr)
364 372 root = svn.fs.revision_root(fs_ptr, _revision)
365 373 node = svn.fs.check_path(root, path)
366 374 return NODE_TYPE_MAPPING.get(node, None)
367 375 return _get_node_type(repo_id, revision, path)
368 376
369 377 @reraise_safe_exceptions
370 378 def get_nodes(self, wire, revision=None, path=''):
371 379
372 380 cache_on, context_uid, repo_id = self._cache_on(wire)
373 381 region = self._region(wire)
374 382
375 383 @region.conditional_cache_on_arguments(condition=cache_on)
376 384 def _get_nodes(_repo_id, _path, _revision):
377 385 repo = self._factory.repo(wire)
378 386 fsobj = svn.repos.fs(repo)
379 387 if _revision is None:
380 388 _revision = svn.fs.youngest_rev(fsobj)
381 389 root = svn.fs.revision_root(fsobj, _revision)
382 390 entries = svn.fs.dir_entries(root, path)
383 391 result = []
384 392 for entry_path, entry_info in entries.items():
385 393 result.append(
386 394 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
387 395 return result
388 396 return _get_nodes(repo_id, path, revision)
389 397
390 398 @reraise_safe_exceptions
391 399 def get_file_content(self, wire, rev=None, path=''):
392 400 repo = self._factory.repo(wire)
393 401 fsobj = svn.repos.fs(repo)
394 402
395 403 if rev is None:
396 404 rev = svn.fs.youngest_rev(fsobj)
397 405
398 406 root = svn.fs.revision_root(fsobj, rev)
399 407 content = svn.core.Stream(svn.fs.file_contents(root, path))
400 408 return BytesEnvelope(content.read())
401 409
402 410 @reraise_safe_exceptions
403 411 def get_file_size(self, wire, revision=None, path=''):
404 412
405 413 cache_on, context_uid, repo_id = self._cache_on(wire)
406 414 region = self._region(wire)
407 415
408 416 @region.conditional_cache_on_arguments(condition=cache_on)
409 417 def _get_file_size(_repo_id, _revision, _path):
410 418 repo = self._factory.repo(wire)
411 419 fsobj = svn.repos.fs(repo)
412 420 if _revision is None:
413 421 _revision = svn.fs.youngest_revision(fsobj)
414 422 root = svn.fs.revision_root(fsobj, _revision)
415 423 size = svn.fs.file_length(root, path)
416 424 return size
417 425 return _get_file_size(repo_id, revision, path)
418 426
    def create_repository(self, wire, compatible_version=None):
        # Delegates to SubversionFactory._create_repo with create=True;
        # compatible_version may be an alias or a plain version string.
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)
423 431
424 432 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
425 433 obj = urllib.parse.urlparse(src_url)
426 434 username = obj.username or ''
427 435 password = obj.password or ''
428 436 return username, password, src_url
429 437
    def import_remote_repository(self, wire, src_url):
        """Mirror a remote repository into the local one via
        ``svnrdump dump | svnadmin load``.

        Credentials embedded in *src_url* are passed to svnrdump explicitly.
        Raises on dump/load failure; certificate errors (E230001) are
        reported with reason INVALID_CERTIFICATE.
        """
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        # NOTE(review): rdump.stderr is a PIPE but is only read after wait();
        # a very chatty stderr could fill the pipe buffer and stall the dump
        # -- confirm and consider communicate() instead.
        rdump.wait()
        load.wait()

        log.debug('Return process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)

            reason = 'UNKNOWN'
            if b'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = f'UNKNOWN:{safe_str(errors)}'

            raise Exception(
                'Failed to dump the remote repository from {}. Reason:{}'.format(
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))
474 482
    def commit(self, wire, message, author, timestamp, updated, removed):
        """Create a new commit from ``updated``/``removed`` node dicts.

        :param updated: nodes (dicts with 'path', 'content', optional
            'properties') added or modified via TxnNodeProcessor.
        :param removed: nodes to delete.
        :param timestamp: optional UNIX timestamp written as svn:date.
        :returns: the newly committed revision number.
        """
        message = safe_bytes(message)
        author = safe_bytes(author)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # Backdate the commit by overriding the auto-generated svn:date.
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id
501 509
502 510 @reraise_safe_exceptions
503 511 def diff(self, wire, rev1, rev2, path1=None, path2=None,
504 512 ignore_whitespace=False, context=3):
505 513
506 514 wire.update(cache=False)
507 515 repo = self._factory.repo(wire)
508 516 diff_creator = SvnDiffer(
509 517 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
510 518 try:
511 519 return BytesEnvelope(diff_creator.generate_diff())
512 520 except svn.core.SubversionException as e:
513 521 log.exception(
514 522 "Error during diff operation operation. "
515 523 "Path might not exist %s, %s", path1, path2)
516 524 return BytesEnvelope(b'')
517 525
    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # SVN has no largefiles/LFS concept, so nothing is ever "large" here;
        # the method exists to satisfy the common remote interface.
        return False
521 529
    @reraise_safe_exceptions
    def is_binary(self, wire, rev, path):
        """Heuristic binary check: a file is binary when it contains a NUL.

        NOTE(review): get_file_content returns a BytesEnvelope, so both the
        truthiness test and ``b'\\0' in raw_bytes`` rely on BytesEnvelope
        behaving like bytes -- confirm against vcsserver.base.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _rev, _path):
            raw_bytes = self.get_file_content(wire, rev, path)
            if not raw_bytes:
                return False
            return b'\0' in raw_bytes

        return _is_binary(repo_id, rev, path)
535 543
    @reraise_safe_exceptions
    def md5_hash(self, wire, rev, path):
        """Placeholder: md5 is not implemented for SVN and always returns ''.

        Kept (and cached) so the bulk-file API stays uniform across backends.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _rev, _path):
            return ''

        return _md5_hash(repo_id, rev, path)
546 554
    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
        """Run an external svn command through SubprocessIOChunker.

        :param cmd: argv list.
        :param opts: extra Popen options plus two special keys:
            ``_safe`` -- when True, OSError is returned as ('', message)
            instead of raising; ``extra_env`` -- dict merged into os.environ.
        :returns: (stdout_bytes, stderr_bytes) on success.
        :raises exceptions.VcsException: when the command cannot be started
            and ``_safe`` is not set.
        """
        path = wire.get('path', None)

        # Run inside the repository directory when one is available.
        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = opts.pop('_safe', False)

        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return b''.join(proc), b''.join(proc.stderr)
        except OSError as err:
            if safe_call:
                return '', safe_str(err).strip()
            else:
                cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
                tb_err = ("Couldn't run svn command (%s).\n"
                          "Original error was:%s\n"
                          "Call options:%s\n"
                          % (cmd, err, _opts))
                log.exception(tb_err)
                raise exceptions.VcsException()(tb_err)
577 585
578 586 @reraise_safe_exceptions
579 587 def install_hooks(self, wire, force=False):
580 588 from vcsserver.hook_utils import install_svn_hooks
581 589 repo_path = wire['path']
582 590 binary_dir = settings.BINARY_DIR
583 591 executable = None
584 592 if binary_dir:
585 593 executable = os.path.join(binary_dir, 'python3')
586 594 return install_svn_hooks(repo_path, force_create=force)
587 595
588 596 @reraise_safe_exceptions
589 597 def get_hooks_info(self, wire):
590 598 from vcsserver.hook_utils import (
591 599 get_svn_pre_hook_version, get_svn_post_hook_version)
592 600 repo_path = wire['path']
593 601 return {
594 602 'pre_version': get_svn_pre_hook_version(repo_path),
595 603 'post_version': get_svn_post_hook_version(repo_path),
596 604 }
597 605
    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        # Intentional no-op: Subversion has no movable HEAD reference; the
        # method exists to satisfy the common remote interface.
        pass
601 609
    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id, cache_config):
        """Build (and cache) an archive of the repository at *commit_id*.

        Walks the svn tree recursively and streams every node into
        ``store_archive_in_cache`` via the ``file_walker`` generator.
        """

        def walk_tree(root, root_dir, _commit_id):
            """
            Special recursive svn repo walker
            """
            root_dir = safe_bytes(root_dir)

            filemode_default = 0o100644
            filemode_executable = 0o100755

            file_iter = svn.fs.dir_entries(root, root_dir)
            for f_name in file_iter:
                f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)

                if f_type == 'dir':
                    # return only DIR, and then all entries in that dir
                    yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
                    new_root = os.path.join(root_dir, f_name)
                    yield from walk_tree(root, new_root, _commit_id)
                else:

                    f_path = os.path.join(root_dir, f_name).rstrip(b'/')
                    # NOTE(review): proplist is looked up with str keys while
                    # the path is bytes -- confirm the bindings return str
                    # keys here.
                    prop_list = svn.fs.node_proplist(root, f_path)

                    f_mode = filemode_default
                    if prop_list.get('svn:executable'):
                        f_mode = filemode_executable

                    f_is_link = False
                    if prop_list.get('svn:special'):
                        f_is_link = True

                    data = {
                        'is_link': f_is_link,
                        'mode': f_mode,
                        'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
                    }

                    yield f_path, data, f_type

        def file_walker(_commit_id, path):
            # Generator of ArchiveNode objects consumed by store_archive_in_cache.
            repo = self._factory.repo(wire)
            root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))

            def no_content():
                raise NoContentException()

            for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
                file_path = f_name

                if f_type == 'dir':
                    mode = f_data['mode']
                    yield ArchiveNode(file_path, mode, False, no_content)
                else:
                    mode = f_data['mode']
                    is_link = f_data['is_link']
                    data_stream = f_data['content_stream']
                    yield ArchiveNode(file_path, mode, is_link, data_stream)

        return store_archive_in_cache(
            file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
666 674
667 675
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set per node while generating: when True, the textual diff body is
    # skipped and a binary-content notice is emitted instead.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # Default the source path to the target path when not given.
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Reject diffs between incompatible node kinds (file vs dir).
        if (self.tgt_kind != svn.core.svn_node_none and
            self.src_kind != svn.core.svn_node_none and
            self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self) -> bytes:
        """Produce the complete diff as bytes (directory or single file)."""
        buf = io.BytesIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf: io.BytesIO):
        # Replay the tree delta and emit one node diff per changed path,
        # sorted for deterministic output.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '', # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False, # text_deltas
            svn.core.svn_depth_infinity, # depth
            False, # entry_props
            False, # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf: io.BytesIO):
        # Classify the single-file change as add/delete (None means modify).
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
        """Write a git-style diff header plus the unified diff for one node."""

        tgt_path_bytes = safe_bytes(tgt_path)
        tgt_path = safe_str(tgt_path)

        src_path_bytes = safe_bytes(src_path)
        src_path = safe_str(src_path)

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith(b'text'):
            self.binary_content = True
            buf.write(b"=" * 67 + b'\n')
            buf.write(b"Cannot display: file marked as a binary type.\n")
            buf.write(b"svn:mime-type = %s\n" % mime_type)
        buf.write(b"Index: %b\n" % tgt_path_bytes)
        buf.write(b"=" * 67 + b'\n')
        buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write(b"new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write(b"deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
            tgt_lines = []
        else:
            buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        # we made our diff header, time to generate the diff content into our buffer

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)

            buf.writelines(udiff)

    def _get_mime_type(self, path) -> bytes:
        # Prefer the target side; fall back to the source side when the node
        # does not exist in the target revision.
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        # Binary nodes and non-file nodes yield no text lines.
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()

        return content.splitlines(True)
834 842
835 843
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # List of (path, item_kind, action) tuples collected during replay.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
854 862
855 863
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for ``svn.repos.dir_delta2`` that unconditionally
    grants access to every path."""
    del root, path, pool  # unused: access is always permitted
    return True
858 866
859 867
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node: dict with a bytes 'path' plus optional 'content' (bytes)
        # and 'properties' (str -> str) entries.
        assert_bytes(node['path'])

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create or modify the node: parents, content, then properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up from the node's directory collecting missing ancestors,
        # then create them outermost-first (hence reversed()).
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert_bytes(self.node['content'])

        # Send the full new content as a single text delta against no base.
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
917 925
918 926
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    microseconds_per_second = 1E6
    return int(timestamp * microseconds_per_second)
924 932
925 933
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    # Wrap the plain number and tag the revision object as a numeric
    # (svn_opt_revision_number) revision specifier for the svn client API.
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,162 +1,162 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 from vcsserver.remote import git
24 from vcsserver.remote import git_remote
25 25
26 26 SAMPLE_REFS = {
27 27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 32 }
33 33
34 34
35 35 @pytest.fixture
36 def git_remote():
36 def git_remote_fix():
37 37 """
38 38 A GitRemote instance with a mock factory.
39 39 """
40 40 factory = Mock()
41 remote = git.GitRemote(factory)
41 remote = git_remote.GitRemote(factory)
42 42 return remote
43 43
44 44
45 def test_discover_git_version(git_remote):
46 version = git_remote.discover_git_version()
45 def test_discover_git_version(git_remote_fix):
46 version = git_remote_fix.discover_git_version()
47 47 assert version
48 48
49 49
50 50 class TestGitFetch(object):
51 51 def setup_method(self):
52 52 self.mock_repo = Mock()
53 53 factory = Mock()
54 54 factory.repo = Mock(return_value=self.mock_repo)
55 self.remote_git = git.GitRemote(factory)
55 self.remote_git = git_remote.GitRemote(factory)
56 56
57 57 def test_fetches_all_when_no_commit_ids_specified(self):
58 58 def side_effect(determine_wants, *args, **kwargs):
59 59 determine_wants(SAMPLE_REFS)
60 60
61 61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 62 mock_fetch.side_effect = side_effect
63 63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 66
67 67 def test_fetches_specified_commits(self):
68 68 selected_refs = {
69 69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
70 70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 71 }
72 72
73 73 def side_effect(determine_wants, *args, **kwargs):
74 74 result = determine_wants(SAMPLE_REFS)
75 75 assert sorted(result) == sorted(selected_refs.values())
76 76 return result
77 77
78 78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 79 mock_fetch.side_effect = side_effect
80 80 self.remote_git.pull(
81 81 wire={}, url='/tmp/', apply_refs=False,
82 82 refs=list(selected_refs.keys()))
83 83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 84 assert determine_wants.call_count == 0
85 85
86 86 def test_get_remote_refs(self):
87 87 factory = Mock()
88 remote_git = git.GitRemote(factory)
89 url = 'http://example.com/test/test.git'
88 remote_git = git_remote.GitRemote(factory)
89 url = 'https://example.com/test/test.git'
90 90 sample_refs = {
91 91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 93 }
94 94
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git_remote.Repo', create=False) as mock_repo:
96 96 mock_repo().get_refs.return_value = sample_refs
97 97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 98 mock_repo().get_refs.assert_called_once_with()
99 99 assert remote_refs == sample_refs
100 100
101 101
102 102 class TestReraiseSafeExceptions(object):
103 103
104 104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 105 factory = Mock()
106 git_remote = git.GitRemote(factory)
106 git_remote_instance = git_remote.GitRemote(factory)
107 107
108 108 def fake_function():
109 109 return None
110 110
111 decorator = git.reraise_safe_exceptions(fake_function)
111 decorator = git_remote.reraise_safe_exceptions(fake_function)
112 112
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote_instance, predicate=inspect.ismethod)
114 114 for method_name, method in methods:
115 115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 116 assert method.__func__.__code__ == decorator.__code__
117 117
118 118 @pytest.mark.parametrize('side_effect, expected_type', [
119 119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 123 (dulwich.errors.HangupException(), 'error'),
124 124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 125 ])
126 126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 @git.reraise_safe_exceptions
127 @git_remote.reraise_safe_exceptions
128 128 def fake_method():
129 129 raise side_effect
130 130
131 131 with pytest.raises(Exception) as exc_info:
132 132 fake_method()
133 133 assert type(exc_info.value) == Exception
134 134 assert exc_info.value._vcs_kind == expected_type
135 135
136 136
137 137 class TestDulwichRepoWrapper(object):
138 138 def test_calls_close_on_delete(self):
139 139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
140 with patch.object(git_remote.Repo, 'close') as close_mock:
141 141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git_remote.Repo('/tmp/abcde')
143 143 assert repo is not None
144 144 repo.__del__()
145 145 # can't use del repo as in python3 this isn't always calling .__del__()
146 146
147 147 close_mock.assert_called_once_with()
148 148
149 149
150 150 class TestGitFactory(object):
151 151 def test_create_repo_returns_dulwich_wrapper(self):
152 152
153 153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 154 mock.side_effect = {'repo_objects': ''}
155 factory = git.GitFactory()
155 factory = git_remote.GitFactory()
156 156 wire = {
157 157 'path': '/tmp/abcde'
158 158 }
159 159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 160 with isdir_patcher:
161 161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git_remote.Repo)
@@ -1,108 +1,112 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, patch
25 25
26 26 from vcsserver import exceptions, hgcompat
27 from vcsserver.remote import hg
27 from vcsserver.remote import hg_remote
28 28
29 29
30 30 class TestDiff(object):
31 31 def test_raising_safe_exception_when_lookup_failed(self):
32 32
33 33 factory = Mock()
34 hg_remote = hg.HgRemote(factory)
34 hg_remote_instance = hg_remote.HgRemote(factory)
35 35 with patch('mercurial.patch.diff') as diff_mock:
36 36 diff_mock.side_effect = LookupError(b'deadbeef', b'index', b'message')
37 37
38 38 with pytest.raises(Exception) as exc_info:
39 hg_remote.diff(
39 hg_remote_instance.diff(
40 40 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
41 41 file_filter=None, opt_git=True, opt_ignorews=True,
42 42 context=3)
43 43 assert type(exc_info.value) == Exception
44 44 assert exc_info.value._vcs_kind == 'lookup'
45 45
46 46
47 47 class TestReraiseSafeExceptions(object):
48 original_traceback = None
49
48 50 def test_method_decorated_with_reraise_safe_exceptions(self):
49 51 factory = Mock()
50 hg_remote = hg.HgRemote(factory)
51 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
52 decorator = hg.reraise_safe_exceptions(None)
52 hg_remote_instance = hg_remote.HgRemote(factory)
53 methods = inspect.getmembers(hg_remote_instance, predicate=inspect.ismethod)
54 decorator = hg_remote.reraise_safe_exceptions(None)
53 55 for method_name, method in methods:
54 56 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
55 57 assert method.__func__.__code__ == decorator.__code__
56 58
57 59 @pytest.mark.parametrize('side_effect, expected_type', [
58 (hgcompat.Abort('failed-abort'), 'abort'),
59 (hgcompat.InterventionRequired('intervention-required'), 'abort'),
60 (hgcompat.Abort(b'failed-abort'), 'abort'),
61 (hgcompat.InterventionRequired(b'intervention-required'), 'abort'),
60 62 (hgcompat.RepoLookupError(), 'lookup'),
61 63 (hgcompat.LookupError(b'deadbeef', b'index', b'message'), 'lookup'),
62 64 (hgcompat.RepoError(), 'error'),
63 65 (hgcompat.RequirementError(), 'requirement'),
64 66 ])
65 67 def test_safe_exceptions_reraised(self, side_effect, expected_type):
66 @hg.reraise_safe_exceptions
68 @hg_remote.reraise_safe_exceptions
67 69 def fake_method():
68 70 raise side_effect
69 71
70 72 with pytest.raises(Exception) as exc_info:
71 73 fake_method()
72 74 assert type(exc_info.value) == Exception
73 75 assert exc_info.value._vcs_kind == expected_type
74 76
75 77 def test_keeps_original_traceback(self):
76 @hg.reraise_safe_exceptions
78
79 @hg_remote.reraise_safe_exceptions
77 80 def fake_method():
78 81 try:
79 raise hgcompat.Abort('test-abort')
82 raise hgcompat.Abort(b'test-abort')
80 83 except:
81 84 self.original_traceback = traceback.format_tb(sys.exc_info()[2])
82 85 raise
83 86
87 new_traceback = None
84 88 try:
85 89 fake_method()
86 90 except Exception:
87 91 new_traceback = traceback.format_tb(sys.exc_info()[2])
88 92
89 93 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 94 assert new_traceback_tail == self.original_traceback
91 95
92 def test_maps_unknow_exceptions_to_unhandled(self):
93 @hg.reraise_safe_exceptions
96 def test_maps_unknown_exceptions_to_unhandled(self):
97 @hg_remote.reraise_safe_exceptions
94 98 def stub_method():
95 99 raise ValueError('stub')
96 100
97 101 with pytest.raises(Exception) as exc_info:
98 102 stub_method()
99 103 assert exc_info.value._vcs_kind == 'unhandled'
100 104
101 105 def test_does_not_map_known_exceptions(self):
102 @hg.reraise_safe_exceptions
106 @hg_remote.reraise_safe_exceptions
103 107 def stub_method():
104 108 raise exceptions.LookupException()('stub')
105 109
106 110 with pytest.raises(Exception) as exc_info:
107 111 stub_method()
108 112 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,103 +1,103 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import mock
20 20 import pytest
21 21 import sys
22 22
23 23 from vcsserver.str_utils import ascii_bytes
24 24
25 25
26 26 class MockPopen(object):
27 27 def __init__(self, stderr):
28 28 self.stdout = io.BytesIO(b'')
29 29 self.stderr = io.BytesIO(stderr)
30 30 self.returncode = 1
31 31
32 32 def wait(self):
33 33 pass
34 34
35 35
36 36 INVALID_CERTIFICATE_STDERR = '\n'.join([
37 37 'svnrdump: E230001: Unable to connect to a repository at URL url',
38 38 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
39 39 ])
40 40
41 41
42 42 @pytest.mark.parametrize('stderr,expected_reason', [
43 43 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
44 44 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
45 45 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
46 46 @pytest.mark.xfail(sys.platform == "cygwin",
47 47 reason="SVN not packaged for Cygwin")
48 48 def test_import_remote_repository_certificate_error(stderr, expected_reason):
49 from vcsserver.remote import svn
49 from vcsserver.remote import svn_remote
50 50 factory = mock.Mock()
51 51 factory.repo = mock.Mock(return_value=mock.Mock())
52 52
53 remote = svn.SvnRemote(factory)
53 remote = svn_remote.SvnRemote(factory)
54 54 remote.is_path_valid_repository = lambda wire, path: True
55 55
56 56 with mock.patch('subprocess.Popen',
57 57 return_value=MockPopen(ascii_bytes(stderr))):
58 58 with pytest.raises(Exception) as excinfo:
59 59 remote.import_remote_repository({'path': 'path'}, 'url')
60 60
61 61 expected_error_args = 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason)
62 62
63 63 assert excinfo.value.args[0] == expected_error_args
64 64
65 65
66 66 def test_svn_libraries_can_be_imported():
67 import svn.client
67 import svn.client # noqa
68 68 assert svn.client is not None
69 69
70 70
71 71 @pytest.mark.parametrize('example_url, parts', [
72 72 ('http://server.com', ('', '', 'http://server.com')),
73 73 ('http://user@server.com', ('user', '', 'http://user@server.com')),
74 74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 75 ('<script>', ('', '', '<script>')),
76 76 ('http://', ('', '', 'http://')),
77 77 ])
78 78 def test_username_password_extraction_from_url(example_url, parts):
79 from vcsserver.remote import svn
79 from vcsserver.remote import svn_remote
80 80
81 81 factory = mock.Mock()
82 82 factory.repo = mock.Mock(return_value=mock.Mock())
83 83
84 remote = svn.SvnRemote(factory)
84 remote = svn_remote.SvnRemote(factory)
85 85 remote.is_path_valid_repository = lambda wire, path: True
86 86
87 87 assert remote.get_url_and_credentials(example_url) == parts
88 88
89 89
90 90 @pytest.mark.parametrize('call_url', [
91 91 b'https://svn.code.sf.net/p/svnbook/source/trunk/',
92 92 b'https://marcink@svn.code.sf.net/p/svnbook/source/trunk/',
93 93 b'https://marcink:qweqwe@svn.code.sf.net/p/svnbook/source/trunk/',
94 94 ])
95 95 def test_check_url(call_url):
96 from vcsserver.remote import svn
96 from vcsserver.remote import svn_remote
97 97 factory = mock.Mock()
98 98 factory.repo = mock.Mock(return_value=mock.Mock())
99 99
100 remote = svn.SvnRemote(factory)
100 remote = svn_remote.SvnRemote(factory)
101 101 remote.is_path_valid_repository = lambda wire, path: True
102 102 assert remote.check_url(call_url, {'dummy': 'config'})
103 103
@@ -1,123 +1,123 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import base64
18 import logging
18 19 import time
19 import logging
20 20
21 21 import msgpack
22 22
23 23 import vcsserver
24 from vcsserver.str_utils import safe_str, ascii_str
24 from vcsserver.str_utils import safe_str
25 25
26 26 log = logging.getLogger(__name__)
27 27
28 28
29 29 def get_access_path(environ):
30 30 path = environ.get('PATH_INFO')
31 31 return path
32 32
33 33
34 34 def get_user_agent(environ):
35 35 return environ.get('HTTP_USER_AGENT')
36 36
37 37
38 38 def get_call_context(request) -> dict:
39 39 cc = {}
40 40 registry = request.registry
41 41 if hasattr(registry, 'vcs_call_context'):
42 42 cc.update({
43 43 'X-RC-Method': registry.vcs_call_context.get('method'),
44 44 'X-RC-Repo-Name': registry.vcs_call_context.get('repo_name')
45 45 })
46 46
47 47 return cc
48 48
49 49
50 50 def get_headers_call_context(environ, strict=True):
51 51 if 'HTTP_X_RC_VCS_STREAM_CALL_CONTEXT' in environ:
52 52 packed_cc = base64.b64decode(environ['HTTP_X_RC_VCS_STREAM_CALL_CONTEXT'])
53 53 return msgpack.unpackb(packed_cc)
54 54 elif strict:
55 55 raise ValueError('Expected header HTTP_X_RC_VCS_STREAM_CALL_CONTEXT not found')
56 56
57 57
58 58 class RequestWrapperTween(object):
59 59 def __init__(self, handler, registry):
60 60 self.handler = handler
61 61 self.registry = registry
62 62
63 63 # one-time configuration code goes here
64 64
65 65 def __call__(self, request):
66 66 start = time.time()
67 67 log.debug('Starting request time measurement')
68 68 response = None
69 69
70 70 try:
71 71 response = self.handler(request)
72 72 finally:
73 73 ua = get_user_agent(request.environ)
74 74 call_context = get_call_context(request)
75 75 vcs_method = call_context.get('X-RC-Method', '_NO_VCS_METHOD')
76 76 repo_name = call_context.get('X-RC-Repo-Name', '')
77 77
78 78 count = request.request_count()
79 79 _ver_ = vcsserver.__version__
80 80 _path = safe_str(get_access_path(request.environ))
81 81
82 82 ip = '127.0.0.1'
83 83 match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
84 84 resp_code = getattr(response, 'status_code', 'UNDEFINED')
85 85
86 86 _view_path = f"{repo_name}@{_path}/{vcs_method}"
87 87
88 88 total = time.time() - start
89 89
90 90 log.info(
91 91 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
92 92 count, ip, request.environ.get('REQUEST_METHOD'),
93 93 _view_path, total, ua, _ver_,
94 94 extra={"time": total, "ver": _ver_, "code": resp_code,
95 95 "path": _path, "view_name": match_route, "user_agent": ua,
96 96 "vcs_method": vcs_method, "repo_name": repo_name}
97 97 )
98 98
99 99 statsd = request.registry.statsd
100 100 if statsd:
101 101 match_route = request.matched_route.name if request.matched_route else _path
102 102 elapsed_time_ms = round(1000.0 * total) # use ms only
103 103 statsd.timing(
104 104 "vcsserver_req_timing.histogram", elapsed_time_ms,
105 105 tags=[
106 106 f"view_name:{match_route}",
107 107 f"code:{resp_code}"
108 108 ],
109 109 use_decimals=False
110 110 )
111 111 statsd.incr(
112 112 "vcsserver_req_total", tags=[
113 113 f"view_name:{match_route}",
114 114 f"code:{resp_code}"
115 115 ])
116 116
117 117 return response
118 118
119 119
120 120 def includeme(config):
121 121 config.add_tween(
122 122 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
123 123 )
General Comments 0
You need to be logged in to leave comments. Login now