##// END OF EJS Templates
chore(release): merged default into stable branch
super-admin -
r1301:433d8917 merge v5.3.0 stable
parent child Browse files
Show More
@@ -1,5 +1,5 b''
1 1 [bumpversion]
2 current_version = 5.2.1
2 current_version = 5.3.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
@@ -1,102 +1,102 b''
1 1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
2 2
3 3 async-timeout==4.0.3
4 4 atomicwrites==1.4.1
5 5 celery==5.3.6
6 6 billiard==4.2.0
7 7 click==8.1.3
8 8 click-didyoumean==0.3.0
9 9 click==8.1.3
10 10 click-plugins==1.1.1
11 11 click==8.1.3
12 12 click-repl==0.2.0
13 13 click==8.1.3
14 14 prompt_toolkit==3.0.47
15 15 wcwidth==0.2.13
16 16 six==1.16.0
17 17 kombu==5.3.5
18 18 amqp==5.2.0
19 19 vine==5.1.0
20 20 vine==5.1.0
21 21 python-dateutil==2.8.2
22 22 six==1.16.0
23 23 tzdata==2024.1
24 24 vine==5.1.0
25 25 contextlib2==21.6.0
26 26 dogpile.cache==1.3.3
27 27 decorator==5.1.1
28 28 stevedore==5.1.0
29 29 pbr==5.11.1
30 30 dulwich==0.21.6
31 31 urllib3==1.26.14
32 32 fsspec==2024.9.0
33 33 gunicorn==23.0.0
34 34 packaging==24.1
35 35 hg-evolve==11.1.3
36 36 importlib-metadata==6.0.0
37 37 zipp==3.15.0
38 38 mercurial==6.7.4
39 39 more-itertools==9.1.0
40 40 msgpack==1.0.8
41 41 orjson==3.10.7
42 42 psutil==5.9.8
43 43 py==1.11.0
44 44 pygit2==1.13.3
45 45 cffi==1.16.0
46 46 pycparser==2.21
47 47 pygments==2.18.0
48 48 pyparsing==3.1.1
49 49 pyramid==2.0.2
50 50 hupper==1.12
51 51 plaster==1.1.2
52 52 plaster-pastedeploy==1.0.1
53 53 pastedeploy==3.1.0
54 54 plaster==1.1.2
55 55 translationstring==1.4
56 56 venusian==3.0.0
57 57 webob==1.8.7
58 58 zope.deprecation==5.0.0
59 zope.interface==6.4.post2
59 zope.interface==7.0.3
60 60 redis==5.1.0
61 61 async-timeout==4.0.3
62 62 repoze.lru==0.7
63 63 s3fs==2024.9.0
64 64 aiobotocore==2.13.0
65 65 aiohttp==3.9.5
66 66 aiosignal==1.3.1
67 67 frozenlist==1.4.1
68 68 attrs==22.2.0
69 69 frozenlist==1.4.1
70 70 multidict==6.0.5
71 71 yarl==1.9.4
72 72 idna==3.4
73 73 multidict==6.0.5
74 74 aioitertools==0.11.0
75 75 botocore==1.34.106
76 76 jmespath==1.0.1
77 77 python-dateutil==2.8.2
78 78 six==1.16.0
79 79 urllib3==1.26.14
80 80 wrapt==1.16.0
81 81 aiohttp==3.9.5
82 82 aiosignal==1.3.1
83 83 frozenlist==1.4.1
84 84 attrs==22.2.0
85 85 frozenlist==1.4.1
86 86 multidict==6.0.5
87 87 yarl==1.9.4
88 88 idna==3.4
89 89 multidict==6.0.5
90 90 fsspec==2024.9.0
91 91 scandir==1.10.0
92 92 setproctitle==1.3.3
93 93 subvertpy==0.11.0
94 94 waitress==3.0.0
95 95 wcwidth==0.2.13
96 96
97 97
98 98 ## test related requirements
99 99 #-r requirements_test.txt
100 100
101 101 ## uncomment to add the debug libraries
102 102 #-r requirements_debug.txt
@@ -1,1 +1,1 b''
1 5.2.1 No newline at end of file
1 5.3.0
@@ -1,302 +1,314 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import hashlib
18 18 import re
19 19 import logging
20 20
21 21 from gunicorn.http.errors import NoMoreData
22 22 from pyramid.config import Configurator
23 23 from pyramid.response import Response, FileIter
24 24 from pyramid.httpexceptions import (
25 25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 26 HTTPUnprocessableEntity)
27 27
28 28 from vcsserver.lib.ext_json import json
29 29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 31 from vcsserver.lib.str_utils import safe_int
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
37 37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38 38
39 39
40 40 def write_response_error(http_exception, text=None):
41 41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 42 _exception = http_exception(content_type=content_type)
43 43 _exception.content_type = content_type
44 44 if text:
45 45 _exception.body = json.dumps({'message': text})
46 46 log.debug('LFS: writing response of type %s to client with text:%s',
47 47 http_exception, text)
48 48 return _exception
49 49
50 50
51 51 class AuthHeaderRequired:
52 52 """
53 53 Decorator to check if request has proper auth-header
54 54 """
55 55
56 56 def __call__(self, func):
57 57 return get_cython_compat_decorator(self.__wrapper, func)
58 58
59 59 def __wrapper(self, func, *fargs, **fkwargs):
60 60 request = fargs[1]
61 61 auth = request.authorization
62 62 if not auth:
63 log.debug('No auth header found, returning 403')
63 64 return write_response_error(HTTPForbidden)
64 65 return func(*fargs[1:], **fkwargs)
65 66
66 67
67 68 # views
68 69
69 70 def lfs_objects(request):
70 71 # indicate not supported, V1 API
71 72 log.warning('LFS: v1 api not supported, reporting it back to client')
72 73 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 74
74 75
75 76 @AuthHeaderRequired()
76 77 def lfs_objects_batch(request):
77 78 """
78 79 The client sends the following information to the Batch endpoint to transfer some objects:
79 80
80 81 operation - Should be download or upload.
81 82 transfers - An optional Array of String identifiers for transfer
82 83 adapters that the client has configured. If omitted, the basic
83 84 transfer adapter MUST be assumed by the server.
84 85 objects - An Array of objects to download.
85 86 oid - String OID of the LFS object.
86 87 size - Integer byte size of the LFS object. Must be at least zero.
87 88 """
88 89 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 90 auth = request.authorization
90 91 repo = request.matchdict.get('repo')
91 92 data = request.json
92 93 operation = data.get('operation')
93 94 http_scheme = request.registry.git_lfs_http_scheme
94 95
95 96 if operation not in ('download', 'upload'):
96 97 log.debug('LFS: unsupported operation:%s', operation)
97 98 return write_response_error(
98 99 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
99 100
100 101 if 'objects' not in data:
101 102 log.debug('LFS: missing objects data')
102 103 return write_response_error(
103 104 HTTPBadRequest, 'missing objects data')
104 105
105 106 log.debug('LFS: handling operation of type: %s', operation)
106 107
107 108 objects = []
108 109 for o in data['objects']:
109 110 try:
110 111 oid = o['oid']
111 112 obj_size = o['size']
112 113 except KeyError:
113 114 log.exception('LFS, failed to extract data')
114 115 return write_response_error(
115 116 HTTPBadRequest, 'unsupported data in objects')
116 117
117 118 obj_data = {'oid': oid}
118 119 if http_scheme == 'http':
119 120 # Note(marcink): when using http, we might have a custom port
120 121 # so we skip setting it to http, url dispatch then wont generate a port in URL
121 122 # for development we need this
122 123 http_scheme = None
123 124
124 125 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
125 126 _scheme=http_scheme)
126 127 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
127 128 _scheme=http_scheme)
128 129 store = LFSOidStore(
129 130 oid, repo, store_location=request.registry.git_lfs_store_path)
130 131 handler = OidHandler(
131 132 store, repo, auth, oid, obj_size, obj_data,
132 133 obj_href, obj_verify_href)
133 134
134 135 # this verifies also OIDs
135 136 actions, errors = handler.exec_operation(operation)
136 137 if errors:
137 138 log.warning('LFS: got following errors: %s', errors)
138 139 obj_data['errors'] = errors
139 140
140 141 if actions:
141 142 obj_data['actions'] = actions
142 143
143 144 obj_data['size'] = obj_size
144 145 obj_data['authenticated'] = True
145 146 objects.append(obj_data)
146 147
147 148 result = {'objects': objects, 'transfer': 'basic'}
148 149 log.debug('LFS Response %s', safe_result(result))
149 150
150 151 return result
151 152
152 153
153 154 def lfs_objects_oid_upload(request):
154 155 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
155 156 repo = request.matchdict.get('repo')
156 157 oid = request.matchdict.get('oid')
157 158 store = LFSOidStore(
158 159 oid, repo, store_location=request.registry.git_lfs_store_path)
159 160 engine = store.get_engine(mode='wb')
160 161 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
161 162
163 # validate if OID is not by any chance already in the store
164 if store.has_oid():
165 log.debug('LFS: oid %s exists in store', oid)
166 return {'upload': 'ok', 'state': 'in-store'}
167
162 168 body = request.environ['wsgi.input']
163 169
170 digest = hashlib.sha256()
164 171 with engine as f:
165 172 blksize = 64 * 1024 # 64kb
166 173 while True:
167 174 # read in chunks as stream comes in from Gunicorn
168 175 # this is a specific Gunicorn support function.
169 176 # might work differently on waitress
170 177 try:
171 178 chunk = body.read(blksize)
172 179 except NoMoreData:
173 180 chunk = None
174 181
175 182 if not chunk:
176 183 break
184 f.write(chunk)
185 digest.update(chunk)
177 186
178 f.write(chunk)
187 hex_digest = digest.hexdigest()
188 digest_check = hex_digest == oid
189 if not digest_check:
190 engine.cleanup() # trigger cleanup so we don't save mismatch OID into the store
191 return write_response_error(
192 HTTPBadRequest, f'oid {oid} does not match expected sha {hex_digest}')
179 193
180 return {'upload': 'ok'}
194 return {'upload': 'ok', 'state': 'written'}
181 195
182 196
183 197 def lfs_objects_oid_download(request):
184 198 repo = request.matchdict.get('repo')
185 199 oid = request.matchdict.get('oid')
186 200
187 201 store = LFSOidStore(
188 202 oid, repo, store_location=request.registry.git_lfs_store_path)
189 203 if not store.has_oid():
190 204 log.debug('LFS: oid %s does not exists in store', oid)
191 205 return write_response_error(
192 206 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
193 207
194 208 # TODO(marcink): support range header ?
195 209 # Range: bytes=0-, `bytes=(\d+)\-.*`
196 210
197 211 f = open(store.oid_path, 'rb')
198 212 response = Response(
199 213 content_type='application/octet-stream', app_iter=FileIter(f))
200 214 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
201 215 return response
202 216
203 217
204 218 def lfs_objects_verify(request):
205 219 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
206 220 repo = request.matchdict.get('repo')
207 221
208 222 data = request.json
209 223 oid = data.get('oid')
210 224 size = safe_int(data.get('size'))
211 225
212 226 if not (oid and size):
213 227 return write_response_error(
214 228 HTTPBadRequest, 'missing oid and size in request data')
215 229
216 230 store = LFSOidStore(
217 231 oid, repo, store_location=request.registry.git_lfs_store_path)
218 232 if not store.has_oid():
219 233 log.debug('LFS: oid %s does not exists in store', oid)
220 234 return write_response_error(
221 235 HTTPNotFound, f'oid `{oid}` does not exists in store')
222 236
223 237 store_size = store.size_oid()
224 238 if store_size != size:
225 msg = 'requested file size mismatch store size:{} requested:{}'.format(
226 store_size, size)
227 return write_response_error(
228 HTTPUnprocessableEntity, msg)
239 msg = f'requested file size mismatch store size:{store_size} requested:{size}'
240 return write_response_error(HTTPUnprocessableEntity, msg)
229 241
230 return {'message': {'size': 'ok', 'in_store': 'ok'}}
242 return {'message': {'size': store_size, 'oid': oid}}
231 243
232 244
233 245 def lfs_objects_lock(request):
234 246 return write_response_error(
235 247 HTTPNotImplemented, 'GIT LFS locking api not supported')
236 248
237 249
238 250 def not_found(request):
239 251 return write_response_error(
240 252 HTTPNotFound, 'request path not found')
241 253
242 254
243 255 def lfs_disabled(request):
244 256 return write_response_error(
245 257 HTTPNotImplemented, 'GIT LFS disabled for this repo')
246 258
247 259
248 260 def git_lfs_app(config):
249 261
250 262 # v1 API deprecation endpoint
251 263 config.add_route('lfs_objects',
252 264 '/{repo:.*?[^/]}/info/lfs/objects')
253 265 config.add_view(lfs_objects, route_name='lfs_objects',
254 266 request_method='POST', renderer='json')
255 267
256 268 # locking API
257 269 config.add_route('lfs_objects_lock',
258 270 '/{repo:.*?[^/]}/info/lfs/locks')
259 271 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
260 272 request_method=('POST', 'GET'), renderer='json')
261 273
262 274 config.add_route('lfs_objects_lock_verify',
263 275 '/{repo:.*?[^/]}/info/lfs/locks/verify')
264 276 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
265 277 request_method=('POST', 'GET'), renderer='json')
266 278
267 279 # batch API
268 280 config.add_route('lfs_objects_batch',
269 281 '/{repo:.*?[^/]}/info/lfs/objects/batch')
270 282 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
271 283 request_method='POST', renderer='json')
272 284
273 285 # oid upload/download API
274 286 config.add_route('lfs_objects_oid',
275 287 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
276 288 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
277 289 request_method='PUT', renderer='json')
278 290 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
279 291 request_method='GET', renderer='json')
280 292
281 293 # verification API
282 294 config.add_route('lfs_objects_verify',
283 295 '/{repo:.*?[^/]}/info/lfs/verify')
284 296 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
285 297 request_method='POST', renderer='json')
286 298
287 299 # not found handler for API
288 300 config.add_notfound_view(not_found, renderer='json')
289 301
290 302
291 303 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
292 304 config = Configurator()
293 305 if git_lfs_enabled:
294 306 config.include(git_lfs_app)
295 307 config.registry.git_lfs_store_path = git_lfs_store_path
296 308 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
297 309 else:
298 310 # not found handler for API, reporting disabled LFS support
299 311 config.add_notfound_view(lfs_disabled, renderer='json')
300 312
301 313 app = config.make_wsgi_app()
302 314 return app
@@ -1,177 +1,185 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import logging
21 21 from collections import OrderedDict
22 22
23 23 log = logging.getLogger(__name__)
24 24
25 25
26 26 class OidHandler:
27 27
28 28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 29 obj_verify_href=None):
30 30 self.current_store = store
31 31 self.repo_name = repo_name
32 32 self.auth = auth
33 33 self.oid = oid
34 34 self.obj_size = obj_size
35 35 self.obj_data = obj_data
36 36 self.obj_href = obj_href
37 37 self.obj_verify_href = obj_verify_href
38 38
39 39 def get_store(self, mode=None):
40 40 return self.current_store
41 41
42 42 def get_auth(self):
43 43 """returns auth header for re-use in upload/download"""
44 44 return " ".join(self.auth)
45 45
46 46 def download(self):
47 47
48 48 store = self.get_store()
49 49 response = None
50 50 has_errors = None
51 51
52 52 if not store.has_oid():
53 53 # error reply back to client that something is wrong with dl
54 54 err_msg = f'object: {store.oid} does not exist in store'
55 55 has_errors = OrderedDict(
56 56 error=OrderedDict(
57 57 code=404,
58 58 message=err_msg
59 59 )
60 60 )
61 61
62 62 download_action = OrderedDict(
63 63 href=self.obj_href,
64 64 header=OrderedDict([("Authorization", self.get_auth())])
65 65 )
66 66 if not has_errors:
67 67 response = OrderedDict(download=download_action)
68 68 return response, has_errors
69 69
70 70 def upload(self, skip_existing=True):
71 71 """
72 72 Write upload action for git-lfs server
73 73 """
74 74
75 75 store = self.get_store()
76 76 response = None
77 77 has_errors = None
78 78
79 79 # verify if we have the OID before, if we do, reply with empty
80 80 if store.has_oid():
81 81 log.debug('LFS: store already has oid %s', store.oid)
82 82
83 83 # validate size
84 84 store_size = store.size_oid()
85 85 size_match = store_size == self.obj_size
86 86 if not size_match:
87 87 log.warning(
88 88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 89 self.oid, store_size, self.obj_size)
90 90 elif skip_existing:
91 91 log.debug('LFS: skipping further action as oid is existing')
92 92 return response, has_errors
93 93
94 94 chunked = ("Transfer-Encoding", "chunked")
95 95 upload_action = OrderedDict(
96 96 href=self.obj_href,
97 97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 98 )
99 99 if not has_errors:
100 100 response = OrderedDict(upload=upload_action)
101 101 # if specified in handler, return the verification endpoint
102 102 if self.obj_verify_href:
103 103 verify_action = OrderedDict(
104 104 href=self.obj_verify_href,
105 105 header=OrderedDict([("Authorization", self.get_auth())])
106 106 )
107 107 response['verify'] = verify_action
108 108 return response, has_errors
109 109
110 110 def exec_operation(self, operation, *args, **kwargs):
111 111 handler = getattr(self, operation)
112 112 log.debug('LFS: handling request using %s handler', handler)
113 113 return handler(*args, **kwargs)
114 114
115 115
116 116 class LFSOidStore:
117 117
118 118 def __init__(self, oid, repo, store_location=None):
119 119 self.oid = oid
120 120 self.repo = repo
121 121 defined_store_path = store_location or self.get_default_store()
122 122 self.store_suffix = f"/objects/{oid[:2]}/{oid[2:4]}"
123 123 self.store_path = f"{defined_store_path.rstrip('/')}{self.store_suffix}"
124 124 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
125 125 self.oid_path = os.path.join(self.store_path, oid)
126 126 self.fd = None
127 127
128 128 def get_engine(self, mode):
129 129 """
130 130 engine = .get_engine(mode='wb')
131 131 with engine as f:
132 132 f.write('...')
133 133 """
134 134
135 135 class StoreEngine:
136 _cleanup = None
136 137 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
137 138 self.mode = mode
138 139 self.store_path = store_path
139 140 self.oid_path = oid_path
140 141 self.tmp_oid_path = tmp_oid_path
141 142
143 def cleanup(self):
144 self._cleanup = True
145
142 146 def __enter__(self):
143 147 if not os.path.isdir(self.store_path):
144 148 os.makedirs(self.store_path)
145 149
146 150 # TODO(marcink): maybe write metadata here with size/oid ?
147 151 fd = open(self.tmp_oid_path, self.mode)
148 152 self.fd = fd
149 153 return fd
150 154
151 155 def __exit__(self, exc_type, exc_value, traceback):
156 self.fd.close()
157
158 if self._cleanup is None:
152 159 # close tmp file, and rename to final destination
153 self.fd.close()
154 160 shutil.move(self.tmp_oid_path, self.oid_path)
161 else:
162 os.remove(self.tmp_oid_path)
155 163
156 164 return StoreEngine(
157 165 mode, self.store_path, self.oid_path, self.tmp_oid_path)
158 166
159 167 def get_default_store(self):
160 168 """
161 169 Default store, consistent with defaults of Mercurial large files store
162 170 which is /home/username/.cache/largefiles
163 171 """
164 172 user_home = os.path.expanduser("~")
165 173 return os.path.join(user_home, '.cache', 'lfs-store')
166 174
167 175 def has_oid(self):
168 176 return os.path.exists(os.path.join(self.store_path, self.oid))
169 177
170 178 def size_oid(self):
171 179 size = -1
172 180
173 181 if self.has_oid():
174 182 oid = os.path.join(self.store_path, self.oid)
175 183 size = os.stat(oid).st_size
176 184
177 185 return size
@@ -1,274 +1,310 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from webtest.app import TestApp as WebObTestApp
21 21
22 22 from vcsserver.lib.ext_json import json
23 23 from vcsserver.lib.str_utils import safe_bytes
24 24 from vcsserver.git_lfs.app import create_app
25 25 from vcsserver.git_lfs.lib import LFSOidStore
26 26
27 27
28 28 @pytest.fixture(scope='function')
29 29 def git_lfs_app(tmpdir):
30 30 custom_app = WebObTestApp(create_app(
31 31 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
32 32 git_lfs_http_scheme='http'))
33 33 custom_app._store = str(tmpdir)
34 34 return custom_app
35 35
36 36
37 37 @pytest.fixture(scope='function')
38 38 def git_lfs_https_app(tmpdir):
39 39 custom_app = WebObTestApp(create_app(
40 40 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
41 41 git_lfs_http_scheme='https'))
42 42 custom_app._store = str(tmpdir)
43 43 return custom_app
44 44
45 45
46 46 @pytest.fixture()
47 47 def http_auth():
48 48 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
49 49
50 50
51 51 class TestLFSApplication:
52 52
53 53 def test_app_wrong_path(self, git_lfs_app):
54 54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
55 55
56 56 def test_app_deprecated_endpoint(self, git_lfs_app):
57 57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
58 58 assert response.status_code == 501
59 59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
60 60
61 61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
62 62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
63 63 assert response.status_code == 501
64 64 assert json.loads(response.text) == {
65 65 'message': 'GIT LFS locking api not supported'}
66 66
67 67 def test_app_lock_api_not_available(self, git_lfs_app):
68 68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
69 69 assert response.status_code == 501
70 70 assert json.loads(response.text) == {
71 71 'message': 'GIT LFS locking api not supported'}
72 72
73 73 def test_app_batch_api_missing_auth(self, git_lfs_app):
74 74 git_lfs_app.post_json(
75 75 '/repo/info/lfs/objects/batch', params={}, status=403)
76 76
77 77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
78 78 response = git_lfs_app.post_json(
79 79 '/repo/info/lfs/objects/batch', params={}, status=400,
80 80 extra_environ=http_auth)
81 81 assert json.loads(response.text) == {
82 82 'message': 'unsupported operation mode: `None`'}
83 83
84 84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
85 85 response = git_lfs_app.post_json(
86 86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
87 87 status=400, extra_environ=http_auth)
88 88 assert json.loads(response.text) == {
89 89 'message': 'missing objects data'}
90 90
91 91 def test_app_batch_api_unsupported_data_in_objects(
92 92 self, git_lfs_app, http_auth):
93 93 params = {'operation': 'download',
94 94 'objects': [{}]}
95 95 response = git_lfs_app.post_json(
96 96 '/repo/info/lfs/objects/batch', params=params, status=400,
97 97 extra_environ=http_auth)
98 98 assert json.loads(response.text) == {
99 99 'message': 'unsupported data in objects'}
100 100
101 101 def test_app_batch_api_download_missing_object(
102 102 self, git_lfs_app, http_auth):
103 params = {'operation': 'download',
104 'objects': [{'oid': '123', 'size': '1024'}]}
103 params = {
104 'operation': 'download',
105 'objects': [{'oid': '123', 'size': '1024'}]
106 }
105 107 response = git_lfs_app.post_json(
106 108 '/repo/info/lfs/objects/batch', params=params,
107 109 extra_environ=http_auth)
108 110
109 111 expected_objects = [
110 {'authenticated': True,
111 'errors': {'error': {
112 'code': 404,
113 'message': 'object: 123 does not exist in store'}},
112 {
114 113 'oid': '123',
115 'size': '1024'}
114 'size': '1024',
115 'authenticated': True,
116 'errors': {'error': {'code': 404, 'message': 'object: 123 does not exist in store'}},
117 }
116 118 ]
119
117 120 assert json.loads(response.text) == {
118 'objects': expected_objects, 'transfer': 'basic'}
121 'objects': expected_objects,
122 'transfer': 'basic'
123 }
119 124
120 125 def test_app_batch_api_download(self, git_lfs_app, http_auth):
121 126 oid = '456'
122 127 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
123 128 if not os.path.isdir(os.path.dirname(oid_path)):
124 129 os.makedirs(os.path.dirname(oid_path))
125 130 with open(oid_path, 'wb') as f:
126 131 f.write(safe_bytes('OID_CONTENT'))
127 132
128 133 params = {'operation': 'download',
129 134 'objects': [{'oid': oid, 'size': '1024'}]}
130 135 response = git_lfs_app.post_json(
131 136 '/repo/info/lfs/objects/batch', params=params,
132 137 extra_environ=http_auth)
133 138
134 139 expected_objects = [
135 140 {'authenticated': True,
136 141 'actions': {
137 142 'download': {
138 143 'header': {'Authorization': 'Basic XXXXX'},
139 144 'href': 'http://localhost/repo/info/lfs/objects/456'},
140 145 },
141 146 'oid': '456',
142 147 'size': '1024'}
143 148 ]
144 149 assert json.loads(response.text) == {
145 'objects': expected_objects, 'transfer': 'basic'}
150 'objects': expected_objects,
151 'transfer': 'basic'
152 }
146 153
147 154 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
148 155 params = {'operation': 'upload',
149 156 'objects': [{'oid': '123', 'size': '1024'}]}
150 157 response = git_lfs_app.post_json(
151 158 '/repo/info/lfs/objects/batch', params=params,
152 159 extra_environ=http_auth)
153 160 expected_objects = [
154 {'authenticated': True,
161 {
162 'authenticated': True,
155 163 'actions': {
156 164 'upload': {
157 'header': {'Authorization': 'Basic XXXXX',
158 'Transfer-Encoding': 'chunked'},
159 'href': 'http://localhost/repo/info/lfs/objects/123'},
165 'header': {
166 'Authorization': 'Basic XXXXX',
167 'Transfer-Encoding': 'chunked'
168 },
169 'href': 'http://localhost/repo/info/lfs/objects/123'
170 },
160 171 'verify': {
161 'header': {'Authorization': 'Basic XXXXX'},
162 'href': 'http://localhost/repo/info/lfs/verify'}
172 'header': {
173 'Authorization': 'Basic XXXXX'
174 },
175 'href': 'http://localhost/repo/info/lfs/verify'
176 }
163 177 },
164 178 'oid': '123',
165 'size': '1024'}
179 'size': '1024'
180 }
166 181 ]
167 182 assert json.loads(response.text) == {
168 'objects': expected_objects, 'transfer': 'basic'}
183 'objects': expected_objects,
184 'transfer': 'basic'
185 }
169 186
170 187 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
171 188 params = {'operation': 'upload',
172 189 'objects': [{'oid': '123', 'size': '1024'}]}
173 190 response = git_lfs_https_app.post_json(
174 191 '/repo/info/lfs/objects/batch', params=params,
175 192 extra_environ=http_auth)
176 193 expected_objects = [
177 194 {'authenticated': True,
178 195 'actions': {
179 196 'upload': {
180 197 'header': {'Authorization': 'Basic XXXXX',
181 198 'Transfer-Encoding': 'chunked'},
182 199 'href': 'https://localhost/repo/info/lfs/objects/123'},
183 200 'verify': {
184 201 'header': {'Authorization': 'Basic XXXXX'},
185 202 'href': 'https://localhost/repo/info/lfs/verify'}
186 203 },
187 204 'oid': '123',
188 205 'size': '1024'}
189 206 ]
190 207 assert json.loads(response.text) == {
191 208 'objects': expected_objects, 'transfer': 'basic'}
192 209
193 210 def test_app_verify_api_missing_data(self, git_lfs_app):
194 211 params = {'oid': 'missing'}
195 212 response = git_lfs_app.post_json(
196 213 '/repo/info/lfs/verify', params=params,
197 214 status=400)
198 215
199 216 assert json.loads(response.text) == {
200 217 'message': 'missing oid and size in request data'}
201 218
202 219 def test_app_verify_api_missing_obj(self, git_lfs_app):
203 220 params = {'oid': 'missing', 'size': '1024'}
204 221 response = git_lfs_app.post_json(
205 222 '/repo/info/lfs/verify', params=params,
206 223 status=404)
207 224
208 225 assert json.loads(response.text) == {
209 'message': 'oid `missing` does not exists in store'}
226 'message': 'oid `missing` does not exists in store'
227 }
210 228
211 229 def test_app_verify_api_size_mismatch(self, git_lfs_app):
212 230 oid = 'existing'
213 231 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
214 232 if not os.path.isdir(os.path.dirname(oid_path)):
215 233 os.makedirs(os.path.dirname(oid_path))
216 234 with open(oid_path, 'wb') as f:
217 235 f.write(safe_bytes('OID_CONTENT'))
218 236
219 237 params = {'oid': oid, 'size': '1024'}
220 238 response = git_lfs_app.post_json(
221 239 '/repo/info/lfs/verify', params=params, status=422)
222 240
223 241 assert json.loads(response.text) == {
224 'message': 'requested file size mismatch '
225 'store size:11 requested:1024'}
242 'message': 'requested file size mismatch store size:11 requested:1024'
243 }
226 244
227 245 def test_app_verify_api(self, git_lfs_app):
228 246 oid = 'existing'
229 247 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
230 248 if not os.path.isdir(os.path.dirname(oid_path)):
231 249 os.makedirs(os.path.dirname(oid_path))
232 250 with open(oid_path, 'wb') as f:
233 251 f.write(safe_bytes('OID_CONTENT'))
234 252
235 253 params = {'oid': oid, 'size': 11}
236 254 response = git_lfs_app.post_json(
237 255 '/repo/info/lfs/verify', params=params)
238 256
239 257 assert json.loads(response.text) == {
240 'message': {'size': 'ok', 'in_store': 'ok'}}
258 'message': {'size': 11, 'oid': oid}
259 }
241 260
242 261 def test_app_download_api_oid_not_existing(self, git_lfs_app):
243 262 oid = 'missing'
244 263
245 response = git_lfs_app.get(
246 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
264 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
247 265
248 266 assert json.loads(response.text) == {
249 267 'message': 'requested file with oid `missing` not found in store'}
250 268
251 269 def test_app_download_api(self, git_lfs_app):
252 270 oid = 'existing'
253 271 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
254 272 if not os.path.isdir(os.path.dirname(oid_path)):
255 273 os.makedirs(os.path.dirname(oid_path))
256 274 with open(oid_path, 'wb') as f:
257 275 f.write(safe_bytes('OID_CONTENT'))
258 276
259 response = git_lfs_app.get(
260 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
277 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}')
261 278 assert response
262 279
263 280 def test_app_upload(self, git_lfs_app):
264 oid = 'uploaded'
281 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
265 282
266 283 response = git_lfs_app.put(
267 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
284 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
268 285
269 assert json.loads(response.text) == {'upload': 'ok'}
286 assert json.loads(response.text) == {'upload': 'ok', 'state': 'written'}
270 287
271 288 # verify that we actually wrote that OID
272 289 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
273 290 assert os.path.isfile(oid_path)
274 291 assert 'CONTENT' == open(oid_path).read()
292
293 response = git_lfs_app.put(
294 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
295
296 assert json.loads(response.text) == {'upload': 'ok', 'state': 'in-store'}
297
298
299 def test_app_upload_wrong_sha(self, git_lfs_app):
300 oid = 'i-am-a-wrong-sha'
301
302 response = git_lfs_app.put(f'/repo/info/lfs/objects/{oid}', params='CONTENT', status=400)
303
304 assert json.loads(response.text) == {
305 'message': 'oid i-am-a-wrong-sha does not match expected sha '
306 '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'}
307
308 # check this OID wasn't written to store
309 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
310 assert json.loads(response.text) == {'message': 'requested file with oid `i-am-a-wrong-sha` not found in store'}
@@ -1,142 +1,143 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from vcsserver.lib.str_utils import safe_bytes
21 21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
22 22
23 23
@pytest.fixture()
def lfs_store(tmpdir):
    # Store keyed by a real sha256 oid (sha256 of 'CONTENT'), rooted in the per-test tmpdir.
    repo = 'test'
    oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
    store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
    return store
30 30
31 31
@pytest.fixture()
def oid_handler(lfs_store):
    # Handler wired to the lfs_store fixture with fixed auth and hrefs,
    # so tests can assert exact header/href payloads.
    store = lfs_store
    repo = store.repo
    oid = store.oid

    oid_handler = OidHandler(
        store=store, repo_name=repo, auth=('basic', 'xxxx'),
        oid=oid,
        obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
        obj_verify_href='http://localhost/verify')
    return oid_handler
44 44
45 45
class TestOidHandler:
    """Unit tests for OidHandler download/upload operations."""

    @pytest.mark.parametrize('exec_action', [
        'download',
        'upload',
    ])
    def test_exec_action(self, exec_action, oid_handler):
        # known operations resolve to a callable handler result
        handler = oid_handler.exec_operation(exec_action)
        assert handler

    def test_exec_action_undefined(self, oid_handler):
        # unknown operation names raise instead of silently no-oping
        with pytest.raises(AttributeError):
            oid_handler.exec_operation('wrong')

    def test_download_oid_not_existing(self, oid_handler):
        # downloading a missing object reports a 404-style error payload
        response, has_errors = oid_handler.exec_operation('download')

        assert response is None
        assert has_errors['error'] == {
            'code': 404,
            'message': 'object: 65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12 does not exist in store'
        }

    def test_download_oid(self, oid_handler):
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write(safe_bytes('CONTENT'))

        response, has_errors = oid_handler.exec_operation('download')

        assert has_errors is None
        assert response['download'] == {
            'header': {'Authorization': 'basic xxxx'},
            'href': 'http://localhost/handle_oid'
        }

    def test_upload_oid_that_exists(self, oid_handler):
        # object already in store with matching size: nothing to upload
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write(safe_bytes('CONTENT'))
        oid_handler.obj_size = 7
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response is None

    def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
        # size mismatch forces a re-upload even though the oid is present
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write(safe_bytes('CONTENT'))

        oid_handler.obj_size = 10240
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response['upload'] == {
            'header': {'Authorization': 'basic xxxx',
                       'Transfer-Encoding': 'chunked'},
            'href': 'http://localhost/handle_oid',
        }

    def test_upload_oid(self, oid_handler):
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response['upload'] == {
            'header': {'Authorization': 'basic xxxx',
                       'Transfer-Encoding': 'chunked'},
            'href': 'http://localhost/handle_oid'
        }
121 122
122 123
class TestLFSStore:
    """Unit tests for the LFSOidStore write/detect primitives."""

    def test_write_oid(self, lfs_store):
        oid_location = lfs_store.oid_path

        assert not os.path.isfile(oid_location)

        # the engine context manager handles the file write lifecycle
        engine = lfs_store.get_engine(mode='wb')
        with engine as f:
            f.write(safe_bytes('CONTENT'))

        assert os.path.isfile(oid_location)

    def test_detect_has_oid(self, lfs_store):
        # has_oid flips from False to True once content is written
        assert lfs_store.has_oid() is False
        engine = lfs_store.get_engine(mode='wb')
        with engine as f:
            f.write(safe_bytes('CONTENT'))

        assert lfs_store.has_oid() is True
@@ -1,824 +1,828 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21 import logging
22 22 import collections
23 23 import base64
24 24 import msgpack
25 25 import dataclasses
26 26 import pygit2
27 27
28 28 import http.client
29 29 from celery import Celery
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33
34 34 from vcsserver import exceptions, subprocessio, settings
35 35 from vcsserver.lib.ext_json import json
36 36 from vcsserver.lib.str_utils import ascii_str, safe_str
37 37 from vcsserver.lib.svn_txn_utils import get_txn_id_from_store
38 38 from vcsserver.remote.git_remote import Repository
39 39
# module-level celery app; configured lazily by HooksCeleryClient.__init__
celery_app = Celery('__vcsserver__')
log = logging.getLogger(__name__)
42 42
43 43
class HooksHttpClient:
    """
    Hooks client that POSTs msgpack-serialized hook calls to an HTTP
    callback server at `hooks_uri` and decodes the msgpack response.
    """
    # wire-protocol identifier announced in the request headers
    proto = 'msgpack.v1'
    connection = None

    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __repr__(self):
        return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'

    def __call__(self, method, extras):
        """Invoke remote hook `method` with `extras`; returns the decoded result dict."""
        # a fresh connection per call; closed unconditionally in `finally`
        connection = http.client.HTTPConnection(self.hooks_uri)
        # binary msgpack body
        headers, body = self._serialize(method, extras)
        log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)

        try:
            try:
                connection.request('POST', '/', body, headers)
            except Exception as error:
                log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
                raise

            response = connection.getresponse()
            try:
                return msgpack.load(response)
            except Exception:
                response_data = response.read()
                log.exception('Failed to decode hook response json data. '
                              'response_code:%s, raw_data:%s',
                              response.status, response_data)
                raise
        finally:
            connection.close()

    @classmethod
    def _serialize(cls, hook_name, extras):
        """Build the (headers, msgpack-packed body) pair for one hook call."""
        data = {
            'method': hook_name,
            'extras': extras
        }
        headers = {
            "rc-hooks-protocol": cls.proto,
            "Connection": "keep-alive"
        }
        return headers, msgpack.packb(data)
90 90
91 91
class HooksCeleryClient:
    """Hooks client that dispatches hook calls as Celery tasks and blocks on the result."""

    TASK_TIMEOUT = 60  # time in seconds

    def __init__(self, queue, backend):
        celery_app.config_from_object({
            'broker_url': queue, 'result_backend': backend,
            'broker_connection_retry_on_startup': True,
            'task_serializer': 'json',
            'accept_content': ['json', 'msgpack'],
            'result_serializer': 'json',
            'result_accept_content': ['json', 'msgpack']
        })
        self.celery_app = celery_app

    def __call__(self, method, extras):
        # hook tasks live in the rhodecode celerylib task namespace
        task = self.celery_app.signature(f'rhodecode.lib.celerylib.tasks.{method}')
        async_result = task.delay(extras)
        return async_result.get(timeout=self.TASK_TIMEOUT)
111 113
112 114
class HooksShadowRepoClient:
    """No-op hooks client used for shadow (merge-check) repositories."""

    def __call__(self, hook_name, extras):
        # shadow repos never fire real hooks: always succeed with empty output
        return {'status': 0, 'output': ''}
117 119
118 120
class RemoteMessageWriter:
    """Writer base class: subclasses deliver `message` to a specific VCS client."""
    def write(self, message):
        # abstract: concrete writers (hg/git/svn) must override
        raise NotImplementedError()
123 125
124 126
class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message: str):
        # ui.status() is suppressed while quiet is set, so lift it for the
        # duration of this write and then restore the previous value.
        # TODO: Check why the quiet flag is set by default.
        old = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = old
137 139
138 140
class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        # default to the process stdout (git relays it to the client)
        self.stdout = stdout or sys.stdout

    def write(self, message: str):
        self.stdout.write(message)
147 149
148 150
class SvnMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        self.stderr.write(message)
158 160
159 161
160 def _handle_exception(result):
162 def _maybe_handle_exception(result):
161 163 exception_class = result.get('exception')
162 164 exception_traceback = result.get('exception_traceback')
165 if not (exception_class and exception_traceback):
166 return
163 167 log.debug('Handling hook-call exception: %s', exception_class)
164 168
165 169 if exception_traceback:
166 170 log.error('Got traceback from remote call:%s', exception_traceback)
167 171
168 172 if exception_class == 'HTTPLockedRC':
169 173 raise exceptions.RepositoryLockedException()(*result['exception_args'])
170 174 elif exception_class == 'ClientNotSupportedError':
171 175 raise exceptions.ClientNotSupportedException()(*result['exception_args'])
172 176 elif exception_class == 'HTTPBranchProtected':
173 177 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
174 178 elif exception_class == 'RepositoryError':
175 179 raise exceptions.VcsException()(*result['exception_args'])
176 180 elif exception_class:
177 181 raise Exception(
178 182 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
179 183 )
180 184
181 185
def _get_hooks_client(extras):
    """
    Pick the hooks transport based on what the extras dict provides:
    HTTP callback > celery task queue > shadow-repo no-op.
    """
    hooks_uri = extras.get('hooks_uri')
    task_queue = extras.get('task_queue')
    task_backend = extras.get('task_backend')

    if hooks_uri:
        # direct HTTP callback into the server
        return HooksHttpClient(hooks_uri)
    if task_queue and task_backend:
        # asynchronous dispatch through celery
        return HooksCeleryClient(task_queue, task_backend)
    if extras.get('is_shadow_repo'):
        # shadow repos get a success-only stub
        return HooksShadowRepoClient()
    raise Exception("Hooks client not found!")
196 200
197 201
def _call_hook(hook_name, extras, writer):
    """
    Run `hook_name` through the configured hooks client.

    Re-raises any remote exception carried in the result, relays the hook's
    textual output through `writer`, and returns the hook status code.
    """
    client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', client)

    result = client(hook_name, extras)
    log.debug('Hooks got result: %s', result)
    _maybe_handle_exception(result)

    writer.write(result['output'])
    return result['status']
207 211
208 212
209 213 def _extras_from_ui(ui):
210 214 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
211 215 if not hook_data:
212 216 # maybe it's inside environ ?
213 217 env_hook_data = os.environ.get('RC_SCM_DATA')
214 218 if env_hook_data:
215 219 hook_data = env_hook_data
216 220
217 221 extras = {}
218 222 if hook_data:
219 223 extras = json.loads(hook_data)
220 224 return extras
221 225
222 226
def _rev_range_hash(repo, node, check_heads=False):
    """
    List (commit_id, branch) pairs from `node` up to the repo tip; when
    `check_heads` is set, also report heads that would require a merge.
    """
    from vcsserver.hgcompat import get_ctx

    first_rev = get_ctx(repo, node).rev()
    last_rev = len(repo)
    revs = list(range(first_rev, last_rev))

    commits = []
    for rev in revs:
        ctx = get_ctx(repo, rev)
        commit_id = ascii_str(mercurial.node.hex(ctx.node()))
        commits.append((commit_id, safe_str(ctx.branch())))

    parent_heads = _check_heads(repo, first_rev, last_rev, revs) if check_heads else []
    return commits, parent_heads
241 245
242 246
def _check_heads(repo, start, end, commits):
    """
    Detect whether the pushed revs [start, end) create multiple heads on a branch.

    :param start: first new revision number
    :param end: one past the last revision number
    :param commits: list of the new revision numbers
    :return: list of conflicting head revisions (merge suggested), else [].
    """
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    # collect parents of the new revs that existed before the pushed range
    for new_rev in commits:
        for p in changelog.parentrevs(new_rev):
            if p == mercurial.node.nullrev:
                continue
            if p < start:
                parents.add(p)

    for p in parents:
        branch = get_ctx(repo, p).branch()
        # The heads descending from that parent, on the same branch
        parent_heads = {p}
        reachable = {p}
        # walk the new revs; a rev descending from something reachable
        # replaces its parent as a head candidate
        for x in range(p + 1, end):
            if get_ctx(repo, x).branch() != branch:
                continue
            for pp in changelog.parentrevs(x):
                if pp in reachable:
                    reachable.add(x)
                    parent_heads.discard(pp)
                    parent_heads.add(x)
        # More than one head? Suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []
273 277
274 278
275 279 def _get_git_env():
276 280 env = {}
277 281 for k, v in os.environ.items():
278 282 if k.startswith('GIT'):
279 283 env[k] = v
280 284
281 285 # serialized version
282 286 return [(k, v) for k, v in env.items()]
283 287
284 288
285 289 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
286 290 env = {}
287 291 for k, v in os.environ.items():
288 292 if k.startswith('HG'):
289 293 env[k] = v
290 294
291 295 env['HG_NODE'] = old_rev
292 296 env['HG_NODE_LAST'] = new_rev
293 297 env['HG_TXNID'] = txnid
294 298 env['HG_PENDING'] = repo_path
295 299
296 300 return [(k, v) for k, v in env.items()]
297 301
298 302
def _get_ini_settings(ini_file):
    """
    Load, sanitize and store application settings from `ini_file`.

    Side effect: configure_and_store_settings persists the settings globally
    for later lookups; the sanitized dict is also returned.
    """
    from vcsserver.http_main import sanitize_settings_and_apply_defaults
    from vcsserver.lib.config_utils import get_app_config_lightweight, configure_and_store_settings

    global_config = {'__file__': ini_file}
    ini_settings = get_app_config_lightweight(ini_file)
    sanitize_settings_and_apply_defaults(global_config, ini_settings)
    configure_and_store_settings(global_config, ini_settings)

    return ini_settings
309 313
310 314
def _fix_hooks_executables(ini_path=''):
    """
    This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns
    especially for subversion where hooks strip entire env, and calling just 'svn' command will most likely fail
    because svn is not on PATH
    """
    # set defaults, in case we can't read from ini_file
    core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
    if ini_path:
        # prefer the explicitly-configured binary dir from the ini file
        ini_settings = _get_ini_settings(ini_path)
        core_binary_dir = ini_settings['core.binary_dir']

    settings.BINARY_DIR = core_binary_dir
324 328
325 329
def repo_size(ui, repo, **kwargs):
    """Mercurial hook: delegate repository-size reporting to the hooks client."""
    return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
329 333
330 334
def pre_pull(ui, repo, **kwargs):
    """Mercurial pre_pull hook: fire the 'pre_pull' hook via the hooks client."""
    return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
334 338
335 339
def pre_pull_ssh(ui, repo, **kwargs):
    """SSH-only wrapper: run pre_pull when extras flag an SSH session, else succeed."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return pre_pull(ui, repo, **kwargs)
341 345
342 346
def post_pull(ui, repo, **kwargs):
    """Mercurial post_pull hook: fire the 'post_pull' hook via the hooks client."""
    return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
346 350
347 351
def post_pull_ssh(ui, repo, **kwargs):
    """SSH-only wrapper: run post_pull when extras flag an SSH session, else succeed."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return post_pull(ui, repo, **kwargs)
353 357
354 358
def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook

    Builds per-branch push-ref payloads for the incoming changegroup and
    dispatches them through the hooks client. Returns the hook status code.
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    hook_type: str = safe_str(kwargs.get('hooktype'))

    # only pretxnchangegroup carries actual incoming commits to analyze
    if node and hook_type == 'pretxnchangegroup':
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        # one push-ref record per touched branch
        for branch, commits in branches.items():
            old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
            rev_data.append({
                'total_commits': len(commits),
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            # hg transaction environment so the server side can inspect pending state
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
                repo_path=repo_path)

    extras['hook_type'] = hook_type or 'pre_push'
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))
396 400
397 401
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """SSH-only wrapper: run pre_push when extras flag an SSH session, else succeed."""
    if _extras_from_ui(ui).get('SSH'):
        return pre_push(ui, repo, node, **kwargs)
    return 0
404 408
405 409
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH

    Checks SSH_PERMISSIONS from extras: write/admin may push (0),
    anything else is rejected (1). Non-SSH sessions pass through (0).
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    if extras['SSH_PERMISSIONS'] in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code
    return 1
421 425
422 426
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook

    Reports the pushed commit ids, touched branches, bookmarks (captured
    earlier by key_push) and tags through the hooks client.
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    bookmarks = []
    tags = []
    hook_type: str = safe_str(kwargs.get('hooktype'))

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # bookmarks were stashed on the ui object by the key_push hook
    if hasattr(ui, '_rc_pushkey_bookmarks'):
        bookmarks = ui._rc_pushkey_bookmarks

    extras['hook_type'] = hook_type or 'post_push'
    extras['commit_ids'] = commit_ids

    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
454 458
455 459
def post_push_ssh(ui, repo, node, **kwargs):
    """SSH-only wrapper: run post_push when extras flag an SSH session, else succeed."""
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        return post_push(ui, repo, node, **kwargs)
    return 0
463 467
464 468
def key_push(ui, repo, **kwargs):
    """Mercurial pushkey hook: capture bookmark updates for the later post_push call."""
    from vcsserver.hgcompat import get_ctx

    # kwargs['new'] == b'0' means a bookmark deletion; only record additions/moves
    if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
    return
472 476
473 477
# backward compat: older installed hook scripts still reference this name
log_pull_action = post_pull

# backward compat: older installed hook scripts still reference this name
log_push_action = post_push
479 483
480 484
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name kept for backward compatibility; intentionally a no-op.

    Only required while the installed git hooks have not been upgraded.
    """
    return None
488 492
489 493
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name kept for backward compatibility; intentionally a no-op.

    Only required while the installed git hooks have not been upgraded.
    """
    return None
497 501
498 502
@dataclasses.dataclass
class HookResponse:
    # exit status of the hook call; 0 means success
    status: int
    # textual output to relay back to the client
    output: str
503 507
504 508
def git_pre_pull(extras) -> HookResponse:
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: HookResponse with the hook status (0 for success) and captured output
    """
    if 'pull' not in extras['hooks']:
        # the pull hook is not enabled for this repo: succeed silently
        return HookResponse(0, '')

    buffer = io.StringIO()
    try:
        status_code = _call_hook('pre_pull', extras, GitMessageWriter(buffer))
    except Exception as error:
        log.exception('Failed to call pre_pull hook')
        status_code = 128
        buffer.write(f'ERROR: {error}\n')

    return HookResponse(status_code, buffer.getvalue())
529 533
530 534
def git_post_pull(extras) -> HookResponse:
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: HookResponse with the hook status (0 for success) and captured output
    """
    if 'pull' not in extras['hooks']:
        # the pull hook is not enabled for this repo: succeed silently
        return HookResponse(0, '')

    buffer = io.StringIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(buffer))
    except Exception as error:
        status = 128
        buffer.write(f'ERROR: {error}\n')

    return HookResponse(status, buffer.getvalue())
552 556
553 557
554 558 def _parse_git_ref_lines(revision_lines):
555 559 rev_data = []
556 560 for revision_line in revision_lines or []:
557 561 old_rev, new_rev, ref = revision_line.strip().split(' ')
558 562 ref_data = ref.split('/', 2)
559 563 if ref_data[1] in ('tags', 'heads'):
560 564 rev_data.append({
561 565 # NOTE(marcink):
562 566 # we're unable to tell total_commits for git at this point
563 567 # but we set the variable for consistency with GIT
564 568 'total_commits': -1,
565 569 'old_rev': old_rev,
566 570 'new_rev': new_rev,
567 571 'ref': ref,
568 572 'type': ref_data[1],
569 573 'name': ref_data[2],
570 574 })
571 575 return rev_data
572 576
573 577
def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Pre push hook.

    Parses the incoming ref updates, optionally detects force-pushes via
    `git rev-list`, and dispatches a 'pre_push' hook call.

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0
    _fix_hooks_executables(env.get('RC_INI_FILE'))

    # git's convention for a non-existing ref side (new or deleted branch)
    empty_commit_id = '0' * 40

    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        type_ = push_ref['type']
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if type_ == 'heads' and not (new_branch or delete_branch):
            old_rev = push_ref['old_rev']
            new_rev = push_ref['new_rev']
            # commits reachable from old_rev but not new_rev => history rewritten
            cmd = [settings.GIT_EXECUTABLE(), 'rev-list', old_rev, f'^{new_rev}']
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            # means we're having some non-reachable objects, this forced push was used
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data

    stdout = sys.stdout
    status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))

    return status_code
618 622
619 623
def git_post_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Post push hook.

    Collects pushed commit ids (via git subprocesses), touched branches and
    tags, optionally sets HEAD for a fresh repo, and dispatches 'post_push'
    (plus 'repo_size' when enabled).

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    _fix_hooks_executables(env.get('RC_INI_FILE'))

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            # starting new branch case
            if push_ref['old_rev'] == empty_commit_id:
                push_ref_name = push_ref['name']

                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                # if the repo had no HEAD yet (first push), point it at this branch
                need_head_set = ''
                with Repository(os.getcwd()) as repo:
                    try:
                        repo.head
                    except pygit2.GitError:
                        need_head_set = f'refs/heads/{push_ref_name}'

                    if need_head_set:
                        repo.set_head(need_head_set)
                        print(f"Setting default branch to {push_ref_name}")

                # list all commits on the new branch not reachable from other heads
                cmd = [settings.GIT_EXECUTABLE(), 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = safe_str(stdout)
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE(), 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(list(map(ascii_str, stdout.splitlines())))

            # delete branch case
            elif push_ref['new_rev'] == empty_commit_id:
                git_revs.append(f'delete_branch=>{push_ref["name"]}')
            else:
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # ordinary update: commits in the old..new range
                cmd = [settings.GIT_EXECUTABLE(), 'log',
                       f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                # we get bytes from stdout, we need str to be consistent
                log_revs = list(map(ascii_str, stdout.splitlines()))
                git_revs.extend(log_revs)

                # Pure pygit2 impl. but still 2-3x slower :/
                # results = []
                #
                # with Repository(os.getcwd()) as repo:
                #     repo_new_rev = repo[push_ref['new_rev']]
                #     repo_old_rev = repo[push_ref['old_rev']]
                #     walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
                #
                #     for commit in walker:
                #         if commit.id == repo_old_rev.id:
                #             break
                #         results.append(commit.id.hex)
                #     # reverse the order, can't use GIT_SORT_REVERSE
                #     log_revs = results[::-1]

        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append(f'tag=>{push_ref["name"]}')

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    stdout = sys.stdout

    # repo_size reporting is best-effort: failures must not break the push
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter(stdout))
        except Exception:
            pass

    status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
    return status_code
732 736
733 737
def get_extras_from_txn_id(repo_path, txn_id):
    """Recover hook extras persisted in the repo store for a given SVN transaction id."""
    return get_txn_id_from_store(repo_path, txn_id)
737 741
738 742
def svn_pre_commit(repo_path, commit_data, env):
    """
    SVN pre-commit hook: resolve hook extras (from env or the txn store)
    and dispatch a 'pre_push' hook call for the pending transaction.

    :param commit_data: (path, txn_id) pair for the pending commit
    :return: hook status code; 0 for success.
    :raises ValueError: when no extras context can be recovered.
    """
    path, txn_id = commit_data
    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        ini_path = env.get('RC_INI_FILE')
        if ini_path:
            _get_ini_settings(ini_path)
        # fallback method to read from TXN-ID stored data
        extras = get_extras_from_txn_id(path, txn_id)

    if not extras:
        raise ValueError('SVN-PRE-COMMIT: Failed to extract context data in called extras for hook execution')

    if extras.get('rc_internal_commit'):
        # special marker for internal commit, we don't call hooks client
        return 0

    extras['hook_type'] = 'pre_commit'
    extras['commit_ids'] = [txn_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'total_commits': 1,
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    return _call_hook('pre_push', extras, SvnMessageWriter())
772 776
773 777
def svn_post_commit(repo_path, commit_data, env):
    """
    commit_data is path, rev, txn_id

    SVN post-commit hook: resolve hook extras (from env or the txn store)
    and dispatch a 'post_push' hook call (plus best-effort 'repo_size').

    :return: hook status code; 0 for success.
    :raises ValueError: when a txn_id is known but no extras can be recovered.
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        # legacy callers don't pass a txn_id; degrade gracefully
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        return 0

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        ini_path = env.get('RC_INI_FILE')
        if ini_path:
            _get_ini_settings(ini_path)
        # fallback method to read from TXN-ID stored data
        extras = get_extras_from_txn_id(path, txn_id)

    if not extras and txn_id:
        raise ValueError('SVN-POST-COMMIT: Failed to extract context data in called extras for hook execution')

    if extras.get('rc_internal_commit'):
        # special marker for internal commit, we don't call hooks client
        return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    # repo_size reporting is best-effort: failures must not break the commit
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
General Comments 0
You need to be logged in to leave comments. Login now