##// END OF EJS Templates
chore(release): merged default into stable branch
super-admin -
r1301:433d8917 merge v5.3.0 stable
parent child Browse files
Show More
@@ -1,5 +1,5 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 5.2.1
2 current_version = 5.3.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
@@ -1,102 +1,102 b''
1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
2
2
3 async-timeout==4.0.3
3 async-timeout==4.0.3
4 atomicwrites==1.4.1
4 atomicwrites==1.4.1
5 celery==5.3.6
5 celery==5.3.6
6 billiard==4.2.0
6 billiard==4.2.0
7 click==8.1.3
7 click==8.1.3
8 click-didyoumean==0.3.0
8 click-didyoumean==0.3.0
9 click==8.1.3
9 click==8.1.3
10 click-plugins==1.1.1
10 click-plugins==1.1.1
11 click==8.1.3
11 click==8.1.3
12 click-repl==0.2.0
12 click-repl==0.2.0
13 click==8.1.3
13 click==8.1.3
14 prompt_toolkit==3.0.47
14 prompt_toolkit==3.0.47
15 wcwidth==0.2.13
15 wcwidth==0.2.13
16 six==1.16.0
16 six==1.16.0
17 kombu==5.3.5
17 kombu==5.3.5
18 amqp==5.2.0
18 amqp==5.2.0
19 vine==5.1.0
19 vine==5.1.0
20 vine==5.1.0
20 vine==5.1.0
21 python-dateutil==2.8.2
21 python-dateutil==2.8.2
22 six==1.16.0
22 six==1.16.0
23 tzdata==2024.1
23 tzdata==2024.1
24 vine==5.1.0
24 vine==5.1.0
25 contextlib2==21.6.0
25 contextlib2==21.6.0
26 dogpile.cache==1.3.3
26 dogpile.cache==1.3.3
27 decorator==5.1.1
27 decorator==5.1.1
28 stevedore==5.1.0
28 stevedore==5.1.0
29 pbr==5.11.1
29 pbr==5.11.1
30 dulwich==0.21.6
30 dulwich==0.21.6
31 urllib3==1.26.14
31 urllib3==1.26.14
32 fsspec==2024.9.0
32 fsspec==2024.9.0
33 gunicorn==23.0.0
33 gunicorn==23.0.0
34 packaging==24.1
34 packaging==24.1
35 hg-evolve==11.1.3
35 hg-evolve==11.1.3
36 importlib-metadata==6.0.0
36 importlib-metadata==6.0.0
37 zipp==3.15.0
37 zipp==3.15.0
38 mercurial==6.7.4
38 mercurial==6.7.4
39 more-itertools==9.1.0
39 more-itertools==9.1.0
40 msgpack==1.0.8
40 msgpack==1.0.8
41 orjson==3.10.7
41 orjson==3.10.7
42 psutil==5.9.8
42 psutil==5.9.8
43 py==1.11.0
43 py==1.11.0
44 pygit2==1.13.3
44 pygit2==1.13.3
45 cffi==1.16.0
45 cffi==1.16.0
46 pycparser==2.21
46 pycparser==2.21
47 pygments==2.18.0
47 pygments==2.18.0
48 pyparsing==3.1.1
48 pyparsing==3.1.1
49 pyramid==2.0.2
49 pyramid==2.0.2
50 hupper==1.12
50 hupper==1.12
51 plaster==1.1.2
51 plaster==1.1.2
52 plaster-pastedeploy==1.0.1
52 plaster-pastedeploy==1.0.1
53 pastedeploy==3.1.0
53 pastedeploy==3.1.0
54 plaster==1.1.2
54 plaster==1.1.2
55 translationstring==1.4
55 translationstring==1.4
56 venusian==3.0.0
56 venusian==3.0.0
57 webob==1.8.7
57 webob==1.8.7
58 zope.deprecation==5.0.0
58 zope.deprecation==5.0.0
59 zope.interface==6.4.post2
59 zope.interface==7.0.3
60 redis==5.1.0
60 redis==5.1.0
61 async-timeout==4.0.3
61 async-timeout==4.0.3
62 repoze.lru==0.7
62 repoze.lru==0.7
63 s3fs==2024.9.0
63 s3fs==2024.9.0
64 aiobotocore==2.13.0
64 aiobotocore==2.13.0
65 aiohttp==3.9.5
65 aiohttp==3.9.5
66 aiosignal==1.3.1
66 aiosignal==1.3.1
67 frozenlist==1.4.1
67 frozenlist==1.4.1
68 attrs==22.2.0
68 attrs==22.2.0
69 frozenlist==1.4.1
69 frozenlist==1.4.1
70 multidict==6.0.5
70 multidict==6.0.5
71 yarl==1.9.4
71 yarl==1.9.4
72 idna==3.4
72 idna==3.4
73 multidict==6.0.5
73 multidict==6.0.5
74 aioitertools==0.11.0
74 aioitertools==0.11.0
75 botocore==1.34.106
75 botocore==1.34.106
76 jmespath==1.0.1
76 jmespath==1.0.1
77 python-dateutil==2.8.2
77 python-dateutil==2.8.2
78 six==1.16.0
78 six==1.16.0
79 urllib3==1.26.14
79 urllib3==1.26.14
80 wrapt==1.16.0
80 wrapt==1.16.0
81 aiohttp==3.9.5
81 aiohttp==3.9.5
82 aiosignal==1.3.1
82 aiosignal==1.3.1
83 frozenlist==1.4.1
83 frozenlist==1.4.1
84 attrs==22.2.0
84 attrs==22.2.0
85 frozenlist==1.4.1
85 frozenlist==1.4.1
86 multidict==6.0.5
86 multidict==6.0.5
87 yarl==1.9.4
87 yarl==1.9.4
88 idna==3.4
88 idna==3.4
89 multidict==6.0.5
89 multidict==6.0.5
90 fsspec==2024.9.0
90 fsspec==2024.9.0
91 scandir==1.10.0
91 scandir==1.10.0
92 setproctitle==1.3.3
92 setproctitle==1.3.3
93 subvertpy==0.11.0
93 subvertpy==0.11.0
94 waitress==3.0.0
94 waitress==3.0.0
95 wcwidth==0.2.13
95 wcwidth==0.2.13
96
96
97
97
98 ## test related requirements
98 ## test related requirements
99 #-r requirements_test.txt
99 #-r requirements_test.txt
100
100
101 ## uncomment to add the debug libraries
101 ## uncomment to add the debug libraries
102 #-r requirements_debug.txt
102 #-r requirements_debug.txt
@@ -1,1 +1,1 b''
1 5.2.1 No newline at end of file
1 5.3.0
@@ -1,302 +1,314 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import hashlib
18 import re
18 import re
19 import logging
19 import logging
20
20
21 from gunicorn.http.errors import NoMoreData
21 from gunicorn.http.errors import NoMoreData
22 from pyramid.config import Configurator
22 from pyramid.config import Configurator
23 from pyramid.response import Response, FileIter
23 from pyramid.response import Response, FileIter
24 from pyramid.httpexceptions import (
24 from pyramid.httpexceptions import (
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
25 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPUnprocessableEntity)
26 HTTPUnprocessableEntity)
27
27
28 from vcsserver.lib.ext_json import json
28 from vcsserver.lib.ext_json import json
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.lib.str_utils import safe_int
31 from vcsserver.lib.str_utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' # +json ?
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
38
39
39
40 def write_response_error(http_exception, text=None):
40 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
42 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
43 _exception.content_type = content_type
44 if text:
44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
46 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
47 http_exception, text)
48 return _exception
48 return _exception
49
49
50
50
51 class AuthHeaderRequired:
51 class AuthHeaderRequired:
52 """
52 """
53 Decorator to check if request has proper auth-header
53 Decorator to check if request has proper auth-header
54 """
54 """
55
55
56 def __call__(self, func):
56 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
57 return get_cython_compat_decorator(self.__wrapper, func)
58
58
59 def __wrapper(self, func, *fargs, **fkwargs):
59 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
60 request = fargs[1]
61 auth = request.authorization
61 auth = request.authorization
62 if not auth:
62 if not auth:
63 log.debug('No auth header found, returning 403')
63 return write_response_error(HTTPForbidden)
64 return write_response_error(HTTPForbidden)
64 return func(*fargs[1:], **fkwargs)
65 return func(*fargs[1:], **fkwargs)
65
66
66
67
67 # views
68 # views
68
69
69 def lfs_objects(request):
70 def lfs_objects(request):
70 # indicate not supported, V1 API
71 # indicate not supported, V1 API
71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 log.warning('LFS: v1 api not supported, reporting it back to client')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73
74
74
75
75 @AuthHeaderRequired()
76 @AuthHeaderRequired()
76 def lfs_objects_batch(request):
77 def lfs_objects_batch(request):
77 """
78 """
78 The client sends the following information to the Batch endpoint to transfer some objects:
79 The client sends the following information to the Batch endpoint to transfer some objects:
79
80
80 operation - Should be download or upload.
81 operation - Should be download or upload.
81 transfers - An optional Array of String identifiers for transfer
82 transfers - An optional Array of String identifiers for transfer
82 adapters that the client has configured. If omitted, the basic
83 adapters that the client has configured. If omitted, the basic
83 transfer adapter MUST be assumed by the server.
84 transfer adapter MUST be assumed by the server.
84 objects - An Array of objects to download.
85 objects - An Array of objects to download.
85 oid - String OID of the LFS object.
86 oid - String OID of the LFS object.
86 size - Integer byte size of the LFS object. Must be at least zero.
87 size - Integer byte size of the LFS object. Must be at least zero.
87 """
88 """
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 auth = request.authorization
90 auth = request.authorization
90 repo = request.matchdict.get('repo')
91 repo = request.matchdict.get('repo')
91 data = request.json
92 data = request.json
92 operation = data.get('operation')
93 operation = data.get('operation')
93 http_scheme = request.registry.git_lfs_http_scheme
94 http_scheme = request.registry.git_lfs_http_scheme
94
95
95 if operation not in ('download', 'upload'):
96 if operation not in ('download', 'upload'):
96 log.debug('LFS: unsupported operation:%s', operation)
97 log.debug('LFS: unsupported operation:%s', operation)
97 return write_response_error(
98 return write_response_error(
98 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
99 HTTPBadRequest, f'unsupported operation mode: `{operation}`')
99
100
100 if 'objects' not in data:
101 if 'objects' not in data:
101 log.debug('LFS: missing objects data')
102 log.debug('LFS: missing objects data')
102 return write_response_error(
103 return write_response_error(
103 HTTPBadRequest, 'missing objects data')
104 HTTPBadRequest, 'missing objects data')
104
105
105 log.debug('LFS: handling operation of type: %s', operation)
106 log.debug('LFS: handling operation of type: %s', operation)
106
107
107 objects = []
108 objects = []
108 for o in data['objects']:
109 for o in data['objects']:
109 try:
110 try:
110 oid = o['oid']
111 oid = o['oid']
111 obj_size = o['size']
112 obj_size = o['size']
112 except KeyError:
113 except KeyError:
113 log.exception('LFS, failed to extract data')
114 log.exception('LFS, failed to extract data')
114 return write_response_error(
115 return write_response_error(
115 HTTPBadRequest, 'unsupported data in objects')
116 HTTPBadRequest, 'unsupported data in objects')
116
117
117 obj_data = {'oid': oid}
118 obj_data = {'oid': oid}
118 if http_scheme == 'http':
119 if http_scheme == 'http':
119 # Note(marcink): when using http, we might have a custom port
120 # Note(marcink): when using http, we might have a custom port
120 # so we skip setting it to http, url dispatch then wont generate a port in URL
121 # so we skip setting it to http, url dispatch then wont generate a port in URL
121 # for development we need this
122 # for development we need this
122 http_scheme = None
123 http_scheme = None
123
124
124 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
125 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
125 _scheme=http_scheme)
126 _scheme=http_scheme)
126 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
127 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
127 _scheme=http_scheme)
128 _scheme=http_scheme)
128 store = LFSOidStore(
129 store = LFSOidStore(
129 oid, repo, store_location=request.registry.git_lfs_store_path)
130 oid, repo, store_location=request.registry.git_lfs_store_path)
130 handler = OidHandler(
131 handler = OidHandler(
131 store, repo, auth, oid, obj_size, obj_data,
132 store, repo, auth, oid, obj_size, obj_data,
132 obj_href, obj_verify_href)
133 obj_href, obj_verify_href)
133
134
134 # this verifies also OIDs
135 # this verifies also OIDs
135 actions, errors = handler.exec_operation(operation)
136 actions, errors = handler.exec_operation(operation)
136 if errors:
137 if errors:
137 log.warning('LFS: got following errors: %s', errors)
138 log.warning('LFS: got following errors: %s', errors)
138 obj_data['errors'] = errors
139 obj_data['errors'] = errors
139
140
140 if actions:
141 if actions:
141 obj_data['actions'] = actions
142 obj_data['actions'] = actions
142
143
143 obj_data['size'] = obj_size
144 obj_data['size'] = obj_size
144 obj_data['authenticated'] = True
145 obj_data['authenticated'] = True
145 objects.append(obj_data)
146 objects.append(obj_data)
146
147
147 result = {'objects': objects, 'transfer': 'basic'}
148 result = {'objects': objects, 'transfer': 'basic'}
148 log.debug('LFS Response %s', safe_result(result))
149 log.debug('LFS Response %s', safe_result(result))
149
150
150 return result
151 return result
151
152
152
153
153 def lfs_objects_oid_upload(request):
154 def lfs_objects_oid_upload(request):
154 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
155 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
155 repo = request.matchdict.get('repo')
156 repo = request.matchdict.get('repo')
156 oid = request.matchdict.get('oid')
157 oid = request.matchdict.get('oid')
157 store = LFSOidStore(
158 store = LFSOidStore(
158 oid, repo, store_location=request.registry.git_lfs_store_path)
159 oid, repo, store_location=request.registry.git_lfs_store_path)
159 engine = store.get_engine(mode='wb')
160 engine = store.get_engine(mode='wb')
160 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
161 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
161
162
163 # validate if OID is not by any chance already in the store
164 if store.has_oid():
165 log.debug('LFS: oid %s exists in store', oid)
166 return {'upload': 'ok', 'state': 'in-store'}
167
162 body = request.environ['wsgi.input']
168 body = request.environ['wsgi.input']
163
169
170 digest = hashlib.sha256()
164 with engine as f:
171 with engine as f:
165 blksize = 64 * 1024 # 64kb
172 blksize = 64 * 1024 # 64kb
166 while True:
173 while True:
167 # read in chunks as stream comes in from Gunicorn
174 # read in chunks as stream comes in from Gunicorn
168 # this is a specific Gunicorn support function.
175 # this is a specific Gunicorn support function.
169 # might work differently on waitress
176 # might work differently on waitress
170 try:
177 try:
171 chunk = body.read(blksize)
178 chunk = body.read(blksize)
172 except NoMoreData:
179 except NoMoreData:
173 chunk = None
180 chunk = None
174
181
175 if not chunk:
182 if not chunk:
176 break
183 break
184 f.write(chunk)
185 digest.update(chunk)
177
186
178 f.write(chunk)
187 hex_digest = digest.hexdigest()
188 digest_check = hex_digest == oid
189 if not digest_check:
190 engine.cleanup() # trigger cleanup so we don't save mismatch OID into the store
191 return write_response_error(
192 HTTPBadRequest, f'oid {oid} does not match expected sha {hex_digest}')
179
193
180 return {'upload': 'ok'}
194 return {'upload': 'ok', 'state': 'written'}
181
195
182
196
183 def lfs_objects_oid_download(request):
197 def lfs_objects_oid_download(request):
184 repo = request.matchdict.get('repo')
198 repo = request.matchdict.get('repo')
185 oid = request.matchdict.get('oid')
199 oid = request.matchdict.get('oid')
186
200
187 store = LFSOidStore(
201 store = LFSOidStore(
188 oid, repo, store_location=request.registry.git_lfs_store_path)
202 oid, repo, store_location=request.registry.git_lfs_store_path)
189 if not store.has_oid():
203 if not store.has_oid():
190 log.debug('LFS: oid %s does not exists in store', oid)
204 log.debug('LFS: oid %s does not exists in store', oid)
191 return write_response_error(
205 return write_response_error(
192 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
206 HTTPNotFound, f'requested file with oid `{oid}` not found in store')
193
207
194 # TODO(marcink): support range header ?
208 # TODO(marcink): support range header ?
195 # Range: bytes=0-, `bytes=(\d+)\-.*`
209 # Range: bytes=0-, `bytes=(\d+)\-.*`
196
210
197 f = open(store.oid_path, 'rb')
211 f = open(store.oid_path, 'rb')
198 response = Response(
212 response = Response(
199 content_type='application/octet-stream', app_iter=FileIter(f))
213 content_type='application/octet-stream', app_iter=FileIter(f))
200 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
214 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
201 return response
215 return response
202
216
203
217
204 def lfs_objects_verify(request):
218 def lfs_objects_verify(request):
205 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
219 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
206 repo = request.matchdict.get('repo')
220 repo = request.matchdict.get('repo')
207
221
208 data = request.json
222 data = request.json
209 oid = data.get('oid')
223 oid = data.get('oid')
210 size = safe_int(data.get('size'))
224 size = safe_int(data.get('size'))
211
225
212 if not (oid and size):
226 if not (oid and size):
213 return write_response_error(
227 return write_response_error(
214 HTTPBadRequest, 'missing oid and size in request data')
228 HTTPBadRequest, 'missing oid and size in request data')
215
229
216 store = LFSOidStore(
230 store = LFSOidStore(
217 oid, repo, store_location=request.registry.git_lfs_store_path)
231 oid, repo, store_location=request.registry.git_lfs_store_path)
218 if not store.has_oid():
232 if not store.has_oid():
219 log.debug('LFS: oid %s does not exists in store', oid)
233 log.debug('LFS: oid %s does not exists in store', oid)
220 return write_response_error(
234 return write_response_error(
221 HTTPNotFound, f'oid `{oid}` does not exists in store')
235 HTTPNotFound, f'oid `{oid}` does not exists in store')
222
236
223 store_size = store.size_oid()
237 store_size = store.size_oid()
224 if store_size != size:
238 if store_size != size:
225 msg = 'requested file size mismatch store size:{} requested:{}'.format(
239 msg = f'requested file size mismatch store size:{store_size} requested:{size}'
226 store_size, size)
240 return write_response_error(HTTPUnprocessableEntity, msg)
227 return write_response_error(
228 HTTPUnprocessableEntity, msg)
229
241
230 return {'message': {'size': 'ok', 'in_store': 'ok'}}
242 return {'message': {'size': store_size, 'oid': oid}}
231
243
232
244
233 def lfs_objects_lock(request):
245 def lfs_objects_lock(request):
234 return write_response_error(
246 return write_response_error(
235 HTTPNotImplemented, 'GIT LFS locking api not supported')
247 HTTPNotImplemented, 'GIT LFS locking api not supported')
236
248
237
249
238 def not_found(request):
250 def not_found(request):
239 return write_response_error(
251 return write_response_error(
240 HTTPNotFound, 'request path not found')
252 HTTPNotFound, 'request path not found')
241
253
242
254
243 def lfs_disabled(request):
255 def lfs_disabled(request):
244 return write_response_error(
256 return write_response_error(
245 HTTPNotImplemented, 'GIT LFS disabled for this repo')
257 HTTPNotImplemented, 'GIT LFS disabled for this repo')
246
258
247
259
248 def git_lfs_app(config):
260 def git_lfs_app(config):
249
261
250 # v1 API deprecation endpoint
262 # v1 API deprecation endpoint
251 config.add_route('lfs_objects',
263 config.add_route('lfs_objects',
252 '/{repo:.*?[^/]}/info/lfs/objects')
264 '/{repo:.*?[^/]}/info/lfs/objects')
253 config.add_view(lfs_objects, route_name='lfs_objects',
265 config.add_view(lfs_objects, route_name='lfs_objects',
254 request_method='POST', renderer='json')
266 request_method='POST', renderer='json')
255
267
256 # locking API
268 # locking API
257 config.add_route('lfs_objects_lock',
269 config.add_route('lfs_objects_lock',
258 '/{repo:.*?[^/]}/info/lfs/locks')
270 '/{repo:.*?[^/]}/info/lfs/locks')
259 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
271 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
260 request_method=('POST', 'GET'), renderer='json')
272 request_method=('POST', 'GET'), renderer='json')
261
273
262 config.add_route('lfs_objects_lock_verify',
274 config.add_route('lfs_objects_lock_verify',
263 '/{repo:.*?[^/]}/info/lfs/locks/verify')
275 '/{repo:.*?[^/]}/info/lfs/locks/verify')
264 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
276 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
265 request_method=('POST', 'GET'), renderer='json')
277 request_method=('POST', 'GET'), renderer='json')
266
278
267 # batch API
279 # batch API
268 config.add_route('lfs_objects_batch',
280 config.add_route('lfs_objects_batch',
269 '/{repo:.*?[^/]}/info/lfs/objects/batch')
281 '/{repo:.*?[^/]}/info/lfs/objects/batch')
270 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
282 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
271 request_method='POST', renderer='json')
283 request_method='POST', renderer='json')
272
284
273 # oid upload/download API
285 # oid upload/download API
274 config.add_route('lfs_objects_oid',
286 config.add_route('lfs_objects_oid',
275 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
287 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
276 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
288 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
277 request_method='PUT', renderer='json')
289 request_method='PUT', renderer='json')
278 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
290 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
279 request_method='GET', renderer='json')
291 request_method='GET', renderer='json')
280
292
281 # verification API
293 # verification API
282 config.add_route('lfs_objects_verify',
294 config.add_route('lfs_objects_verify',
283 '/{repo:.*?[^/]}/info/lfs/verify')
295 '/{repo:.*?[^/]}/info/lfs/verify')
284 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
296 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
285 request_method='POST', renderer='json')
297 request_method='POST', renderer='json')
286
298
287 # not found handler for API
299 # not found handler for API
288 config.add_notfound_view(not_found, renderer='json')
300 config.add_notfound_view(not_found, renderer='json')
289
301
290
302
291 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
303 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
292 config = Configurator()
304 config = Configurator()
293 if git_lfs_enabled:
305 if git_lfs_enabled:
294 config.include(git_lfs_app)
306 config.include(git_lfs_app)
295 config.registry.git_lfs_store_path = git_lfs_store_path
307 config.registry.git_lfs_store_path = git_lfs_store_path
296 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
308 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
297 else:
309 else:
298 # not found handler for API, reporting disabled LFS support
310 # not found handler for API, reporting disabled LFS support
299 config.add_notfound_view(lfs_disabled, renderer='json')
311 config.add_notfound_view(lfs_disabled, renderer='json')
300
312
301 app = config.make_wsgi_app()
313 app = config.make_wsgi_app()
302 return app
314 return app
@@ -1,177 +1,185 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import logging
20 import logging
21 from collections import OrderedDict
21 from collections import OrderedDict
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 class OidHandler:
26 class OidHandler:
27
27
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 obj_verify_href=None):
29 obj_verify_href=None):
30 self.current_store = store
30 self.current_store = store
31 self.repo_name = repo_name
31 self.repo_name = repo_name
32 self.auth = auth
32 self.auth = auth
33 self.oid = oid
33 self.oid = oid
34 self.obj_size = obj_size
34 self.obj_size = obj_size
35 self.obj_data = obj_data
35 self.obj_data = obj_data
36 self.obj_href = obj_href
36 self.obj_href = obj_href
37 self.obj_verify_href = obj_verify_href
37 self.obj_verify_href = obj_verify_href
38
38
39 def get_store(self, mode=None):
39 def get_store(self, mode=None):
40 return self.current_store
40 return self.current_store
41
41
42 def get_auth(self):
42 def get_auth(self):
43 """returns auth header for re-use in upload/download"""
43 """returns auth header for re-use in upload/download"""
44 return " ".join(self.auth)
44 return " ".join(self.auth)
45
45
46 def download(self):
46 def download(self):
47
47
48 store = self.get_store()
48 store = self.get_store()
49 response = None
49 response = None
50 has_errors = None
50 has_errors = None
51
51
52 if not store.has_oid():
52 if not store.has_oid():
53 # error reply back to client that something is wrong with dl
53 # error reply back to client that something is wrong with dl
54 err_msg = f'object: {store.oid} does not exist in store'
54 err_msg = f'object: {store.oid} does not exist in store'
55 has_errors = OrderedDict(
55 has_errors = OrderedDict(
56 error=OrderedDict(
56 error=OrderedDict(
57 code=404,
57 code=404,
58 message=err_msg
58 message=err_msg
59 )
59 )
60 )
60 )
61
61
62 download_action = OrderedDict(
62 download_action = OrderedDict(
63 href=self.obj_href,
63 href=self.obj_href,
64 header=OrderedDict([("Authorization", self.get_auth())])
64 header=OrderedDict([("Authorization", self.get_auth())])
65 )
65 )
66 if not has_errors:
66 if not has_errors:
67 response = OrderedDict(download=download_action)
67 response = OrderedDict(download=download_action)
68 return response, has_errors
68 return response, has_errors
69
69
70 def upload(self, skip_existing=True):
70 def upload(self, skip_existing=True):
71 """
71 """
72 Write upload action for git-lfs server
72 Write upload action for git-lfs server
73 """
73 """
74
74
75 store = self.get_store()
75 store = self.get_store()
76 response = None
76 response = None
77 has_errors = None
77 has_errors = None
78
78
79 # verify if we have the OID before, if we do, reply with empty
79 # verify if we have the OID before, if we do, reply with empty
80 if store.has_oid():
80 if store.has_oid():
81 log.debug('LFS: store already has oid %s', store.oid)
81 log.debug('LFS: store already has oid %s', store.oid)
82
82
83 # validate size
83 # validate size
84 store_size = store.size_oid()
84 store_size = store.size_oid()
85 size_match = store_size == self.obj_size
85 size_match = store_size == self.obj_size
86 if not size_match:
86 if not size_match:
87 log.warning(
87 log.warning(
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 self.oid, store_size, self.obj_size)
89 self.oid, store_size, self.obj_size)
90 elif skip_existing:
90 elif skip_existing:
91 log.debug('LFS: skipping further action as oid is existing')
91 log.debug('LFS: skipping further action as oid is existing')
92 return response, has_errors
92 return response, has_errors
93
93
94 chunked = ("Transfer-Encoding", "chunked")
94 chunked = ("Transfer-Encoding", "chunked")
95 upload_action = OrderedDict(
95 upload_action = OrderedDict(
96 href=self.obj_href,
96 href=self.obj_href,
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 )
98 )
99 if not has_errors:
99 if not has_errors:
100 response = OrderedDict(upload=upload_action)
100 response = OrderedDict(upload=upload_action)
101 # if specified in handler, return the verification endpoint
101 # if specified in handler, return the verification endpoint
102 if self.obj_verify_href:
102 if self.obj_verify_href:
103 verify_action = OrderedDict(
103 verify_action = OrderedDict(
104 href=self.obj_verify_href,
104 href=self.obj_verify_href,
105 header=OrderedDict([("Authorization", self.get_auth())])
105 header=OrderedDict([("Authorization", self.get_auth())])
106 )
106 )
107 response['verify'] = verify_action
107 response['verify'] = verify_action
108 return response, has_errors
108 return response, has_errors
109
109
110 def exec_operation(self, operation, *args, **kwargs):
110 def exec_operation(self, operation, *args, **kwargs):
111 handler = getattr(self, operation)
111 handler = getattr(self, operation)
112 log.debug('LFS: handling request using %s handler', handler)
112 log.debug('LFS: handling request using %s handler', handler)
113 return handler(*args, **kwargs)
113 return handler(*args, **kwargs)
114
114
115
115
116 class LFSOidStore:
116 class LFSOidStore:
117
117
118 def __init__(self, oid, repo, store_location=None):
118 def __init__(self, oid, repo, store_location=None):
119 self.oid = oid
119 self.oid = oid
120 self.repo = repo
120 self.repo = repo
121 defined_store_path = store_location or self.get_default_store()
121 defined_store_path = store_location or self.get_default_store()
122 self.store_suffix = f"/objects/{oid[:2]}/{oid[2:4]}"
122 self.store_suffix = f"/objects/{oid[:2]}/{oid[2:4]}"
123 self.store_path = f"{defined_store_path.rstrip('/')}{self.store_suffix}"
123 self.store_path = f"{defined_store_path.rstrip('/')}{self.store_suffix}"
124 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
124 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
125 self.oid_path = os.path.join(self.store_path, oid)
125 self.oid_path = os.path.join(self.store_path, oid)
126 self.fd = None
126 self.fd = None
127
127
128 def get_engine(self, mode):
128 def get_engine(self, mode):
129 """
129 """
130 engine = .get_engine(mode='wb')
130 engine = .get_engine(mode='wb')
131 with engine as f:
131 with engine as f:
132 f.write('...')
132 f.write('...')
133 """
133 """
134
134
135 class StoreEngine:
135 class StoreEngine:
136 _cleanup = None
136 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
137 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
137 self.mode = mode
138 self.mode = mode
138 self.store_path = store_path
139 self.store_path = store_path
139 self.oid_path = oid_path
140 self.oid_path = oid_path
140 self.tmp_oid_path = tmp_oid_path
141 self.tmp_oid_path = tmp_oid_path
141
142
143 def cleanup(self):
144 self._cleanup = True
145
142 def __enter__(self):
146 def __enter__(self):
143 if not os.path.isdir(self.store_path):
147 if not os.path.isdir(self.store_path):
144 os.makedirs(self.store_path)
148 os.makedirs(self.store_path)
145
149
146 # TODO(marcink): maybe write metadata here with size/oid ?
150 # TODO(marcink): maybe write metadata here with size/oid ?
147 fd = open(self.tmp_oid_path, self.mode)
151 fd = open(self.tmp_oid_path, self.mode)
148 self.fd = fd
152 self.fd = fd
149 return fd
153 return fd
150
154
151 def __exit__(self, exc_type, exc_value, traceback):
155 def __exit__(self, exc_type, exc_value, traceback):
152 # close tmp file, and rename to final destination
153 self.fd.close()
156 self.fd.close()
154 shutil.move(self.tmp_oid_path, self.oid_path)
157
158 if self._cleanup is None:
159 # close tmp file, and rename to final destination
160 shutil.move(self.tmp_oid_path, self.oid_path)
161 else:
162 os.remove(self.tmp_oid_path)
155
163
156 return StoreEngine(
164 return StoreEngine(
157 mode, self.store_path, self.oid_path, self.tmp_oid_path)
165 mode, self.store_path, self.oid_path, self.tmp_oid_path)
158
166
159 def get_default_store(self):
167 def get_default_store(self):
160 """
168 """
161 Default store, consistent with defaults of Mercurial large files store
169 Default store, consistent with defaults of Mercurial large files store
162 which is /home/username/.cache/largefiles
170 which is /home/username/.cache/largefiles
163 """
171 """
164 user_home = os.path.expanduser("~")
172 user_home = os.path.expanduser("~")
165 return os.path.join(user_home, '.cache', 'lfs-store')
173 return os.path.join(user_home, '.cache', 'lfs-store')
166
174
167 def has_oid(self):
175 def has_oid(self):
168 return os.path.exists(os.path.join(self.store_path, self.oid))
176 return os.path.exists(os.path.join(self.store_path, self.oid))
169
177
170 def size_oid(self):
178 def size_oid(self):
171 size = -1
179 size = -1
172
180
173 if self.has_oid():
181 if self.has_oid():
174 oid = os.path.join(self.store_path, self.oid)
182 oid = os.path.join(self.store_path, self.oid)
175 size = os.stat(oid).st_size
183 size = os.stat(oid).st_size
176
184
177 return size
185 return size
@@ -1,274 +1,310 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21
21
22 from vcsserver.lib.ext_json import json
22 from vcsserver.lib.ext_json import json
23 from vcsserver.lib.str_utils import safe_bytes
23 from vcsserver.lib.str_utils import safe_bytes
24 from vcsserver.git_lfs.app import create_app
24 from vcsserver.git_lfs.app import create_app
25 from vcsserver.git_lfs.lib import LFSOidStore
25 from vcsserver.git_lfs.lib import LFSOidStore
26
26
27
27
28 @pytest.fixture(scope='function')
28 @pytest.fixture(scope='function')
29 def git_lfs_app(tmpdir):
29 def git_lfs_app(tmpdir):
30 custom_app = WebObTestApp(create_app(
30 custom_app = WebObTestApp(create_app(
31 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
31 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
32 git_lfs_http_scheme='http'))
32 git_lfs_http_scheme='http'))
33 custom_app._store = str(tmpdir)
33 custom_app._store = str(tmpdir)
34 return custom_app
34 return custom_app
35
35
36
36
37 @pytest.fixture(scope='function')
37 @pytest.fixture(scope='function')
38 def git_lfs_https_app(tmpdir):
38 def git_lfs_https_app(tmpdir):
39 custom_app = WebObTestApp(create_app(
39 custom_app = WebObTestApp(create_app(
40 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
40 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
41 git_lfs_http_scheme='https'))
41 git_lfs_http_scheme='https'))
42 custom_app._store = str(tmpdir)
42 custom_app._store = str(tmpdir)
43 return custom_app
43 return custom_app
44
44
45
45
46 @pytest.fixture()
46 @pytest.fixture()
47 def http_auth():
47 def http_auth():
48 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
48 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
49
49
50
50
51 class TestLFSApplication:
51 class TestLFSApplication:
52
52
53 def test_app_wrong_path(self, git_lfs_app):
53 def test_app_wrong_path(self, git_lfs_app):
54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
54 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
55
55
56 def test_app_deprecated_endpoint(self, git_lfs_app):
56 def test_app_deprecated_endpoint(self, git_lfs_app):
57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
57 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
58 assert response.status_code == 501
58 assert response.status_code == 501
59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
59 assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
60
60
61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
61 def test_app_lock_verify_api_not_available(self, git_lfs_app):
62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
62 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
63 assert response.status_code == 501
63 assert response.status_code == 501
64 assert json.loads(response.text) == {
64 assert json.loads(response.text) == {
65 'message': 'GIT LFS locking api not supported'}
65 'message': 'GIT LFS locking api not supported'}
66
66
67 def test_app_lock_api_not_available(self, git_lfs_app):
67 def test_app_lock_api_not_available(self, git_lfs_app):
68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
68 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
69 assert response.status_code == 501
69 assert response.status_code == 501
70 assert json.loads(response.text) == {
70 assert json.loads(response.text) == {
71 'message': 'GIT LFS locking api not supported'}
71 'message': 'GIT LFS locking api not supported'}
72
72
73 def test_app_batch_api_missing_auth(self, git_lfs_app):
73 def test_app_batch_api_missing_auth(self, git_lfs_app):
74 git_lfs_app.post_json(
74 git_lfs_app.post_json(
75 '/repo/info/lfs/objects/batch', params={}, status=403)
75 '/repo/info/lfs/objects/batch', params={}, status=403)
76
76
77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
77 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
78 response = git_lfs_app.post_json(
78 response = git_lfs_app.post_json(
79 '/repo/info/lfs/objects/batch', params={}, status=400,
79 '/repo/info/lfs/objects/batch', params={}, status=400,
80 extra_environ=http_auth)
80 extra_environ=http_auth)
81 assert json.loads(response.text) == {
81 assert json.loads(response.text) == {
82 'message': 'unsupported operation mode: `None`'}
82 'message': 'unsupported operation mode: `None`'}
83
83
84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
84 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
85 response = git_lfs_app.post_json(
85 response = git_lfs_app.post_json(
86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
86 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
87 status=400, extra_environ=http_auth)
87 status=400, extra_environ=http_auth)
88 assert json.loads(response.text) == {
88 assert json.loads(response.text) == {
89 'message': 'missing objects data'}
89 'message': 'missing objects data'}
90
90
91 def test_app_batch_api_unsupported_data_in_objects(
91 def test_app_batch_api_unsupported_data_in_objects(
92 self, git_lfs_app, http_auth):
92 self, git_lfs_app, http_auth):
93 params = {'operation': 'download',
93 params = {'operation': 'download',
94 'objects': [{}]}
94 'objects': [{}]}
95 response = git_lfs_app.post_json(
95 response = git_lfs_app.post_json(
96 '/repo/info/lfs/objects/batch', params=params, status=400,
96 '/repo/info/lfs/objects/batch', params=params, status=400,
97 extra_environ=http_auth)
97 extra_environ=http_auth)
98 assert json.loads(response.text) == {
98 assert json.loads(response.text) == {
99 'message': 'unsupported data in objects'}
99 'message': 'unsupported data in objects'}
100
100
101 def test_app_batch_api_download_missing_object(
101 def test_app_batch_api_download_missing_object(
102 self, git_lfs_app, http_auth):
102 self, git_lfs_app, http_auth):
103 params = {'operation': 'download',
103 params = {
104 'objects': [{'oid': '123', 'size': '1024'}]}
104 'operation': 'download',
105 'objects': [{'oid': '123', 'size': '1024'}]
106 }
105 response = git_lfs_app.post_json(
107 response = git_lfs_app.post_json(
106 '/repo/info/lfs/objects/batch', params=params,
108 '/repo/info/lfs/objects/batch', params=params,
107 extra_environ=http_auth)
109 extra_environ=http_auth)
108
110
109 expected_objects = [
111 expected_objects = [
110 {'authenticated': True,
112 {
111 'errors': {'error': {
113 'oid': '123',
112 'code': 404,
114 'size': '1024',
113 'message': 'object: 123 does not exist in store'}},
115 'authenticated': True,
114 'oid': '123',
116 'errors': {'error': {'code': 404, 'message': 'object: 123 does not exist in store'}},
115 'size': '1024'}
117 }
116 ]
118 ]
119
117 assert json.loads(response.text) == {
120 assert json.loads(response.text) == {
118 'objects': expected_objects, 'transfer': 'basic'}
121 'objects': expected_objects,
122 'transfer': 'basic'
123 }
119
124
120 def test_app_batch_api_download(self, git_lfs_app, http_auth):
125 def test_app_batch_api_download(self, git_lfs_app, http_auth):
121 oid = '456'
126 oid = '456'
122 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
127 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
123 if not os.path.isdir(os.path.dirname(oid_path)):
128 if not os.path.isdir(os.path.dirname(oid_path)):
124 os.makedirs(os.path.dirname(oid_path))
129 os.makedirs(os.path.dirname(oid_path))
125 with open(oid_path, 'wb') as f:
130 with open(oid_path, 'wb') as f:
126 f.write(safe_bytes('OID_CONTENT'))
131 f.write(safe_bytes('OID_CONTENT'))
127
132
128 params = {'operation': 'download',
133 params = {'operation': 'download',
129 'objects': [{'oid': oid, 'size': '1024'}]}
134 'objects': [{'oid': oid, 'size': '1024'}]}
130 response = git_lfs_app.post_json(
135 response = git_lfs_app.post_json(
131 '/repo/info/lfs/objects/batch', params=params,
136 '/repo/info/lfs/objects/batch', params=params,
132 extra_environ=http_auth)
137 extra_environ=http_auth)
133
138
134 expected_objects = [
139 expected_objects = [
135 {'authenticated': True,
140 {'authenticated': True,
136 'actions': {
141 'actions': {
137 'download': {
142 'download': {
138 'header': {'Authorization': 'Basic XXXXX'},
143 'header': {'Authorization': 'Basic XXXXX'},
139 'href': 'http://localhost/repo/info/lfs/objects/456'},
144 'href': 'http://localhost/repo/info/lfs/objects/456'},
140 },
145 },
141 'oid': '456',
146 'oid': '456',
142 'size': '1024'}
147 'size': '1024'}
143 ]
148 ]
144 assert json.loads(response.text) == {
149 assert json.loads(response.text) == {
145 'objects': expected_objects, 'transfer': 'basic'}
150 'objects': expected_objects,
151 'transfer': 'basic'
152 }
146
153
147 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
154 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
148 params = {'operation': 'upload',
155 params = {'operation': 'upload',
149 'objects': [{'oid': '123', 'size': '1024'}]}
156 'objects': [{'oid': '123', 'size': '1024'}]}
150 response = git_lfs_app.post_json(
157 response = git_lfs_app.post_json(
151 '/repo/info/lfs/objects/batch', params=params,
158 '/repo/info/lfs/objects/batch', params=params,
152 extra_environ=http_auth)
159 extra_environ=http_auth)
153 expected_objects = [
160 expected_objects = [
154 {'authenticated': True,
161 {
155 'actions': {
162 'authenticated': True,
156 'upload': {
163 'actions': {
157 'header': {'Authorization': 'Basic XXXXX',
164 'upload': {
158 'Transfer-Encoding': 'chunked'},
165 'header': {
159 'href': 'http://localhost/repo/info/lfs/objects/123'},
166 'Authorization': 'Basic XXXXX',
160 'verify': {
167 'Transfer-Encoding': 'chunked'
161 'header': {'Authorization': 'Basic XXXXX'},
168 },
162 'href': 'http://localhost/repo/info/lfs/verify'}
169 'href': 'http://localhost/repo/info/lfs/objects/123'
163 },
170 },
164 'oid': '123',
171 'verify': {
165 'size': '1024'}
172 'header': {
173 'Authorization': 'Basic XXXXX'
174 },
175 'href': 'http://localhost/repo/info/lfs/verify'
176 }
177 },
178 'oid': '123',
179 'size': '1024'
180 }
166 ]
181 ]
167 assert json.loads(response.text) == {
182 assert json.loads(response.text) == {
168 'objects': expected_objects, 'transfer': 'basic'}
183 'objects': expected_objects,
184 'transfer': 'basic'
185 }
169
186
170 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
187 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
171 params = {'operation': 'upload',
188 params = {'operation': 'upload',
172 'objects': [{'oid': '123', 'size': '1024'}]}
189 'objects': [{'oid': '123', 'size': '1024'}]}
173 response = git_lfs_https_app.post_json(
190 response = git_lfs_https_app.post_json(
174 '/repo/info/lfs/objects/batch', params=params,
191 '/repo/info/lfs/objects/batch', params=params,
175 extra_environ=http_auth)
192 extra_environ=http_auth)
176 expected_objects = [
193 expected_objects = [
177 {'authenticated': True,
194 {'authenticated': True,
178 'actions': {
195 'actions': {
179 'upload': {
196 'upload': {
180 'header': {'Authorization': 'Basic XXXXX',
197 'header': {'Authorization': 'Basic XXXXX',
181 'Transfer-Encoding': 'chunked'},
198 'Transfer-Encoding': 'chunked'},
182 'href': 'https://localhost/repo/info/lfs/objects/123'},
199 'href': 'https://localhost/repo/info/lfs/objects/123'},
183 'verify': {
200 'verify': {
184 'header': {'Authorization': 'Basic XXXXX'},
201 'header': {'Authorization': 'Basic XXXXX'},
185 'href': 'https://localhost/repo/info/lfs/verify'}
202 'href': 'https://localhost/repo/info/lfs/verify'}
186 },
203 },
187 'oid': '123',
204 'oid': '123',
188 'size': '1024'}
205 'size': '1024'}
189 ]
206 ]
190 assert json.loads(response.text) == {
207 assert json.loads(response.text) == {
191 'objects': expected_objects, 'transfer': 'basic'}
208 'objects': expected_objects, 'transfer': 'basic'}
192
209
193 def test_app_verify_api_missing_data(self, git_lfs_app):
210 def test_app_verify_api_missing_data(self, git_lfs_app):
194 params = {'oid': 'missing'}
211 params = {'oid': 'missing'}
195 response = git_lfs_app.post_json(
212 response = git_lfs_app.post_json(
196 '/repo/info/lfs/verify', params=params,
213 '/repo/info/lfs/verify', params=params,
197 status=400)
214 status=400)
198
215
199 assert json.loads(response.text) == {
216 assert json.loads(response.text) == {
200 'message': 'missing oid and size in request data'}
217 'message': 'missing oid and size in request data'}
201
218
202 def test_app_verify_api_missing_obj(self, git_lfs_app):
219 def test_app_verify_api_missing_obj(self, git_lfs_app):
203 params = {'oid': 'missing', 'size': '1024'}
220 params = {'oid': 'missing', 'size': '1024'}
204 response = git_lfs_app.post_json(
221 response = git_lfs_app.post_json(
205 '/repo/info/lfs/verify', params=params,
222 '/repo/info/lfs/verify', params=params,
206 status=404)
223 status=404)
207
224
208 assert json.loads(response.text) == {
225 assert json.loads(response.text) == {
209 'message': 'oid `missing` does not exists in store'}
226 'message': 'oid `missing` does not exists in store'
227 }
210
228
211 def test_app_verify_api_size_mismatch(self, git_lfs_app):
229 def test_app_verify_api_size_mismatch(self, git_lfs_app):
212 oid = 'existing'
230 oid = 'existing'
213 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
231 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
214 if not os.path.isdir(os.path.dirname(oid_path)):
232 if not os.path.isdir(os.path.dirname(oid_path)):
215 os.makedirs(os.path.dirname(oid_path))
233 os.makedirs(os.path.dirname(oid_path))
216 with open(oid_path, 'wb') as f:
234 with open(oid_path, 'wb') as f:
217 f.write(safe_bytes('OID_CONTENT'))
235 f.write(safe_bytes('OID_CONTENT'))
218
236
219 params = {'oid': oid, 'size': '1024'}
237 params = {'oid': oid, 'size': '1024'}
220 response = git_lfs_app.post_json(
238 response = git_lfs_app.post_json(
221 '/repo/info/lfs/verify', params=params, status=422)
239 '/repo/info/lfs/verify', params=params, status=422)
222
240
223 assert json.loads(response.text) == {
241 assert json.loads(response.text) == {
224 'message': 'requested file size mismatch '
242 'message': 'requested file size mismatch store size:11 requested:1024'
225 'store size:11 requested:1024'}
243 }
226
244
227 def test_app_verify_api(self, git_lfs_app):
245 def test_app_verify_api(self, git_lfs_app):
228 oid = 'existing'
246 oid = 'existing'
229 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
247 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
230 if not os.path.isdir(os.path.dirname(oid_path)):
248 if not os.path.isdir(os.path.dirname(oid_path)):
231 os.makedirs(os.path.dirname(oid_path))
249 os.makedirs(os.path.dirname(oid_path))
232 with open(oid_path, 'wb') as f:
250 with open(oid_path, 'wb') as f:
233 f.write(safe_bytes('OID_CONTENT'))
251 f.write(safe_bytes('OID_CONTENT'))
234
252
235 params = {'oid': oid, 'size': 11}
253 params = {'oid': oid, 'size': 11}
236 response = git_lfs_app.post_json(
254 response = git_lfs_app.post_json(
237 '/repo/info/lfs/verify', params=params)
255 '/repo/info/lfs/verify', params=params)
238
256
239 assert json.loads(response.text) == {
257 assert json.loads(response.text) == {
240 'message': {'size': 'ok', 'in_store': 'ok'}}
258 'message': {'size': 11, 'oid': oid}
259 }
241
260
242 def test_app_download_api_oid_not_existing(self, git_lfs_app):
261 def test_app_download_api_oid_not_existing(self, git_lfs_app):
243 oid = 'missing'
262 oid = 'missing'
244
263
245 response = git_lfs_app.get(
264 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
246 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
247
265
248 assert json.loads(response.text) == {
266 assert json.loads(response.text) == {
249 'message': 'requested file with oid `missing` not found in store'}
267 'message': 'requested file with oid `missing` not found in store'}
250
268
251 def test_app_download_api(self, git_lfs_app):
269 def test_app_download_api(self, git_lfs_app):
252 oid = 'existing'
270 oid = 'existing'
253 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
271 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
254 if not os.path.isdir(os.path.dirname(oid_path)):
272 if not os.path.isdir(os.path.dirname(oid_path)):
255 os.makedirs(os.path.dirname(oid_path))
273 os.makedirs(os.path.dirname(oid_path))
256 with open(oid_path, 'wb') as f:
274 with open(oid_path, 'wb') as f:
257 f.write(safe_bytes('OID_CONTENT'))
275 f.write(safe_bytes('OID_CONTENT'))
258
276
259 response = git_lfs_app.get(
277 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}')
260 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
261 assert response
278 assert response
262
279
263 def test_app_upload(self, git_lfs_app):
280 def test_app_upload(self, git_lfs_app):
264 oid = 'uploaded'
281 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
265
282
266 response = git_lfs_app.put(
283 response = git_lfs_app.put(
267 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
284 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
268
285
269 assert json.loads(response.text) == {'upload': 'ok'}
286 assert json.loads(response.text) == {'upload': 'ok', 'state': 'written'}
270
287
271 # verify that we actually wrote that OID
288 # verify that we actually wrote that OID
272 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
289 oid_path = LFSOidStore(oid=oid, repo=None, store_location=git_lfs_app._store).oid_path
273 assert os.path.isfile(oid_path)
290 assert os.path.isfile(oid_path)
274 assert 'CONTENT' == open(oid_path).read()
291 assert 'CONTENT' == open(oid_path).read()
292
293 response = git_lfs_app.put(
294 f'/repo/info/lfs/objects/{oid}', params='CONTENT')
295
296 assert json.loads(response.text) == {'upload': 'ok', 'state': 'in-store'}
297
298
299 def test_app_upload_wrong_sha(self, git_lfs_app):
300 oid = 'i-am-a-wrong-sha'
301
302 response = git_lfs_app.put(f'/repo/info/lfs/objects/{oid}', params='CONTENT', status=400)
303
304 assert json.loads(response.text) == {
305 'message': 'oid i-am-a-wrong-sha does not match expected sha '
306 '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'}
307
308 # check this OID wasn't written to store
309 response = git_lfs_app.get(f'/repo/info/lfs/objects/{oid}', status=404)
310 assert json.loads(response.text) == {'message': 'requested file with oid `i-am-a-wrong-sha` not found in store'}
@@ -1,142 +1,143 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from vcsserver.lib.str_utils import safe_bytes
20 from vcsserver.lib.str_utils import safe_bytes
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
22
22
23
23
24 @pytest.fixture()
24 @pytest.fixture()
25 def lfs_store(tmpdir):
25 def lfs_store(tmpdir):
26 repo = 'test'
26 repo = 'test'
27 oid = '123456789'
27 oid = '65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12'
28 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
29 return store
29 return store
30
30
31
31
32 @pytest.fixture()
32 @pytest.fixture()
33 def oid_handler(lfs_store):
33 def oid_handler(lfs_store):
34 store = lfs_store
34 store = lfs_store
35 repo = store.repo
35 repo = store.repo
36 oid = store.oid
36 oid = store.oid
37
37
38 oid_handler = OidHandler(
38 oid_handler = OidHandler(
39 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 store=store, repo_name=repo, auth=('basic', 'xxxx'),
40 oid=oid,
40 oid=oid,
41 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
42 obj_verify_href='http://localhost/verify')
42 obj_verify_href='http://localhost/verify')
43 return oid_handler
43 return oid_handler
44
44
45
45
46 class TestOidHandler:
46 class TestOidHandler:
47
47
48 @pytest.mark.parametrize('exec_action', [
48 @pytest.mark.parametrize('exec_action', [
49 'download',
49 'download',
50 'upload',
50 'upload',
51 ])
51 ])
52 def test_exec_action(self, exec_action, oid_handler):
52 def test_exec_action(self, exec_action, oid_handler):
53 handler = oid_handler.exec_operation(exec_action)
53 handler = oid_handler.exec_operation(exec_action)
54 assert handler
54 assert handler
55
55
56 def test_exec_action_undefined(self, oid_handler):
56 def test_exec_action_undefined(self, oid_handler):
57 with pytest.raises(AttributeError):
57 with pytest.raises(AttributeError):
58 oid_handler.exec_operation('wrong')
58 oid_handler.exec_operation('wrong')
59
59
60 def test_download_oid_not_existing(self, oid_handler):
60 def test_download_oid_not_existing(self, oid_handler):
61 response, has_errors = oid_handler.exec_operation('download')
61 response, has_errors = oid_handler.exec_operation('download')
62
62
63 assert response is None
63 assert response is None
64 assert has_errors['error'] == {
64 assert has_errors['error'] == {
65 'code': 404,
65 'code': 404,
66 'message': 'object: 123456789 does not exist in store'}
66 'message': 'object: 65f23e22a9bfedda96929b3cfcb8b6d2fdd34a2e877ddb81f45d79ab05710e12 does not exist in store'
67 }
67
68
68 def test_download_oid(self, oid_handler):
69 def test_download_oid(self, oid_handler):
69 store = oid_handler.get_store()
70 store = oid_handler.get_store()
70 if not os.path.isdir(os.path.dirname(store.oid_path)):
71 if not os.path.isdir(os.path.dirname(store.oid_path)):
71 os.makedirs(os.path.dirname(store.oid_path))
72 os.makedirs(os.path.dirname(store.oid_path))
72
73
73 with open(store.oid_path, 'wb') as f:
74 with open(store.oid_path, 'wb') as f:
74 f.write(safe_bytes('CONTENT'))
75 f.write(safe_bytes('CONTENT'))
75
76
76 response, has_errors = oid_handler.exec_operation('download')
77 response, has_errors = oid_handler.exec_operation('download')
77
78
78 assert has_errors is None
79 assert has_errors is None
79 assert response['download'] == {
80 assert response['download'] == {
80 'header': {'Authorization': 'basic xxxx'},
81 'header': {'Authorization': 'basic xxxx'},
81 'href': 'http://localhost/handle_oid'
82 'href': 'http://localhost/handle_oid'
82 }
83 }
83
84
84 def test_upload_oid_that_exists(self, oid_handler):
85 def test_upload_oid_that_exists(self, oid_handler):
85 store = oid_handler.get_store()
86 store = oid_handler.get_store()
86 if not os.path.isdir(os.path.dirname(store.oid_path)):
87 if not os.path.isdir(os.path.dirname(store.oid_path)):
87 os.makedirs(os.path.dirname(store.oid_path))
88 os.makedirs(os.path.dirname(store.oid_path))
88
89
89 with open(store.oid_path, 'wb') as f:
90 with open(store.oid_path, 'wb') as f:
90 f.write(safe_bytes('CONTENT'))
91 f.write(safe_bytes('CONTENT'))
91 oid_handler.obj_size = 7
92 oid_handler.obj_size = 7
92 response, has_errors = oid_handler.exec_operation('upload')
93 response, has_errors = oid_handler.exec_operation('upload')
93 assert has_errors is None
94 assert has_errors is None
94 assert response is None
95 assert response is None
95
96
96 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
97 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
97 store = oid_handler.get_store()
98 store = oid_handler.get_store()
98 if not os.path.isdir(os.path.dirname(store.oid_path)):
99 if not os.path.isdir(os.path.dirname(store.oid_path)):
99 os.makedirs(os.path.dirname(store.oid_path))
100 os.makedirs(os.path.dirname(store.oid_path))
100
101
101 with open(store.oid_path, 'wb') as f:
102 with open(store.oid_path, 'wb') as f:
102 f.write(safe_bytes('CONTENT'))
103 f.write(safe_bytes('CONTENT'))
103
104
104 oid_handler.obj_size = 10240
105 oid_handler.obj_size = 10240
105 response, has_errors = oid_handler.exec_operation('upload')
106 response, has_errors = oid_handler.exec_operation('upload')
106 assert has_errors is None
107 assert has_errors is None
107 assert response['upload'] == {
108 assert response['upload'] == {
108 'header': {'Authorization': 'basic xxxx',
109 'header': {'Authorization': 'basic xxxx',
109 'Transfer-Encoding': 'chunked'},
110 'Transfer-Encoding': 'chunked'},
110 'href': 'http://localhost/handle_oid',
111 'href': 'http://localhost/handle_oid',
111 }
112 }
112
113
113 def test_upload_oid(self, oid_handler):
114 def test_upload_oid(self, oid_handler):
114 response, has_errors = oid_handler.exec_operation('upload')
115 response, has_errors = oid_handler.exec_operation('upload')
115 assert has_errors is None
116 assert has_errors is None
116 assert response['upload'] == {
117 assert response['upload'] == {
117 'header': {'Authorization': 'basic xxxx',
118 'header': {'Authorization': 'basic xxxx',
118 'Transfer-Encoding': 'chunked'},
119 'Transfer-Encoding': 'chunked'},
119 'href': 'http://localhost/handle_oid'
120 'href': 'http://localhost/handle_oid'
120 }
121 }
121
122
122
123
123 class TestLFSStore:
124 class TestLFSStore:
124 def test_write_oid(self, lfs_store):
125 def test_write_oid(self, lfs_store):
125 oid_location = lfs_store.oid_path
126 oid_location = lfs_store.oid_path
126
127
127 assert not os.path.isfile(oid_location)
128 assert not os.path.isfile(oid_location)
128
129
129 engine = lfs_store.get_engine(mode='wb')
130 engine = lfs_store.get_engine(mode='wb')
130 with engine as f:
131 with engine as f:
131 f.write(safe_bytes('CONTENT'))
132 f.write(safe_bytes('CONTENT'))
132
133
133 assert os.path.isfile(oid_location)
134 assert os.path.isfile(oid_location)
134
135
135 def test_detect_has_oid(self, lfs_store):
136 def test_detect_has_oid(self, lfs_store):
136
137
137 assert lfs_store.has_oid() is False
138 assert lfs_store.has_oid() is False
138 engine = lfs_store.get_engine(mode='wb')
139 engine = lfs_store.get_engine(mode='wb')
139 with engine as f:
140 with engine as f:
140 f.write(safe_bytes('CONTENT'))
141 f.write(safe_bytes('CONTENT'))
141
142
142 assert lfs_store.has_oid() is True
143 assert lfs_store.has_oid() is True
@@ -1,824 +1,828 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import logging
21 import logging
22 import collections
22 import collections
23 import base64
23 import base64
24 import msgpack
24 import msgpack
25 import dataclasses
25 import dataclasses
26 import pygit2
26 import pygit2
27
27
28 import http.client
28 import http.client
29 from celery import Celery
29 from celery import Celery
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33
33
34 from vcsserver import exceptions, subprocessio, settings
34 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver.lib.ext_json import json
35 from vcsserver.lib.ext_json import json
36 from vcsserver.lib.str_utils import ascii_str, safe_str
36 from vcsserver.lib.str_utils import ascii_str, safe_str
37 from vcsserver.lib.svn_txn_utils import get_txn_id_from_store
37 from vcsserver.lib.svn_txn_utils import get_txn_id_from_store
38 from vcsserver.remote.git_remote import Repository
38 from vcsserver.remote.git_remote import Repository
39
39
40 celery_app = Celery('__vcsserver__')
40 celery_app = Celery('__vcsserver__')
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 class HooksHttpClient:
44 class HooksHttpClient:
45 proto = 'msgpack.v1'
45 proto = 'msgpack.v1'
46 connection = None
46 connection = None
47
47
48 def __init__(self, hooks_uri):
48 def __init__(self, hooks_uri):
49 self.hooks_uri = hooks_uri
49 self.hooks_uri = hooks_uri
50
50
51 def __repr__(self):
51 def __repr__(self):
52 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
53
53
54 def __call__(self, method, extras):
54 def __call__(self, method, extras):
55 connection = http.client.HTTPConnection(self.hooks_uri)
55 connection = http.client.HTTPConnection(self.hooks_uri)
56 # binary msgpack body
56 # binary msgpack body
57 headers, body = self._serialize(method, extras)
57 headers, body = self._serialize(method, extras)
58 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
59
59
60 try:
60 try:
61 try:
61 try:
62 connection.request('POST', '/', body, headers)
62 connection.request('POST', '/', body, headers)
63 except Exception as error:
63 except Exception as error:
64 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
65 raise
65 raise
66
66
67 response = connection.getresponse()
67 response = connection.getresponse()
68 try:
68 try:
69 return msgpack.load(response)
69 return msgpack.load(response)
70 except Exception:
70 except Exception:
71 response_data = response.read()
71 response_data = response.read()
72 log.exception('Failed to decode hook response json data. '
72 log.exception('Failed to decode hook response json data. '
73 'response_code:%s, raw_data:%s',
73 'response_code:%s, raw_data:%s',
74 response.status, response_data)
74 response.status, response_data)
75 raise
75 raise
76 finally:
76 finally:
77 connection.close()
77 connection.close()
78
78
79 @classmethod
79 @classmethod
80 def _serialize(cls, hook_name, extras):
80 def _serialize(cls, hook_name, extras):
81 data = {
81 data = {
82 'method': hook_name,
82 'method': hook_name,
83 'extras': extras
83 'extras': extras
84 }
84 }
85 headers = {
85 headers = {
86 "rc-hooks-protocol": cls.proto,
86 "rc-hooks-protocol": cls.proto,
87 "Connection": "keep-alive"
87 "Connection": "keep-alive"
88 }
88 }
89 return headers, msgpack.packb(data)
89 return headers, msgpack.packb(data)
90
90
91
91
92 class HooksCeleryClient:
92 class HooksCeleryClient:
93 TASK_TIMEOUT = 60 # time in seconds
93 TASK_TIMEOUT = 60 # time in seconds
94
94
95 def __init__(self, queue, backend):
95 def __init__(self, queue, backend):
96 celery_app.config_from_object({
96 celery_app.config_from_object({
97 'broker_url': queue, 'result_backend': backend,
97 'broker_url': queue, 'result_backend': backend,
98 'broker_connection_retry_on_startup': True,
98 'broker_connection_retry_on_startup': True,
99 'task_serializer': 'json',
99 'task_serializer': 'json',
100 'accept_content': ['json', 'msgpack'],
100 'accept_content': ['json', 'msgpack'],
101 'result_serializer': 'json',
101 'result_serializer': 'json',
102 'result_accept_content': ['json', 'msgpack']
102 'result_accept_content': ['json', 'msgpack']
103 })
103 })
104 self.celery_app = celery_app
104 self.celery_app = celery_app
105
105
106 def __call__(self, method, extras):
106 def __call__(self, method, extras):
107 inquired_task = self.celery_app.signature(
107 inquired_task = self.celery_app.signature(
108 f'rhodecode.lib.celerylib.tasks.{method}'
108 f'rhodecode.lib.celerylib.tasks.{method}'
109 )
109 )
110 return inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)
110 result = inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)
111
112 return result
111
113
112
114
113 class HooksShadowRepoClient:
115 class HooksShadowRepoClient:
114
116
115 def __call__(self, hook_name, extras):
117 def __call__(self, hook_name, extras):
116 return {'output': '', 'status': 0}
118 return {'output': '', 'status': 0}
117
119
118
120
119 class RemoteMessageWriter:
121 class RemoteMessageWriter:
120 """Writer base class."""
122 """Writer base class."""
121 def write(self, message):
123 def write(self, message):
122 raise NotImplementedError()
124 raise NotImplementedError()
123
125
124
126
125 class HgMessageWriter(RemoteMessageWriter):
127 class HgMessageWriter(RemoteMessageWriter):
126 """Writer that knows how to send messages to mercurial clients."""
128 """Writer that knows how to send messages to mercurial clients."""
127
129
128 def __init__(self, ui):
130 def __init__(self, ui):
129 self.ui = ui
131 self.ui = ui
130
132
131 def write(self, message: str):
133 def write(self, message: str):
132 # TODO: Check why the quiet flag is set by default.
134 # TODO: Check why the quiet flag is set by default.
133 old = self.ui.quiet
135 old = self.ui.quiet
134 self.ui.quiet = False
136 self.ui.quiet = False
135 self.ui.status(message.encode('utf-8'))
137 self.ui.status(message.encode('utf-8'))
136 self.ui.quiet = old
138 self.ui.quiet = old
137
139
138
140
139 class GitMessageWriter(RemoteMessageWriter):
141 class GitMessageWriter(RemoteMessageWriter):
140 """Writer that knows how to send messages to git clients."""
142 """Writer that knows how to send messages to git clients."""
141
143
142 def __init__(self, stdout=None):
144 def __init__(self, stdout=None):
143 self.stdout = stdout or sys.stdout
145 self.stdout = stdout or sys.stdout
144
146
145 def write(self, message: str):
147 def write(self, message: str):
146 self.stdout.write(message)
148 self.stdout.write(message)
147
149
148
150
149 class SvnMessageWriter(RemoteMessageWriter):
151 class SvnMessageWriter(RemoteMessageWriter):
150 """Writer that knows how to send messages to svn clients."""
152 """Writer that knows how to send messages to svn clients."""
151
153
152 def __init__(self, stderr=None):
154 def __init__(self, stderr=None):
153 # SVN needs data sent to stderr for back-to-client messaging
155 # SVN needs data sent to stderr for back-to-client messaging
154 self.stderr = stderr or sys.stderr
156 self.stderr = stderr or sys.stderr
155
157
156 def write(self, message):
158 def write(self, message):
157 self.stderr.write(message)
159 self.stderr.write(message)
158
160
159
161
160 def _handle_exception(result):
162 def _maybe_handle_exception(result):
161 exception_class = result.get('exception')
163 exception_class = result.get('exception')
162 exception_traceback = result.get('exception_traceback')
164 exception_traceback = result.get('exception_traceback')
165 if not (exception_class and exception_traceback):
166 return
163 log.debug('Handling hook-call exception: %s', exception_class)
167 log.debug('Handling hook-call exception: %s', exception_class)
164
168
165 if exception_traceback:
169 if exception_traceback:
166 log.error('Got traceback from remote call:%s', exception_traceback)
170 log.error('Got traceback from remote call:%s', exception_traceback)
167
171
168 if exception_class == 'HTTPLockedRC':
172 if exception_class == 'HTTPLockedRC':
169 raise exceptions.RepositoryLockedException()(*result['exception_args'])
173 raise exceptions.RepositoryLockedException()(*result['exception_args'])
170 elif exception_class == 'ClientNotSupportedError':
174 elif exception_class == 'ClientNotSupportedError':
171 raise exceptions.ClientNotSupportedException()(*result['exception_args'])
175 raise exceptions.ClientNotSupportedException()(*result['exception_args'])
172 elif exception_class == 'HTTPBranchProtected':
176 elif exception_class == 'HTTPBranchProtected':
173 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
177 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
174 elif exception_class == 'RepositoryError':
178 elif exception_class == 'RepositoryError':
175 raise exceptions.VcsException()(*result['exception_args'])
179 raise exceptions.VcsException()(*result['exception_args'])
176 elif exception_class:
180 elif exception_class:
177 raise Exception(
181 raise Exception(
178 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
182 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
179 )
183 )
180
184
181
185
182 def _get_hooks_client(extras):
186 def _get_hooks_client(extras):
183 hooks_uri = extras.get('hooks_uri')
187 hooks_uri = extras.get('hooks_uri')
184 task_queue = extras.get('task_queue')
188 task_queue = extras.get('task_queue')
185 task_backend = extras.get('task_backend')
189 task_backend = extras.get('task_backend')
186 is_shadow_repo = extras.get('is_shadow_repo')
190 is_shadow_repo = extras.get('is_shadow_repo')
187
191
188 if hooks_uri:
192 if hooks_uri:
189 return HooksHttpClient(hooks_uri)
193 return HooksHttpClient(hooks_uri)
190 elif task_queue and task_backend:
194 elif task_queue and task_backend:
191 return HooksCeleryClient(task_queue, task_backend)
195 return HooksCeleryClient(task_queue, task_backend)
192 elif is_shadow_repo:
196 elif is_shadow_repo:
193 return HooksShadowRepoClient()
197 return HooksShadowRepoClient()
194 else:
198 else:
195 raise Exception("Hooks client not found!")
199 raise Exception("Hooks client not found!")
196
200
197
201
198 def _call_hook(hook_name, extras, writer):
202 def _call_hook(hook_name, extras, writer):
199 hooks_client = _get_hooks_client(extras)
203 hooks_client = _get_hooks_client(extras)
200 log.debug('Hooks, using client:%s', hooks_client)
204 log.debug('Hooks, using client:%s', hooks_client)
201 result = hooks_client(hook_name, extras)
205 result = hooks_client(hook_name, extras)
202 log.debug('Hooks got result: %s', result)
206 log.debug('Hooks got result: %s', result)
203 _handle_exception(result)
207 _maybe_handle_exception(result)
204 writer.write(result['output'])
208 writer.write(result['output'])
205
209
206 return result['status']
210 return result['status']
207
211
208
212
209 def _extras_from_ui(ui):
213 def _extras_from_ui(ui):
210 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
214 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
211 if not hook_data:
215 if not hook_data:
212 # maybe it's inside environ ?
216 # maybe it's inside environ ?
213 env_hook_data = os.environ.get('RC_SCM_DATA')
217 env_hook_data = os.environ.get('RC_SCM_DATA')
214 if env_hook_data:
218 if env_hook_data:
215 hook_data = env_hook_data
219 hook_data = env_hook_data
216
220
217 extras = {}
221 extras = {}
218 if hook_data:
222 if hook_data:
219 extras = json.loads(hook_data)
223 extras = json.loads(hook_data)
220 return extras
224 return extras
221
225
222
226
223 def _rev_range_hash(repo, node, check_heads=False):
227 def _rev_range_hash(repo, node, check_heads=False):
224 from vcsserver.hgcompat import get_ctx
228 from vcsserver.hgcompat import get_ctx
225
229
226 commits = []
230 commits = []
227 revs = []
231 revs = []
228 start = get_ctx(repo, node).rev()
232 start = get_ctx(repo, node).rev()
229 end = len(repo)
233 end = len(repo)
230 for rev in range(start, end):
234 for rev in range(start, end):
231 revs.append(rev)
235 revs.append(rev)
232 ctx = get_ctx(repo, rev)
236 ctx = get_ctx(repo, rev)
233 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
237 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
234 branch = safe_str(ctx.branch())
238 branch = safe_str(ctx.branch())
235 commits.append((commit_id, branch))
239 commits.append((commit_id, branch))
236
240
237 parent_heads = []
241 parent_heads = []
238 if check_heads:
242 if check_heads:
239 parent_heads = _check_heads(repo, start, end, revs)
243 parent_heads = _check_heads(repo, start, end, revs)
240 return commits, parent_heads
244 return commits, parent_heads
241
245
242
246
243 def _check_heads(repo, start, end, commits):
247 def _check_heads(repo, start, end, commits):
244 from vcsserver.hgcompat import get_ctx
248 from vcsserver.hgcompat import get_ctx
245 changelog = repo.changelog
249 changelog = repo.changelog
246 parents = set()
250 parents = set()
247
251
248 for new_rev in commits:
252 for new_rev in commits:
249 for p in changelog.parentrevs(new_rev):
253 for p in changelog.parentrevs(new_rev):
250 if p == mercurial.node.nullrev:
254 if p == mercurial.node.nullrev:
251 continue
255 continue
252 if p < start:
256 if p < start:
253 parents.add(p)
257 parents.add(p)
254
258
255 for p in parents:
259 for p in parents:
256 branch = get_ctx(repo, p).branch()
260 branch = get_ctx(repo, p).branch()
257 # The heads descending from that parent, on the same branch
261 # The heads descending from that parent, on the same branch
258 parent_heads = {p}
262 parent_heads = {p}
259 reachable = {p}
263 reachable = {p}
260 for x in range(p + 1, end):
264 for x in range(p + 1, end):
261 if get_ctx(repo, x).branch() != branch:
265 if get_ctx(repo, x).branch() != branch:
262 continue
266 continue
263 for pp in changelog.parentrevs(x):
267 for pp in changelog.parentrevs(x):
264 if pp in reachable:
268 if pp in reachable:
265 reachable.add(x)
269 reachable.add(x)
266 parent_heads.discard(pp)
270 parent_heads.discard(pp)
267 parent_heads.add(x)
271 parent_heads.add(x)
268 # More than one head? Suggest merging
272 # More than one head? Suggest merging
269 if len(parent_heads) > 1:
273 if len(parent_heads) > 1:
270 return list(parent_heads)
274 return list(parent_heads)
271
275
272 return []
276 return []
273
277
274
278
275 def _get_git_env():
279 def _get_git_env():
276 env = {}
280 env = {}
277 for k, v in os.environ.items():
281 for k, v in os.environ.items():
278 if k.startswith('GIT'):
282 if k.startswith('GIT'):
279 env[k] = v
283 env[k] = v
280
284
281 # serialized version
285 # serialized version
282 return [(k, v) for k, v in env.items()]
286 return [(k, v) for k, v in env.items()]
283
287
284
288
285 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
289 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
286 env = {}
290 env = {}
287 for k, v in os.environ.items():
291 for k, v in os.environ.items():
288 if k.startswith('HG'):
292 if k.startswith('HG'):
289 env[k] = v
293 env[k] = v
290
294
291 env['HG_NODE'] = old_rev
295 env['HG_NODE'] = old_rev
292 env['HG_NODE_LAST'] = new_rev
296 env['HG_NODE_LAST'] = new_rev
293 env['HG_TXNID'] = txnid
297 env['HG_TXNID'] = txnid
294 env['HG_PENDING'] = repo_path
298 env['HG_PENDING'] = repo_path
295
299
296 return [(k, v) for k, v in env.items()]
300 return [(k, v) for k, v in env.items()]
297
301
298
302
299 def _get_ini_settings(ini_file):
303 def _get_ini_settings(ini_file):
300 from vcsserver.http_main import sanitize_settings_and_apply_defaults
304 from vcsserver.http_main import sanitize_settings_and_apply_defaults
301 from vcsserver.lib.config_utils import get_app_config_lightweight, configure_and_store_settings
305 from vcsserver.lib.config_utils import get_app_config_lightweight, configure_and_store_settings
302
306
303 global_config = {'__file__': ini_file}
307 global_config = {'__file__': ini_file}
304 ini_settings = get_app_config_lightweight(ini_file)
308 ini_settings = get_app_config_lightweight(ini_file)
305 sanitize_settings_and_apply_defaults(global_config, ini_settings)
309 sanitize_settings_and_apply_defaults(global_config, ini_settings)
306 configure_and_store_settings(global_config, ini_settings)
310 configure_and_store_settings(global_config, ini_settings)
307
311
308 return ini_settings
312 return ini_settings
309
313
310
314
311 def _fix_hooks_executables(ini_path=''):
315 def _fix_hooks_executables(ini_path=''):
312 """
316 """
313 This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns
317 This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns
314 especially for subversion where hooks strip entire env, and calling just 'svn' command will most likely fail
318 especially for subversion where hooks strip entire env, and calling just 'svn' command will most likely fail
315 because svn is not on PATH
319 because svn is not on PATH
316 """
320 """
317 # set defaults, in case we can't read from ini_file
321 # set defaults, in case we can't read from ini_file
318 core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
322 core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
319 if ini_path:
323 if ini_path:
320 ini_settings = _get_ini_settings(ini_path)
324 ini_settings = _get_ini_settings(ini_path)
321 core_binary_dir = ini_settings['core.binary_dir']
325 core_binary_dir = ini_settings['core.binary_dir']
322
326
323 settings.BINARY_DIR = core_binary_dir
327 settings.BINARY_DIR = core_binary_dir
324
328
325
329
326 def repo_size(ui, repo, **kwargs):
330 def repo_size(ui, repo, **kwargs):
327 extras = _extras_from_ui(ui)
331 extras = _extras_from_ui(ui)
328 return _call_hook('repo_size', extras, HgMessageWriter(ui))
332 return _call_hook('repo_size', extras, HgMessageWriter(ui))
329
333
330
334
331 def pre_pull(ui, repo, **kwargs):
335 def pre_pull(ui, repo, **kwargs):
332 extras = _extras_from_ui(ui)
336 extras = _extras_from_ui(ui)
333 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
337 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
334
338
335
339
336 def pre_pull_ssh(ui, repo, **kwargs):
340 def pre_pull_ssh(ui, repo, **kwargs):
337 extras = _extras_from_ui(ui)
341 extras = _extras_from_ui(ui)
338 if extras and extras.get('SSH'):
342 if extras and extras.get('SSH'):
339 return pre_pull(ui, repo, **kwargs)
343 return pre_pull(ui, repo, **kwargs)
340 return 0
344 return 0
341
345
342
346
343 def post_pull(ui, repo, **kwargs):
347 def post_pull(ui, repo, **kwargs):
344 extras = _extras_from_ui(ui)
348 extras = _extras_from_ui(ui)
345 return _call_hook('post_pull', extras, HgMessageWriter(ui))
349 return _call_hook('post_pull', extras, HgMessageWriter(ui))
346
350
347
351
348 def post_pull_ssh(ui, repo, **kwargs):
352 def post_pull_ssh(ui, repo, **kwargs):
349 extras = _extras_from_ui(ui)
353 extras = _extras_from_ui(ui)
350 if extras and extras.get('SSH'):
354 if extras and extras.get('SSH'):
351 return post_pull(ui, repo, **kwargs)
355 return post_pull(ui, repo, **kwargs)
352 return 0
356 return 0
353
357
354
358
355 def pre_push(ui, repo, node=None, **kwargs):
359 def pre_push(ui, repo, node=None, **kwargs):
356 """
360 """
357 Mercurial pre_push hook
361 Mercurial pre_push hook
358 """
362 """
359 extras = _extras_from_ui(ui)
363 extras = _extras_from_ui(ui)
360 detect_force_push = extras.get('detect_force_push')
364 detect_force_push = extras.get('detect_force_push')
361
365
362 rev_data = []
366 rev_data = []
363 hook_type: str = safe_str(kwargs.get('hooktype'))
367 hook_type: str = safe_str(kwargs.get('hooktype'))
364
368
365 if node and hook_type == 'pretxnchangegroup':
369 if node and hook_type == 'pretxnchangegroup':
366 branches = collections.defaultdict(list)
370 branches = collections.defaultdict(list)
367 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
371 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
368 for commit_id, branch in commits:
372 for commit_id, branch in commits:
369 branches[branch].append(commit_id)
373 branches[branch].append(commit_id)
370
374
371 for branch, commits in branches.items():
375 for branch, commits in branches.items():
372 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
376 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
373 rev_data.append({
377 rev_data.append({
374 'total_commits': len(commits),
378 'total_commits': len(commits),
375 'old_rev': old_rev,
379 'old_rev': old_rev,
376 'new_rev': commits[-1],
380 'new_rev': commits[-1],
377 'ref': '',
381 'ref': '',
378 'type': 'branch',
382 'type': 'branch',
379 'name': branch,
383 'name': branch,
380 })
384 })
381
385
382 for push_ref in rev_data:
386 for push_ref in rev_data:
383 push_ref['multiple_heads'] = _heads
387 push_ref['multiple_heads'] = _heads
384
388
385 repo_path = os.path.join(
389 repo_path = os.path.join(
386 extras.get('repo_store', ''), extras.get('repository', ''))
390 extras.get('repo_store', ''), extras.get('repository', ''))
387 push_ref['hg_env'] = _get_hg_env(
391 push_ref['hg_env'] = _get_hg_env(
388 old_rev=push_ref['old_rev'],
392 old_rev=push_ref['old_rev'],
389 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
393 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
390 repo_path=repo_path)
394 repo_path=repo_path)
391
395
392 extras['hook_type'] = hook_type or 'pre_push'
396 extras['hook_type'] = hook_type or 'pre_push'
393 extras['commit_ids'] = rev_data
397 extras['commit_ids'] = rev_data
394
398
395 return _call_hook('pre_push', extras, HgMessageWriter(ui))
399 return _call_hook('pre_push', extras, HgMessageWriter(ui))
396
400
397
401
398 def pre_push_ssh(ui, repo, node=None, **kwargs):
402 def pre_push_ssh(ui, repo, node=None, **kwargs):
399 extras = _extras_from_ui(ui)
403 extras = _extras_from_ui(ui)
400 if extras.get('SSH'):
404 if extras.get('SSH'):
401 return pre_push(ui, repo, node, **kwargs)
405 return pre_push(ui, repo, node, **kwargs)
402
406
403 return 0
407 return 0
404
408
405
409
406 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
410 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
407 """
411 """
408 Mercurial pre_push hook for SSH
412 Mercurial pre_push hook for SSH
409 """
413 """
410 extras = _extras_from_ui(ui)
414 extras = _extras_from_ui(ui)
411 if extras.get('SSH'):
415 if extras.get('SSH'):
412 permission = extras['SSH_PERMISSIONS']
416 permission = extras['SSH_PERMISSIONS']
413
417
414 if 'repository.write' == permission or 'repository.admin' == permission:
418 if 'repository.write' == permission or 'repository.admin' == permission:
415 return 0
419 return 0
416
420
417 # non-zero ret code
421 # non-zero ret code
418 return 1
422 return 1
419
423
420 return 0
424 return 0
421
425
422
426
423 def post_push(ui, repo, node, **kwargs):
427 def post_push(ui, repo, node, **kwargs):
424 """
428 """
425 Mercurial post_push hook
429 Mercurial post_push hook
426 """
430 """
427 extras = _extras_from_ui(ui)
431 extras = _extras_from_ui(ui)
428
432
429 commit_ids = []
433 commit_ids = []
430 branches = []
434 branches = []
431 bookmarks = []
435 bookmarks = []
432 tags = []
436 tags = []
433 hook_type: str = safe_str(kwargs.get('hooktype'))
437 hook_type: str = safe_str(kwargs.get('hooktype'))
434
438
435 commits, _heads = _rev_range_hash(repo, node)
439 commits, _heads = _rev_range_hash(repo, node)
436 for commit_id, branch in commits:
440 for commit_id, branch in commits:
437 commit_ids.append(commit_id)
441 commit_ids.append(commit_id)
438 if branch not in branches:
442 if branch not in branches:
439 branches.append(branch)
443 branches.append(branch)
440
444
441 if hasattr(ui, '_rc_pushkey_bookmarks'):
445 if hasattr(ui, '_rc_pushkey_bookmarks'):
442 bookmarks = ui._rc_pushkey_bookmarks
446 bookmarks = ui._rc_pushkey_bookmarks
443
447
444 extras['hook_type'] = hook_type or 'post_push'
448 extras['hook_type'] = hook_type or 'post_push'
445 extras['commit_ids'] = commit_ids
449 extras['commit_ids'] = commit_ids
446
450
447 extras['new_refs'] = {
451 extras['new_refs'] = {
448 'branches': branches,
452 'branches': branches,
449 'bookmarks': bookmarks,
453 'bookmarks': bookmarks,
450 'tags': tags
454 'tags': tags
451 }
455 }
452
456
453 return _call_hook('post_push', extras, HgMessageWriter(ui))
457 return _call_hook('post_push', extras, HgMessageWriter(ui))
454
458
455
459
456 def post_push_ssh(ui, repo, node, **kwargs):
460 def post_push_ssh(ui, repo, node, **kwargs):
457 """
461 """
458 Mercurial post_push hook for SSH
462 Mercurial post_push hook for SSH
459 """
463 """
460 if _extras_from_ui(ui).get('SSH'):
464 if _extras_from_ui(ui).get('SSH'):
461 return post_push(ui, repo, node, **kwargs)
465 return post_push(ui, repo, node, **kwargs)
462 return 0
466 return 0
463
467
464
468
465 def key_push(ui, repo, **kwargs):
469 def key_push(ui, repo, **kwargs):
466 from vcsserver.hgcompat import get_ctx
470 from vcsserver.hgcompat import get_ctx
467
471
468 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
472 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
469 # store new bookmarks in our UI object propagated later to post_push
473 # store new bookmarks in our UI object propagated later to post_push
470 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
474 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
471 return
475 return
472
476
473
477
474 # backward compat
478 # backward compat
475 log_pull_action = post_pull
479 log_pull_action = post_pull
476
480
477 # backward compat
481 # backward compat
478 log_push_action = post_push
482 log_push_action = post_push
479
483
480
484
481 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
485 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
482 """
486 """
483 Old hook name: keep here for backward compatibility.
487 Old hook name: keep here for backward compatibility.
484
488
485 This is only required when the installed git hooks are not upgraded.
489 This is only required when the installed git hooks are not upgraded.
486 """
490 """
487 pass
491 pass
488
492
489
493
490 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
494 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
491 """
495 """
492 Old hook name: keep here for backward compatibility.
496 Old hook name: keep here for backward compatibility.
493
497
494 This is only required when the installed git hooks are not upgraded.
498 This is only required when the installed git hooks are not upgraded.
495 """
499 """
496 pass
500 pass
497
501
498
502
499 @dataclasses.dataclass
503 @dataclasses.dataclass
500 class HookResponse:
504 class HookResponse:
501 status: int
505 status: int
502 output: str
506 output: str
503
507
504
508
505 def git_pre_pull(extras) -> HookResponse:
509 def git_pre_pull(extras) -> HookResponse:
506 """
510 """
507 Pre pull hook.
511 Pre pull hook.
508
512
509 :param extras: dictionary containing the keys defined in simplevcs
513 :param extras: dictionary containing the keys defined in simplevcs
510 :type extras: dict
514 :type extras: dict
511
515
512 :return: status code of the hook. 0 for success.
516 :return: status code of the hook. 0 for success.
513 :rtype: int
517 :rtype: int
514 """
518 """
515
519
516 if 'pull' not in extras['hooks']:
520 if 'pull' not in extras['hooks']:
517 return HookResponse(0, '')
521 return HookResponse(0, '')
518
522
519 stdout = io.StringIO()
523 stdout = io.StringIO()
520 try:
524 try:
521 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
525 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
522
526
523 except Exception as error:
527 except Exception as error:
524 log.exception('Failed to call pre_pull hook')
528 log.exception('Failed to call pre_pull hook')
525 status_code = 128
529 status_code = 128
526 stdout.write(f'ERROR: {error}\n')
530 stdout.write(f'ERROR: {error}\n')
527
531
528 return HookResponse(status_code, stdout.getvalue())
532 return HookResponse(status_code, stdout.getvalue())
529
533
530
534
531 def git_post_pull(extras) -> HookResponse:
535 def git_post_pull(extras) -> HookResponse:
532 """
536 """
533 Post pull hook.
537 Post pull hook.
534
538
535 :param extras: dictionary containing the keys defined in simplevcs
539 :param extras: dictionary containing the keys defined in simplevcs
536 :type extras: dict
540 :type extras: dict
537
541
538 :return: status code of the hook. 0 for success.
542 :return: status code of the hook. 0 for success.
539 :rtype: int
543 :rtype: int
540 """
544 """
541 if 'pull' not in extras['hooks']:
545 if 'pull' not in extras['hooks']:
542 return HookResponse(0, '')
546 return HookResponse(0, '')
543
547
544 stdout = io.StringIO()
548 stdout = io.StringIO()
545 try:
549 try:
546 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
550 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
547 except Exception as error:
551 except Exception as error:
548 status = 128
552 status = 128
549 stdout.write(f'ERROR: {error}\n')
553 stdout.write(f'ERROR: {error}\n')
550
554
551 return HookResponse(status, stdout.getvalue())
555 return HookResponse(status, stdout.getvalue())
552
556
553
557
554 def _parse_git_ref_lines(revision_lines):
558 def _parse_git_ref_lines(revision_lines):
555 rev_data = []
559 rev_data = []
556 for revision_line in revision_lines or []:
560 for revision_line in revision_lines or []:
557 old_rev, new_rev, ref = revision_line.strip().split(' ')
561 old_rev, new_rev, ref = revision_line.strip().split(' ')
558 ref_data = ref.split('/', 2)
562 ref_data = ref.split('/', 2)
559 if ref_data[1] in ('tags', 'heads'):
563 if ref_data[1] in ('tags', 'heads'):
560 rev_data.append({
564 rev_data.append({
561 # NOTE(marcink):
565 # NOTE(marcink):
562 # we're unable to tell total_commits for git at this point
566 # we're unable to tell total_commits for git at this point
563 # but we set the variable for consistency with GIT
567 # but we set the variable for consistency with GIT
564 'total_commits': -1,
568 'total_commits': -1,
565 'old_rev': old_rev,
569 'old_rev': old_rev,
566 'new_rev': new_rev,
570 'new_rev': new_rev,
567 'ref': ref,
571 'ref': ref,
568 'type': ref_data[1],
572 'type': ref_data[1],
569 'name': ref_data[2],
573 'name': ref_data[2],
570 })
574 })
571 return rev_data
575 return rev_data
572
576
573
577
def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Pre push hook.

    Invoked as git's pre-receive hook. Reads the RhodeCode context from the
    ``RC_SCM_DATA`` env var (JSON), optionally detects force-pushes per ref,
    and delegates the decision to the ``pre_push`` hook on the hooks client.

    :param unused_repo_path: repository path, unused by this handler.
    :param revision_lines: iterable of ``<old-sha> <new-sha> <ref>`` lines
        as supplied by git on stdin.
    :param env: process environment; must contain ``RC_SCM_DATA``.
    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    # 'push' hooks disabled for this call: allow the push through untouched
    if 'push' not in extras['hooks']:
        return 0
    _fix_hooks_executables(env.get('RC_INI_FILE'))

    # git uses 40 zeros as the null sha for created/deleted refs
    empty_commit_id = '0' * 40

    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        type_ = push_ref['type']
        # a ref whose old side is the null sha is being created; null on the
        # new side means deletion — neither can be a force-push
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if type_ == 'heads' and not (new_branch or delete_branch):
            old_rev = push_ref['old_rev']
            new_rev = push_ref['new_rev']
            # commits reachable from old_rev but NOT from new_rev: any output
            # means history was rewritten (i.e. this is a forced push)
            cmd = [settings.GIT_EXECUTABLE(), 'rev-list', old_rev, f'^{new_rev}']
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            # means we're having some non-reachable objects, this forced push was used
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data

    # write hook client messages straight to the pusher's console
    stdout = sys.stdout
    status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))

    return status_code
618
622
619
623
def git_post_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Post push hook.

    Invoked as git's post-receive hook. Collects the pushed commit ids,
    branch and tag names from the received ref updates, then calls the
    ``post_push`` (and optionally ``repo_size``) hooks on the hooks client.

    :param unused_repo_path: repository path, unused by this handler.
    :param revision_lines: iterable of ``<old-sha> <new-sha> <ref>`` lines
        as supplied by git on stdin.
    :param env: process environment; must contain ``RC_SCM_DATA``.
    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    # 'push' hooks disabled for this call: nothing to report
    if 'push' not in extras['hooks']:
        return 0

    _fix_hooks_executables(env.get('RC_INI_FILE'))

    rev_data = _parse_git_ref_lines(revision_lines)

    # accumulates commit shas plus 'delete_branch=>X' / 'tag=>Y' markers
    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            # starting new branch case
            if push_ref['old_rev'] == empty_commit_id:
                push_ref_name = push_ref['name']

                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                # a bare repo without HEAD (first push) gets its default
                # branch pointed at the newly created ref
                need_head_set = ''
                with Repository(os.getcwd()) as repo:
                    try:
                        repo.head
                    except pygit2.GitError:
                        need_head_set = f'refs/heads/{push_ref_name}'

                    if need_head_set:
                        repo.set_head(need_head_set)
                        print(f"Setting default branch to {push_ref_name}")

                # list every other branch so we can report only the commits
                # that are unique to the new branch ('--not <other heads>')
                cmd = [settings.GIT_EXECUTABLE(), 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = safe_str(stdout)
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE(), 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(list(map(ascii_str, stdout.splitlines())))

            # delete branch case
            elif push_ref['new_rev'] == empty_commit_id:
                git_revs.append(f'delete_branch=>{push_ref["name"]}')
            else:
                # ordinary update of an existing branch: report the
                # old_rev..new_rev range, oldest first
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE(), 'log',
                       f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                # we get bytes from stdout, we need str to be consistent
                log_revs = list(map(ascii_str, stdout.splitlines()))
                git_revs.extend(log_revs)

                # Pure pygit2 impl. but still 2-3x slower :/
                # results = []
                #
                # with Repository(os.getcwd()) as repo:
                #     repo_new_rev = repo[push_ref['new_rev']]
                #     repo_old_rev = repo[push_ref['old_rev']]
                #     walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
                #
                #     for commit in walker:
                #         if commit.id == repo_old_rev.id:
                #             break
                #         results.append(commit.id.hex)
                #     # reverse the order, can't use GIT_SORT_REVERSE
                #     log_revs = results[::-1]

        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append(f'tag=>{push_ref["name"]}')

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    # write hook client messages straight to the pusher's console
    stdout = sys.stdout

    # repo_size is best-effort only; its failure must not fail the push
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter(stdout))
        except Exception:
            pass

    status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
    return status_code
732
736
733
737
def get_extras_from_txn_id(repo_path, txn_id):
    """Fetch the hook 'extras' context previously stored for ``txn_id`` of ``repo_path``."""
    return get_txn_id_from_store(repo_path, txn_id)
737
741
738
742
def svn_pre_commit(repo_path, commit_data, env):
    """
    Pre-commit hook for subversion.

    Builds the hook 'extras' context either from the ``RC_SCM_DATA`` env var
    or from the data stored under the transaction id, then delegates to the
    ``pre_push`` hook on the hooks client.

    :param repo_path: repository path (the transaction path comes from
        ``commit_data`` instead).
    :param commit_data: ``(path, txn_id)`` pair for the pending transaction.
    :param env: process environment.
    :return: status code of the hook. 0 for success.
    :raises ValueError: when no context data can be extracted.
    """
    txn_path, txn_id = commit_data

    raw_scm_data = env.get('RC_SCM_DATA')
    if raw_scm_data:
        extras = json.loads(raw_scm_data)
    else:
        ini_file = env.get('RC_INI_FILE')
        if ini_file:
            _get_ini_settings(ini_file)
        # no env payload: fall back to the data stored under the TXN-ID
        extras = get_extras_from_txn_id(txn_path, txn_id)

    if not extras:
        raise ValueError('SVN-PRE-COMMIT: Failed to extract context data in called extras for hook execution')

    if extras.get('rc_internal_commit'):
        # internal commits are specially marked and never go through the hooks client
        return 0

    extras.update({
        'hook_type': 'pre_commit',
        'commit_ids': [txn_id],
        'txn_id': txn_id,
        'new_refs': {
            'total_commits': 1,
            'branches': [],
            'bookmarks': [],
            'tags': [],
        },
    })

    return _call_hook('pre_push', extras, SvnMessageWriter())
772
776
773
777
def svn_post_commit(repo_path, commit_data, env):
    """
    Post-commit hook for subversion.

    commit_data is path, rev, txn_id

    Builds the hook 'extras' context either from the ``RC_SCM_DATA`` env var
    or from the data stored under the transaction id, then calls the
    ``post_push`` (and optionally ``repo_size``) hooks on the hooks client.

    :param repo_path: repository path (the transaction path comes from
        ``commit_data`` instead).
    :param commit_data: ``(path, commit_id, txn_id)``; older callers may pass
        only ``(path, commit_id)``, in which case txn_id-based features are
        unavailable.
    :param env: process environment.
    :return: status code of the hook. 0 for success.
    :raises ValueError: when a txn_id is known but no context data is found.
    """

    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        # legacy two-element form without a transaction id
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        # unrecognized shape: nothing to do, succeed silently
        return 0

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        ini_path = env.get('RC_INI_FILE')
        if ini_path:
            _get_ini_settings(ini_path)
        # fallback method to read from TXN-ID stored data
        extras = get_extras_from_txn_id(path, txn_id)

    # NOTE(review): when txn_id is None AND the fallback returns nothing,
    # execution falls through to extras.get() below — presumably the store
    # lookup still returns a dict-like object in that case; verify.
    if not extras and txn_id:
        raise ValueError('SVN-POST-COMMIT: Failed to extract context data in called extras for hook execution')

    if extras.get('rc_internal_commit'):
        # special marker for internal commit, we don't call hooks client
        return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    # repo_size is best-effort only; its failure must not fail the commit
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
General Comments 0
You need to be logged in to leave comments. Login now