@@ -0,0 +1,19 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |||
|
17 | ||||
|
18 | ||||
|
19 | from app import create_app |
@@ -0,0 +1,276 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |||
|
17 | ||||
|
18 | import re | |||
|
19 | import logging | |||
|
20 | from wsgiref.util import FileWrapper | |||
|
21 | ||||
|
22 | import simplejson as json | |||
|
23 | from pyramid.config import Configurator | |||
|
24 | from pyramid.response import Response, FileIter | |||
|
25 | from pyramid.httpexceptions import ( | |||
|
26 | HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden, | |||
|
27 | HTTPUnprocessableEntity) | |||
|
28 | ||||
|
29 | from vcsserver.git_lfs.lib import OidHandler, LFSOidStore | |||
|
30 | from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator | |||
|
31 | from vcsserver.utils import safe_int | |||
|
32 | ||||
|
33 | log = logging.getLogger(__name__) | |||
|
34 | ||||
|
35 | ||||
|
36 | GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'  # TODO: maybe the '+json' variant? | |||
|
37 | GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))') | |||
|
38 | ||||
|
39 | ||||
|
40 | def write_response_error(http_exception, text=None): | |||
|
41 | content_type = 'application/json' | |||
|
42 | _exception = http_exception(content_type=content_type) | |||
|
43 | _exception.content_type = content_type | |||
|
44 | if text: | |||
|
45 | _exception.body = json.dumps({'message': text}) | |||
|
46 | log.debug('LFS: writing response of type %s to client with text:%s', | |||
|
47 | http_exception, text) | |||
|
48 | return _exception | |||
|
49 | ||||
|
50 | ||||
|
51 | class AuthHeaderRequired(object): | |||
|
52 | """ | |||
|
53 | Decorator to check if request has proper auth-header | |||
|
54 | """ | |||
|
55 | ||||
|
56 | def __call__(self, func): | |||
|
57 | return get_cython_compat_decorator(self.__wrapper, func) | |||
|
58 | ||||
|
59 | def __wrapper(self, func, *fargs, **fkwargs): | |||
|
60 | request = fargs[1] | |||
|
61 | auth = request.authorization | |||
|
62 | if not auth: | |||
|
63 | return write_response_error(HTTPForbidden) | |||
|
64 | return func(*fargs[1:], **fkwargs) | |||
|
65 | ||||
|
66 | ||||
|
67 | # views | |||
|
68 | ||||
|
69 | def lfs_objects(request): | |||
|
70 | # indicate that the v1 API is not supported | |||
|
71 | log.warning('LFS: v1 api not supported, reporting it back to client') | |||
|
72 | return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported') | |||
|
73 | ||||
|
74 | ||||
|
75 | @AuthHeaderRequired() | |||
|
76 | def lfs_objects_batch(request): | |||
|
77 | """ | |||
|
78 | The client sends the following information to the Batch endpoint to transfer some objects: | |||
|
79 | ||||
|
80 | operation - Should be download or upload. | |||
|
81 | transfers - An optional Array of String identifiers for transfer | |||
|
82 | adapters that the client has configured. If omitted, the basic | |||
|
83 | transfer adapter MUST be assumed by the server. | |||
|
84 | objects - An Array of objects to download. | |||
|
85 | oid - String OID of the LFS object. | |||
|
86 | size - Integer byte size of the LFS object. Must be at least zero. | |||
|
87 | """ | |||
|
88 | auth = request.authorization | |||
|
89 | ||||
|
90 | repo = request.matchdict.get('repo') | |||
|
91 | ||||
|
92 | data = request.json | |||
|
93 | operation = data.get('operation') | |||
|
94 | if operation not in ('download', 'upload'): | |||
|
95 | log.debug('LFS: unsupported operation:%s', operation) | |||
|
96 | return write_response_error( | |||
|
97 | HTTPBadRequest, 'unsupported operation mode: `%s`' % operation) | |||
|
98 | ||||
|
99 | if 'objects' not in data: | |||
|
100 | log.debug('LFS: missing objects data') | |||
|
101 | return write_response_error( | |||
|
102 | HTTPBadRequest, 'missing objects data') | |||
|
103 | ||||
|
104 | log.debug('LFS: handling operation of type: %s', operation) | |||
|
105 | ||||
|
106 | objects = [] | |||
|
107 | for o in data['objects']: | |||
|
108 | try: | |||
|
109 | oid = o['oid'] | |||
|
110 | obj_size = o['size'] | |||
|
111 | except KeyError: | |||
|
112 | log.exception('LFS, failed to extract data') | |||
|
113 | return write_response_error( | |||
|
114 | HTTPBadRequest, 'unsupported data in objects') | |||
|
115 | ||||
|
116 | obj_data = {'oid': oid} | |||
|
117 | ||||
|
118 | obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid) | |||
|
119 | obj_verify_href = request.route_url('lfs_objects_verify', repo=repo) | |||
|
120 | store = LFSOidStore( | |||
|
121 | oid, repo, store_location=request.registry.git_lfs_store_path) | |||
|
122 | handler = OidHandler( | |||
|
123 | store, repo, auth, oid, obj_size, obj_data, | |||
|
124 | obj_href, obj_verify_href) | |||
|
125 | ||||
|
126 | # this also verifies the OIDs | |||
|
127 | actions, errors = handler.exec_operation(operation) | |||
|
128 | if errors: | |||
|
129 | log.warning('LFS: got following errors: %s', errors) | |||
|
130 | obj_data['errors'] = errors | |||
|
131 | ||||
|
132 | if actions: | |||
|
133 | obj_data['actions'] = actions | |||
|
134 | ||||
|
135 | obj_data['size'] = obj_size | |||
|
136 | obj_data['authenticated'] = True | |||
|
137 | objects.append(obj_data) | |||
|
138 | ||||
|
139 | result = {'objects': objects, 'transfer': 'basic'} | |||
|
140 | log.debug('LFS Response %s', safe_result(result)) | |||
|
141 | ||||
|
142 | return result | |||
|
143 | ||||
|
144 | ||||
|
145 | def lfs_objects_oid_upload(request): | |||
|
146 | repo = request.matchdict.get('repo') | |||
|
147 | oid = request.matchdict.get('oid') | |||
|
148 | store = LFSOidStore( | |||
|
149 | oid, repo, store_location=request.registry.git_lfs_store_path) | |||
|
150 | engine = store.get_engine(mode='wb') | |||
|
151 | log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid) | |||
|
152 | with engine as f: | |||
|
153 | for chunk in FileWrapper(request.body_file_seekable, blksize=64 * 1024): | |||
|
154 | f.write(chunk) | |||
|
155 | ||||
|
156 | return {'upload': 'ok'} | |||
|
157 | ||||
|
158 | ||||
|
159 | def lfs_objects_oid_download(request): | |||
|
160 | repo = request.matchdict.get('repo') | |||
|
161 | oid = request.matchdict.get('oid') | |||
|
162 | ||||
|
163 | store = LFSOidStore( | |||
|
164 | oid, repo, store_location=request.registry.git_lfs_store_path) | |||
|
165 | if not store.has_oid(): | |||
|
166 | log.debug('LFS: oid %s does not exist in store', oid) | |||
|
167 | return write_response_error( | |||
|
168 | HTTPNotFound, 'requested file with oid `%s` not found in store' % oid) | |||
|
169 | ||||
|
170 | # TODO(marcink): support range header ? | |||
|
171 | # Range: bytes=0-, `bytes=(\d+)\-.*` | |||
|
172 | ||||
|
173 | f = open(store.oid_path, 'rb') | |||
|
174 | response = Response( | |||
|
175 | content_type='application/octet-stream', app_iter=FileIter(f)) | |||
|
176 | response.headers.add('X-RC-LFS-Response-Oid', str(oid)) | |||
|
177 | return response | |||
|
178 | ||||
|
179 | ||||
|
180 | def lfs_objects_verify(request): | |||
|
181 | repo = request.matchdict.get('repo') | |||
|
182 | ||||
|
183 | data = request.json | |||
|
184 | oid = data.get('oid') | |||
|
185 | size = safe_int(data.get('size')) | |||
|
186 | ||||
|
187 | if not (oid and size): | |||
|
188 | return write_response_error( | |||
|
189 | HTTPBadRequest, 'missing oid and size in request data') | |||
|
190 | ||||
|
191 | store = LFSOidStore( | |||
|
192 | oid, repo, store_location=request.registry.git_lfs_store_path) | |||
|
193 | if not store.has_oid(): | |||
|
194 | log.debug('LFS: oid %s does not exist in store', oid) | |||
|
195 | return write_response_error( | |||
|
196 | HTTPNotFound, 'oid `%s` does not exist in store' % oid) | |||
|
197 | ||||
|
198 | store_size = store.size_oid() | |||
|
199 | if store_size != size: | |||
|
200 | msg = 'requested file size mismatch store size:%s requested:%s' % ( | |||
|
201 | store_size, size) | |||
|
202 | return write_response_error( | |||
|
203 | HTTPUnprocessableEntity, msg) | |||
|
204 | ||||
|
205 | return {'message': {'size': 'ok', 'in_store': 'ok'}} | |||
|
206 | ||||
|
207 | ||||
|
208 | def lfs_objects_lock(request): | |||
|
209 | return write_response_error( | |||
|
210 | HTTPNotImplemented, 'GIT LFS locking api not supported') | |||
|
211 | ||||
|
212 | ||||
|
213 | def not_found(request): | |||
|
214 | return write_response_error( | |||
|
215 | HTTPNotFound, 'request path not found') | |||
|
216 | ||||
|
217 | ||||
|
218 | def lfs_disabled(request): | |||
|
219 | return write_response_error( | |||
|
220 | HTTPNotImplemented, 'GIT LFS disabled for this repo') | |||
|
221 | ||||
|
222 | ||||
|
223 | def git_lfs_app(config): | |||
|
224 | ||||
|
225 | # v1 API deprecation endpoint | |||
|
226 | config.add_route('lfs_objects', | |||
|
227 | '/{repo:.*?[^/]}/info/lfs/objects') | |||
|
228 | config.add_view(lfs_objects, route_name='lfs_objects', | |||
|
229 | request_method='POST', renderer='json') | |||
|
230 | ||||
|
231 | # locking API | |||
|
232 | config.add_route('lfs_objects_lock', | |||
|
233 | '/{repo:.*?[^/]}/info/lfs/locks') | |||
|
234 | config.add_view(lfs_objects_lock, route_name='lfs_objects_lock', | |||
|
235 | request_method=('POST', 'GET'), renderer='json') | |||
|
236 | ||||
|
237 | config.add_route('lfs_objects_lock_verify', | |||
|
238 | '/{repo:.*?[^/]}/info/lfs/locks/verify') | |||
|
239 | config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify', | |||
|
240 | request_method=('POST', 'GET'), renderer='json') | |||
|
241 | ||||
|
242 | # batch API | |||
|
243 | config.add_route('lfs_objects_batch', | |||
|
244 | '/{repo:.*?[^/]}/info/lfs/objects/batch') | |||
|
245 | config.add_view(lfs_objects_batch, route_name='lfs_objects_batch', | |||
|
246 | request_method='POST', renderer='json') | |||
|
247 | ||||
|
248 | # oid upload/download API | |||
|
249 | config.add_route('lfs_objects_oid', | |||
|
250 | '/{repo:.*?[^/]}/info/lfs/objects/{oid}') | |||
|
251 | config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid', | |||
|
252 | request_method='PUT', renderer='json') | |||
|
253 | config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid', | |||
|
254 | request_method='GET', renderer='json') | |||
|
255 | ||||
|
256 | # verification API | |||
|
257 | config.add_route('lfs_objects_verify', | |||
|
258 | '/{repo:.*?[^/]}/info/lfs/verify') | |||
|
259 | config.add_view(lfs_objects_verify, route_name='lfs_objects_verify', | |||
|
260 | request_method='POST', renderer='json') | |||
|
261 | ||||
|
262 | # not found handler for API | |||
|
263 | config.add_notfound_view(not_found, renderer='json') | |||
|
264 | ||||
|
265 | ||||
|
266 | def create_app(git_lfs_enabled, git_lfs_store_path): | |||
|
267 | config = Configurator() | |||
|
268 | if git_lfs_enabled: | |||
|
269 | config.include(git_lfs_app) | |||
|
270 | config.registry.git_lfs_store_path = git_lfs_store_path | |||
|
271 | else: | |||
|
272 | # not found handler for API, reporting disabled LFS support | |||
|
273 | config.add_notfound_view(lfs_disabled, renderer='json') | |||
|
274 | ||||
|
275 | app = config.make_wsgi_app() | |||
|
276 | return app |
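For context, a minimal sketch of standing up the LFS WSGI app above outside the test suite, assuming a throwaway store path (`/tmp/lfs-store` is only an illustration):

    from wsgiref.simple_server import make_server
    from vcsserver.git_lfs.app import create_app

    # git_lfs_store_path below is a hypothetical location used only for this sketch
    app = create_app(git_lfs_enabled=True, git_lfs_store_path='/tmp/lfs-store')
    make_server('127.0.0.1', 8080, app).serve_forever()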
@@ -0,0 +1,166 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |||
|
17 | ||||
|
18 | import os | |||
|
19 | import shutil | |||
|
20 | import logging | |||
|
21 | from collections import OrderedDict | |||
|
22 | ||||
|
23 | log = logging.getLogger(__name__) | |||
|
24 | ||||
|
25 | ||||
|
26 | class OidHandler(object): | |||
|
27 | ||||
|
28 | def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href, | |||
|
29 | obj_verify_href=None): | |||
|
30 | self.current_store = store | |||
|
31 | self.repo_name = repo_name | |||
|
32 | self.auth = auth | |||
|
33 | self.oid = oid | |||
|
34 | self.obj_size = obj_size | |||
|
35 | self.obj_data = obj_data | |||
|
36 | self.obj_href = obj_href | |||
|
37 | self.obj_verify_href = obj_verify_href | |||
|
38 | ||||
|
39 | def get_store(self, mode=None): | |||
|
40 | return self.current_store | |||
|
41 | ||||
|
42 | def get_auth(self): | |||
|
43 | """returns auth header for re-use in upload/download""" | |||
|
44 | return " ".join(self.auth) | |||
|
45 | ||||
|
46 | def download(self): | |||
|
47 | ||||
|
48 | store = self.get_store() | |||
|
49 | response = None | |||
|
50 | has_errors = None | |||
|
51 | ||||
|
52 | if not store.has_oid(): | |||
|
53 | # error reply back to client that something is wrong with dl | |||
|
54 | err_msg = 'object: {} does not exist in store'.format(store.oid) | |||
|
55 | has_errors = OrderedDict( | |||
|
56 | error=OrderedDict( | |||
|
57 | code=404, | |||
|
58 | message=err_msg | |||
|
59 | ) | |||
|
60 | ) | |||
|
61 | ||||
|
62 | download_action = OrderedDict( | |||
|
63 | href=self.obj_href, | |||
|
64 | header=OrderedDict([("Authorization", self.get_auth())]) | |||
|
65 | ) | |||
|
66 | if not has_errors: | |||
|
67 | response = OrderedDict(download=download_action) | |||
|
68 | return response, has_errors | |||
|
69 | ||||
|
70 | def upload(self, skip_existing=True): | |||
|
71 | """ | |||
|
72 | Write upload action for git-lfs server | |||
|
73 | """ | |||
|
74 | ||||
|
75 | store = self.get_store() | |||
|
76 | response = None | |||
|
77 | has_errors = None | |||
|
78 | ||||
|
79 | # verify if we have the OID before, if we do, reply with empty | |||
|
80 | if store.has_oid(): | |||
|
81 | log.debug('LFS: store already has oid %s', store.oid) | |||
|
82 | if skip_existing: | |||
|
83 | log.debug('LFS: skipping further action as oid is existing') | |||
|
84 | return response, has_errors | |||
|
85 | ||||
|
86 | upload_action = OrderedDict( | |||
|
87 | href=self.obj_href, | |||
|
88 | header=OrderedDict([("Authorization", self.get_auth())]) | |||
|
89 | ) | |||
|
90 | if not has_errors: | |||
|
91 | response = OrderedDict(upload=upload_action) | |||
|
92 | # if specified in handler, return the verification endpoint | |||
|
93 | if self.obj_verify_href: | |||
|
94 | verify_action = OrderedDict( | |||
|
95 | href=self.obj_verify_href, | |||
|
96 | header=OrderedDict([("Authorization", self.get_auth())]) | |||
|
97 | ) | |||
|
98 | response['verify'] = verify_action | |||
|
99 | return response, has_errors | |||
|
100 | ||||
|
101 | def exec_operation(self, operation, *args, **kwargs): | |||
|
102 | handler = getattr(self, operation) | |||
|
103 | log.debug('LFS: handling request using %s handler', handler) | |||
|
104 | return handler(*args, **kwargs) | |||
|
105 | ||||
|
106 | ||||
|
107 | class LFSOidStore(object): | |||
|
108 | ||||
|
109 | def __init__(self, oid, repo, store_location=None): | |||
|
110 | self.oid = oid | |||
|
111 | self.repo = repo | |||
|
112 | self.store_path = store_location or self.get_default_store() | |||
|
113 | self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp') | |||
|
114 | self.oid_path = os.path.join(self.store_path, oid) | |||
|
115 | self.fd = None | |||
|
116 | ||||
|
117 | def get_engine(self, mode): | |||
|
118 | """ | |||
|
119 | engine = .get_engine(mode='wb') | |||
|
120 | with engine as f: | |||
|
121 | f.write('...') | |||
|
122 | """ | |||
|
123 | ||||
|
124 | class StoreEngine(object): | |||
|
125 | def __init__(self, mode, store_path, oid_path, tmp_oid_path): | |||
|
126 | self.mode = mode | |||
|
127 | self.store_path = store_path | |||
|
128 | self.oid_path = oid_path | |||
|
129 | self.tmp_oid_path = tmp_oid_path | |||
|
130 | ||||
|
131 | def __enter__(self): | |||
|
132 | if not os.path.isdir(self.store_path): | |||
|
133 | os.makedirs(self.store_path) | |||
|
134 | ||||
|
135 | # TODO(marcink): maybe write metadata here with size/oid ? | |||
|
136 | fd = open(self.tmp_oid_path, self.mode) | |||
|
137 | self.fd = fd | |||
|
138 | return fd | |||
|
139 | ||||
|
140 | def __exit__(self, exc_type, exc_value, traceback): | |||
|
141 | # close tmp file, and rename to final destination | |||
|
142 | self.fd.close() | |||
|
143 | shutil.move(self.tmp_oid_path, self.oid_path) | |||
|
144 | ||||
|
145 | return StoreEngine( | |||
|
146 | mode, self.store_path, self.oid_path, self.tmp_oid_path) | |||
|
147 | ||||
|
148 | def get_default_store(self): | |||
|
149 | """ | |||
|
150 | Default store, consistent with defaults of Mercurial large files store | |||
|
151 | which is /home/username/.cache/largefiles | |||
|
152 | """ | |||
|
153 | user_home = os.path.expanduser("~") | |||
|
154 | return os.path.join(user_home, '.cache', 'lfs-store') | |||
|
155 | ||||
|
156 | def has_oid(self): | |||
|
157 | return os.path.exists(os.path.join(self.store_path, self.oid)) | |||
|
158 | ||||
|
159 | def size_oid(self): | |||
|
160 | size = -1 | |||
|
161 | ||||
|
162 | if self.has_oid(): | |||
|
163 | oid = os.path.join(self.store_path, self.oid) | |||
|
164 | size = os.stat(oid).st_size | |||
|
165 | ||||
|
166 | return size |
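As a usage sketch of the store above (oid, repo and store location are illustrative values), the write goes to a `.tmp` file and is renamed on exit, as implemented in `StoreEngine.__exit__`:

    from vcsserver.git_lfs.lib import LFSOidStore

    store = LFSOidStore(oid='deadbeef', repo='some-repo',
                        store_location='/tmp/lfs-store')  # illustrative values
    with store.get_engine(mode='wb') as f:
        f.write('CONTENT')        # written to <store>/deadbeef.tmp, then moved
    assert store.has_oid()
    print(store.size_oid())       # 7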
@@ -0,0 +1,16 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
@@ -0,0 +1,237 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |||
|
17 | ||||
|
18 | import os | |||
|
19 | import pytest | |||
|
20 | from webtest.app import TestApp as WebObTestApp | |||
|
21 | ||||
|
22 | from vcsserver.git_lfs.app import create_app | |||
|
23 | ||||
|
24 | ||||
|
25 | @pytest.fixture(scope='function') | |||
|
26 | def git_lfs_app(tmpdir): | |||
|
27 | custom_app = WebObTestApp(create_app( | |||
|
28 | git_lfs_enabled=True, git_lfs_store_path=str(tmpdir))) | |||
|
29 | custom_app._store = str(tmpdir) | |||
|
30 | return custom_app | |||
|
31 | ||||
|
32 | ||||
|
33 | @pytest.fixture() | |||
|
34 | def http_auth(): | |||
|
35 | return {'HTTP_AUTHORIZATION': "Basic XXXXX"} | |||
|
36 | ||||
|
37 | ||||
|
38 | class TestLFSApplication(object): | |||
|
39 | ||||
|
40 | def test_app_wrong_path(self, git_lfs_app): | |||
|
41 | git_lfs_app.get('/repo/info/lfs/xxx', status=404) | |||
|
42 | ||||
|
43 | def test_app_deprecated_endpoint(self, git_lfs_app): | |||
|
44 | response = git_lfs_app.post('/repo/info/lfs/objects', status=501) | |||
|
45 | assert response.status_code == 501 | |||
|
46 | assert response.json == {u'message': u'LFS: v1 api not supported'} | |||
|
47 | ||||
|
48 | def test_app_lock_verify_api_not_available(self, git_lfs_app): | |||
|
49 | response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501) | |||
|
50 | assert response.status_code == 501 | |||
|
51 | assert response.json == { | |||
|
52 | u'message': u'GIT LFS locking api not supported'} | |||
|
53 | ||||
|
54 | def test_app_lock_api_not_available(self, git_lfs_app): | |||
|
55 | response = git_lfs_app.post('/repo/info/lfs/locks', status=501) | |||
|
56 | assert response.status_code == 501 | |||
|
57 | assert response.json == { | |||
|
58 | u'message': u'GIT LFS locking api not supported'} | |||
|
59 | ||||
|
60 | def test_app_batch_api_missing_auth(self, git_lfs_app,): | |||
|
61 | git_lfs_app.post_json( | |||
|
62 | '/repo/info/lfs/objects/batch', params={}, status=403) | |||
|
63 | ||||
|
64 | def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth): | |||
|
65 | response = git_lfs_app.post_json( | |||
|
66 | '/repo/info/lfs/objects/batch', params={}, status=400, | |||
|
67 | extra_environ=http_auth) | |||
|
68 | assert response.json == { | |||
|
69 | u'message': u'unsupported operation mode: `None`'} | |||
|
70 | ||||
|
71 | def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth): | |||
|
72 | response = git_lfs_app.post_json( | |||
|
73 | '/repo/info/lfs/objects/batch', params={'operation': 'download'}, | |||
|
74 | status=400, extra_environ=http_auth) | |||
|
75 | assert response.json == { | |||
|
76 | u'message': u'missing objects data'} | |||
|
77 | ||||
|
78 | def test_app_batch_api_unsupported_data_in_objects( | |||
|
79 | self, git_lfs_app, http_auth): | |||
|
80 | params = {'operation': 'download', | |||
|
81 | 'objects': [{}]} | |||
|
82 | response = git_lfs_app.post_json( | |||
|
83 | '/repo/info/lfs/objects/batch', params=params, status=400, | |||
|
84 | extra_environ=http_auth) | |||
|
85 | assert response.json == { | |||
|
86 | u'message': u'unsupported data in objects'} | |||
|
87 | ||||
|
88 | def test_app_batch_api_download_missing_object( | |||
|
89 | self, git_lfs_app, http_auth): | |||
|
90 | params = {'operation': 'download', | |||
|
91 | 'objects': [{'oid': '123', 'size': '1024'}]} | |||
|
92 | response = git_lfs_app.post_json( | |||
|
93 | '/repo/info/lfs/objects/batch', params=params, | |||
|
94 | extra_environ=http_auth) | |||
|
95 | ||||
|
96 | expected_objects = [ | |||
|
97 | {u'authenticated': True, | |||
|
98 | u'errors': {u'error': { | |||
|
99 | u'code': 404, | |||
|
100 | u'message': u'object: 123 does not exist in store'}}, | |||
|
101 | u'oid': u'123', | |||
|
102 | u'size': u'1024'} | |||
|
103 | ] | |||
|
104 | assert response.json == { | |||
|
105 | 'objects': expected_objects, 'transfer': 'basic'} | |||
|
106 | ||||
|
107 | def test_app_batch_api_download(self, git_lfs_app, http_auth): | |||
|
108 | oid = '456' | |||
|
109 | oid_path = os.path.join(git_lfs_app._store, oid) | |||
|
110 | if not os.path.isdir(os.path.dirname(oid_path)): | |||
|
111 | os.makedirs(os.path.dirname(oid_path)) | |||
|
112 | with open(oid_path, 'wb') as f: | |||
|
113 | f.write('OID_CONTENT') | |||
|
114 | ||||
|
115 | params = {'operation': 'download', | |||
|
116 | 'objects': [{'oid': oid, 'size': '1024'}]} | |||
|
117 | response = git_lfs_app.post_json( | |||
|
118 | '/repo/info/lfs/objects/batch', params=params, | |||
|
119 | extra_environ=http_auth) | |||
|
120 | ||||
|
121 | expected_objects = [ | |||
|
122 | {u'authenticated': True, | |||
|
123 | u'actions': { | |||
|
124 | u'download': { | |||
|
125 | u'header': {u'Authorization': u'Basic XXXXX'}, | |||
|
126 | u'href': u'http://localhost/repo/info/lfs/objects/456'}, | |||
|
127 | }, | |||
|
128 | u'oid': u'456', | |||
|
129 | u'size': u'1024'} | |||
|
130 | ] | |||
|
131 | assert response.json == { | |||
|
132 | 'objects': expected_objects, 'transfer': 'basic'} | |||
|
133 | ||||
|
134 | def test_app_batch_api_upload(self, git_lfs_app, http_auth): | |||
|
135 | params = {'operation': 'upload', | |||
|
136 | 'objects': [{'oid': '123', 'size': '1024'}]} | |||
|
137 | response = git_lfs_app.post_json( | |||
|
138 | '/repo/info/lfs/objects/batch', params=params, | |||
|
139 | extra_environ=http_auth) | |||
|
140 | expected_objects = [ | |||
|
141 | {u'authenticated': True, | |||
|
142 | u'actions': { | |||
|
143 | u'upload': { | |||
|
144 | u'header': {u'Authorization': u'Basic XXXXX'}, | |||
|
145 | u'href': u'http://localhost/repo/info/lfs/objects/123'}, | |||
|
146 | u'verify': { | |||
|
147 | u'header': {u'Authorization': u'Basic XXXXX'}, | |||
|
148 | u'href': u'http://localhost/repo/info/lfs/verify'} | |||
|
149 | }, | |||
|
150 | u'oid': u'123', | |||
|
151 | u'size': u'1024'} | |||
|
152 | ] | |||
|
153 | assert response.json == { | |||
|
154 | 'objects': expected_objects, 'transfer': 'basic'} | |||
|
155 | ||||
|
156 | def test_app_verify_api_missing_data(self, git_lfs_app): | |||
|
157 | params = {'oid': 'missing',} | |||
|
158 | response = git_lfs_app.post_json( | |||
|
159 | '/repo/info/lfs/verify', params=params, | |||
|
160 | status=400) | |||
|
161 | ||||
|
162 | assert response.json == { | |||
|
163 | u'message': u'missing oid and size in request data'} | |||
|
164 | ||||
|
165 | def test_app_verify_api_missing_obj(self, git_lfs_app): | |||
|
166 | params = {'oid': 'missing', 'size': '1024'} | |||
|
167 | response = git_lfs_app.post_json( | |||
|
168 | '/repo/info/lfs/verify', params=params, | |||
|
169 | status=404) | |||
|
170 | ||||
|
171 | assert response.json == { | |||
|
172 | u'message': u'oid `missing` does not exists in store'} | |||
|
173 | ||||
|
174 | def test_app_verify_api_size_mismatch(self, git_lfs_app): | |||
|
175 | oid = 'existing' | |||
|
176 | oid_path = os.path.join(git_lfs_app._store, oid) | |||
|
177 | if not os.path.isdir(os.path.dirname(oid_path)): | |||
|
178 | os.makedirs(os.path.dirname(oid_path)) | |||
|
179 | with open(oid_path, 'wb') as f: | |||
|
180 | f.write('OID_CONTENT') | |||
|
181 | ||||
|
182 | params = {'oid': oid, 'size': '1024'} | |||
|
183 | response = git_lfs_app.post_json( | |||
|
184 | '/repo/info/lfs/verify', params=params, status=422) | |||
|
185 | ||||
|
186 | assert response.json == { | |||
|
187 | u'message': u'requested file size mismatch ' | |||
|
188 | u'store size:11 requested:1024'} | |||
|
189 | ||||
|
190 | def test_app_verify_api(self, git_lfs_app): | |||
|
191 | oid = 'existing' | |||
|
192 | oid_path = os.path.join(git_lfs_app._store, oid) | |||
|
193 | if not os.path.isdir(os.path.dirname(oid_path)): | |||
|
194 | os.makedirs(os.path.dirname(oid_path)) | |||
|
195 | with open(oid_path, 'wb') as f: | |||
|
196 | f.write('OID_CONTENT') | |||
|
197 | ||||
|
198 | params = {'oid': oid, 'size': 11} | |||
|
199 | response = git_lfs_app.post_json( | |||
|
200 | '/repo/info/lfs/verify', params=params) | |||
|
201 | ||||
|
202 | assert response.json == { | |||
|
203 | u'message': {u'size': u'ok', u'in_store': u'ok'}} | |||
|
204 | ||||
|
205 | def test_app_download_api_oid_not_existing(self, git_lfs_app): | |||
|
206 | oid = 'missing' | |||
|
207 | ||||
|
208 | response = git_lfs_app.get( | |||
|
209 | '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404) | |||
|
210 | ||||
|
211 | assert response.json == { | |||
|
212 | u'message': u'requested file with oid `missing` not found in store'} | |||
|
213 | ||||
|
214 | def test_app_download_api(self, git_lfs_app): | |||
|
215 | oid = 'existing' | |||
|
216 | oid_path = os.path.join(git_lfs_app._store, oid) | |||
|
217 | if not os.path.isdir(os.path.dirname(oid_path)): | |||
|
218 | os.makedirs(os.path.dirname(oid_path)) | |||
|
219 | with open(oid_path, 'wb') as f: | |||
|
220 | f.write('OID_CONTENT') | |||
|
221 | ||||
|
222 | response = git_lfs_app.get( | |||
|
223 | '/repo/info/lfs/objects/{oid}'.format(oid=oid)) | |||
|
224 | assert response | |||
|
225 | ||||
|
226 | def test_app_upload(self, git_lfs_app): | |||
|
227 | oid = 'uploaded' | |||
|
228 | ||||
|
229 | response = git_lfs_app.put( | |||
|
230 | '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT') | |||
|
231 | ||||
|
232 | assert response.json == {u'upload': u'ok'} | |||
|
233 | ||||
|
234 | # verify that we actually wrote that OID | |||
|
235 | oid_path = os.path.join(git_lfs_app._store, oid) | |||
|
236 | assert os.path.isfile(oid_path) | |||
|
237 | assert 'CONTENT' == open(oid_path).read() |
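The test module paths are not visible in this rendering; assuming the files live under the git_lfs package, the suite could be run with something like:

    py.test vcsserver/git_lfs/tests/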
@@ -0,0 +1,123 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |||
|
17 | ||||
|
18 | import os | |||
|
19 | import pytest | |||
|
20 | from vcsserver.git_lfs.lib import OidHandler, LFSOidStore | |||
|
21 | ||||
|
22 | ||||
|
23 | @pytest.fixture() | |||
|
24 | def lfs_store(tmpdir): | |||
|
25 | repo = 'test' | |||
|
26 | oid = '123456789' | |||
|
27 | store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir)) | |||
|
28 | return store | |||
|
29 | ||||
|
30 | ||||
|
31 | @pytest.fixture() | |||
|
32 | def oid_handler(lfs_store): | |||
|
33 | store = lfs_store | |||
|
34 | repo = store.repo | |||
|
35 | oid = store.oid | |||
|
36 | ||||
|
37 | oid_handler = OidHandler( | |||
|
38 | store=store, repo_name=repo, auth=('basic', 'xxxx'), | |||
|
39 | oid=oid, | |||
|
40 | obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid', | |||
|
41 | obj_verify_href='http://localhost/verify') | |||
|
42 | return oid_handler | |||
|
43 | ||||
|
44 | ||||
|
45 | class TestOidHandler(object): | |||
|
46 | ||||
|
47 | @pytest.mark.parametrize('exec_action', [ | |||
|
48 | 'download', | |||
|
49 | 'upload', | |||
|
50 | ]) | |||
|
51 | def test_exec_action(self, exec_action, oid_handler): | |||
|
52 | handler = oid_handler.exec_operation(exec_action) | |||
|
53 | assert handler | |||
|
54 | ||||
|
55 | def test_exec_action_undefined(self, oid_handler): | |||
|
56 | with pytest.raises(AttributeError): | |||
|
57 | oid_handler.exec_operation('wrong') | |||
|
58 | ||||
|
59 | def test_download_oid_not_existing(self, oid_handler): | |||
|
60 | response, has_errors = oid_handler.exec_operation('download') | |||
|
61 | ||||
|
62 | assert response is None | |||
|
63 | assert has_errors['error'] == { | |||
|
64 | 'code': 404, | |||
|
65 | 'message': 'object: 123456789 does not exist in store'} | |||
|
66 | ||||
|
67 | def test_download_oid(self, oid_handler): | |||
|
68 | store = oid_handler.get_store() | |||
|
69 | if not os.path.isdir(os.path.dirname(store.oid_path)): | |||
|
70 | os.makedirs(os.path.dirname(store.oid_path)) | |||
|
71 | ||||
|
72 | with open(store.oid_path, 'wb') as f: | |||
|
73 | f.write('CONTENT') | |||
|
74 | ||||
|
75 | response, has_errors = oid_handler.exec_operation('download') | |||
|
76 | ||||
|
77 | assert has_errors is None | |||
|
78 | assert response['download'] == { | |||
|
79 | 'header': {'Authorization': 'basic xxxx'}, | |||
|
80 | 'href': 'http://localhost/handle_oid' | |||
|
81 | } | |||
|
82 | ||||
|
83 | def test_upload_oid_that_exists(self, oid_handler): | |||
|
84 | store = oid_handler.get_store() | |||
|
85 | if not os.path.isdir(os.path.dirname(store.oid_path)): | |||
|
86 | os.makedirs(os.path.dirname(store.oid_path)) | |||
|
87 | ||||
|
88 | with open(store.oid_path, 'wb') as f: | |||
|
89 | f.write('CONTENT') | |||
|
90 | ||||
|
91 | response, has_errors = oid_handler.exec_operation('upload') | |||
|
92 | assert has_errors is None | |||
|
93 | assert response is None | |||
|
94 | ||||
|
95 | def test_upload_oid(self, oid_handler): | |||
|
96 | response, has_errors = oid_handler.exec_operation('upload') | |||
|
97 | assert has_errors is None | |||
|
98 | assert response['upload'] == { | |||
|
99 | 'header': {'Authorization': 'basic xxxx'}, | |||
|
100 | 'href': 'http://localhost/handle_oid' | |||
|
101 | } | |||
|
102 | ||||
|
103 | ||||
|
104 | class TestLFSStore(object): | |||
|
105 | def test_write_oid(self, lfs_store): | |||
|
106 | oid_location = lfs_store.oid_path | |||
|
107 | ||||
|
108 | assert not os.path.isfile(oid_location) | |||
|
109 | ||||
|
110 | engine = lfs_store.get_engine(mode='wb') | |||
|
111 | with engine as f: | |||
|
112 | f.write('CONTENT') | |||
|
113 | ||||
|
114 | assert os.path.isfile(oid_location) | |||
|
115 | ||||
|
116 | def test_detect_has_oid(self, lfs_store): | |||
|
117 | ||||
|
118 | assert lfs_store.has_oid() is False | |||
|
119 | engine = lfs_store.get_engine(mode='wb') | |||
|
120 | with engine as f: | |||
|
121 | f.write('CONTENT') | |||
|
122 | ||||
|
123 | assert lfs_store.has_oid() is True No newline at end of file |
@@ -0,0 +1,50 b'' | |||||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |||
|
2 | # Copyright (C) 2014-2017 RhodeCode GmbH | |||
|
3 | # | |||
|
4 | # This program is free software; you can redistribute it and/or modify | |||
|
5 | # it under the terms of the GNU General Public License as published by | |||
|
6 | # the Free Software Foundation; either version 3 of the License, or | |||
|
7 | # (at your option) any later version. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU General Public License | |||
|
15 | # along with this program; if not, write to the Free Software Foundation, | |||
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |||
|
17 | import copy | |||
|
18 | from functools import wraps | |||
|
19 | ||||
|
20 | ||||
|
21 | def get_cython_compat_decorator(wrapper, func): | |||
|
22 | """ | |||
|
23 | Creates a cython compatible decorator. The previously used | |||
|
24 | decorator.decorator() function seems to be incompatible with cython. | |||
|
25 | ||||
|
26 | :param wrapper: __wrapper method of the decorator class | |||
|
27 | :param func: decorated function | |||
|
28 | """ | |||
|
29 | @wraps(func) | |||
|
30 | def local_wrapper(*args, **kwds): | |||
|
31 | return wrapper(func, *args, **kwds) | |||
|
32 | local_wrapper.__wrapped__ = func | |||
|
33 | return local_wrapper | |||
|
34 | ||||
|
35 | ||||
|
36 | def safe_result(result): | |||
|
37 | """clean result for better representation in logs""" | |||
|
38 | clean_copy = copy.deepcopy(result) | |||
|
39 | ||||
|
40 | try: | |||
|
41 | if 'objects' in clean_copy: | |||
|
42 | for oid_data in clean_copy['objects']: | |||
|
43 | if 'actions' in oid_data: | |||
|
44 | for action_name, data in oid_data['actions'].items(): | |||
|
45 | if 'header' in data: | |||
|
46 | data['header'] = {'Authorization': '*****'} | |||
|
47 | except Exception: | |||
|
48 | return result | |||
|
49 | ||||
|
50 | return clean_copy |
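An illustration of `safe_result` masking the Authorization header on a batch-style result (all values below are placeholders):

    result = {'objects': [
        {'oid': '123',
         'actions': {'download': {
             'href': 'http://localhost/repo/info/lfs/objects/123',
             'header': {'Authorization': 'Basic XXXXX'}}}}]}
    clean = safe_result(result)
    # clean['objects'][0]['actions']['download']['header']
    # -> {'Authorization': '*****'}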
@@ -1,5 +1,5 b'' | |||||
1 | [bumpversion] |
|
1 | [bumpversion] | |
2 |
current_version = 4. |
|
2 | current_version = 4.7.0 | |
3 | message = release: Bump version {current_version} to {new_version} |
|
3 | message = release: Bump version {current_version} to {new_version} | |
4 |
|
4 | |||
5 | [bumpversion:file:vcsserver/VERSION] |
|
5 | [bumpversion:file:vcsserver/VERSION] |
@@ -5,12 +5,10 b' done = false' | |||||
5 | done = true |
|
5 | done = true | |
6 |
|
6 | |||
7 | [task:fixes_on_stable] |
|
7 | [task:fixes_on_stable] | |
8 | done = true |
|
|||
9 |
|
8 | |||
10 | [task:pip2nix_generated] |
|
9 | [task:pip2nix_generated] | |
11 | done = true |
|
|||
12 |
|
10 | |||
13 | [release] |
|
11 | [release] | |
14 |
state = |
|
12 | state = in_progress | |
15 |
version = 4. |
|
13 | version = 4.7.0 | |
16 |
|
14 |
@@ -15,10 +15,8 b' port = 9900' | |||||
15 | ########################## |
|
15 | ########################## | |
16 | ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini |
|
16 | ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini | |
17 | use = egg:gunicorn#main |
|
17 | use = egg:gunicorn#main | |
18 |
## Sets the number of process workers. |
|
18 | ## Sets the number of process workers. Recommended | |
19 | ## when this option is set to more than one worker, recommended |
|
|||
20 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
19 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers | |
21 | ## The `instance_id = *` must be set in the [app:main] section below |
|
|||
22 | workers = 2 |
|
20 | workers = 2 | |
23 | ## process name |
|
21 | ## process name | |
24 | proc_name = rhodecode_vcsserver |
|
22 | proc_name = rhodecode_vcsserver |
@@ -89,6 +89,7 b' let' | |||||
89 | name = "rhodecode-vcsserver-${version}"; |
|
89 | name = "rhodecode-vcsserver-${version}"; | |
90 | releaseName = "RhodeCodeVCSServer-${version}"; |
|
90 | releaseName = "RhodeCodeVCSServer-${version}"; | |
91 | src = rhodecode-vcsserver-src; |
|
91 | src = rhodecode-vcsserver-src; | |
|
92 | dontStrip = true; # prevent strip, we don't need it. | |||
92 |
|
93 | |||
93 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([ |
|
94 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([ | |
94 | pkgs.git |
|
95 | pkgs.git |
@@ -159,13 +159,13 b'' | |||||
159 | }; |
|
159 | }; | |
160 | }; |
|
160 | }; | |
161 | decorator = super.buildPythonPackage { |
|
161 | decorator = super.buildPythonPackage { | |
162 |
name = "decorator-4.0.1 |
|
162 | name = "decorator-4.0.11"; | |
163 | buildInputs = with self; []; |
|
163 | buildInputs = with self; []; | |
164 | doCheck = false; |
|
164 | doCheck = false; | |
165 | propagatedBuildInputs = with self; []; |
|
165 | propagatedBuildInputs = with self; []; | |
166 | src = fetchurl { |
|
166 | src = fetchurl { | |
167 |
url = "https://pypi.python.org/packages/ |
|
167 | url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz"; | |
168 | md5 = "434b57fdc3230c500716c5aff8896100"; |
|
168 | md5 = "73644c8f0bd4983d1b6a34b49adec0ae"; | |
169 | }; |
|
169 | }; | |
170 | meta = { |
|
170 | meta = { | |
171 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ]; |
|
171 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ]; | |
@@ -315,13 +315,13 b'' | |||||
315 | }; |
|
315 | }; | |
316 | }; |
|
316 | }; | |
317 | mercurial = super.buildPythonPackage { |
|
317 | mercurial = super.buildPythonPackage { | |
318 |
name = "mercurial-4. |
|
318 | name = "mercurial-4.1.2"; | |
319 | buildInputs = with self; []; |
|
319 | buildInputs = with self; []; | |
320 | doCheck = false; |
|
320 | doCheck = false; | |
321 | propagatedBuildInputs = with self; []; |
|
321 | propagatedBuildInputs = with self; []; | |
322 | src = fetchurl { |
|
322 | src = fetchurl { | |
323 |
url = "https://pypi.python.org/packages/85 |
|
323 | url = "https://pypi.python.org/packages/88/c1/f0501fd67f5e69346da41ee0bd7b2619ce4bbc9854bb645074c418b9941f/mercurial-4.1.2.tar.gz"; | |
324 | md5 = "fa72a08e2723e4fa2a21c4e66437f3fa"; |
|
324 | md5 = "934c99808bdc8385e074b902d59b0d93"; | |
325 | }; |
|
325 | }; | |
326 | meta = { |
|
326 | meta = { | |
327 | license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ]; |
|
327 | license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ]; | |
@@ -445,13 +445,13 b'' | |||||
445 | }; |
|
445 | }; | |
446 | }; |
|
446 | }; | |
447 | pyramid = super.buildPythonPackage { |
|
447 | pyramid = super.buildPythonPackage { | |
448 |
name = "pyramid-1. |
|
448 | name = "pyramid-1.7.4"; | |
449 | buildInputs = with self; []; |
|
449 | buildInputs = with self; []; | |
450 | doCheck = false; |
|
450 | doCheck = false; | |
451 | propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy]; |
|
451 | propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy]; | |
452 | src = fetchurl { |
|
452 | src = fetchurl { | |
453 |
url = "https://pypi.python.org/packages/3 |
|
453 | url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz"; | |
454 | md5 = "b18688ff3cc33efdbb098a35b45dd122"; |
|
454 | md5 = "6ef1dfdcff9136d04490410757c4c446"; | |
455 | }; |
|
455 | }; | |
456 | meta = { |
|
456 | meta = { | |
457 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
457 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; | |
@@ -588,7 +588,7 b'' | |||||
588 | }; |
|
588 | }; | |
589 | }; |
|
589 | }; | |
590 | rhodecode-vcsserver = super.buildPythonPackage { |
|
590 | rhodecode-vcsserver = super.buildPythonPackage { | |
591 |
name = "rhodecode-vcsserver-4. |
|
591 | name = "rhodecode-vcsserver-4.7.0"; | |
592 | buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj]; |
|
592 | buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj]; | |
593 | doCheck = true; |
|
593 | doCheck = true; | |
594 | propagatedBuildInputs = with self; [Beaker configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage]; |
|
594 | propagatedBuildInputs = with self; [Beaker configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage]; |
@@ -1,15 +1,16 b'' | |||||
1 | # core |
|
1 | ## core | |
2 | setuptools==30.1.0 |
|
2 | setuptools==30.1.0 | |
3 |
|
3 | |||
4 | Beaker==1.7.0 |
|
4 | Beaker==1.7.0 | |
5 | configobj==5.0.6 |
|
5 | configobj==5.0.6 | |
|
6 | decorator==4.0.11 | |||
6 | dulwich==0.13.0 |
|
7 | dulwich==0.13.0 | |
7 | hgsubversion==1.8.6 |
|
8 | hgsubversion==1.8.6 | |
8 | infrae.cache==1.0.1 |
|
9 | infrae.cache==1.0.1 | |
9 |
mercurial==4. |
|
10 | mercurial==4.1.2 | |
10 | msgpack-python==0.4.8 |
|
11 | msgpack-python==0.4.8 | |
11 | pyramid==1.6.1 |
|
|||
12 | pyramid-jinja2==2.5 |
|
12 | pyramid-jinja2==2.5 | |
|
13 | pyramid==1.7.4 | |||
13 | pyramid-mako==1.0.2 |
|
14 | pyramid-mako==1.0.2 | |
14 | repoze.lru==0.6 |
|
15 | repoze.lru==0.6 | |
15 | simplejson==3.7.2 |
|
16 | simplejson==3.7.2 | |
@@ -28,7 +29,6 b' zope.interface==4.1.3' | |||||
28 | ## debug |
|
29 | ## debug | |
29 | ipdb==0.10.1 |
|
30 | ipdb==0.10.1 | |
30 | ipython==5.1.0 |
|
31 | ipython==5.1.0 | |
31 |
|
||||
32 | # http servers |
|
32 | # http servers | |
33 | gevent==1.1.2 |
|
33 | gevent==1.1.2 | |
34 | greenlet==0.4.10 |
|
34 | greenlet==0.4.10 |
@@ -15,6 +15,8 b'' | |||||
15 | # along with this program; if not, write to the Free Software Foundation, |
|
15 | # along with this program; if not, write to the Free Software Foundation, | |
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
17 |
|
17 | |||
|
18 | import sys | |||
|
19 | import traceback | |||
18 | import logging |
|
20 | import logging | |
19 | import urlparse |
|
21 | import urlparse | |
20 |
|
22 | |||
@@ -80,3 +82,17 b' def obfuscate_qs(query_string):' | |||||
80 |
|
82 | |||
81 | return '&'.join('{}{}'.format( |
|
83 | return '&'.join('{}{}'.format( | |
82 | k, '={}'.format(v) if v else '') for k, v in parsed) |
|
84 | k, '={}'.format(v) if v else '') for k, v in parsed) | |
|
85 | ||||
|
86 | ||||
|
87 | def raise_from_original(new_type): | |||
|
88 | """ | |||
|
89 | Raise a new exception type with original args and traceback. | |||
|
90 | """ | |||
|
91 | exc_type, exc_value, exc_traceback = sys.exc_info() | |||
|
92 | ||||
|
93 | traceback.format_exception(exc_type, exc_value, exc_traceback) | |||
|
94 | ||||
|
95 | try: | |||
|
96 | raise new_type(*exc_value.args), None, exc_traceback | |||
|
97 | finally: | |||
|
98 | del exc_traceback |
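A sketch of how the relocated `raise_from_original` helper is used at a call site; the wrapped exception type and the failing call are only examples, and the helper itself relies on Python 2 raise syntax, matching the codebase:

    from vcsserver import exceptions
    from vcsserver.base import raise_from_original

    try:
        do_remote_call()   # placeholder for a backend call that may fail
    except KeyError:
        # re-raise as a vcsserver-level error, preserving args and traceback
        raise_from_original(exceptions.UnhandledException)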
@@ -35,10 +35,10 b' from dulwich.server import update_server' | |||||
35 |
|
35 | |||
36 | from vcsserver import exceptions, settings, subprocessio |
|
36 | from vcsserver import exceptions, settings, subprocessio | |
37 | from vcsserver.utils import safe_str |
|
37 | from vcsserver.utils import safe_str | |
38 | from vcsserver.base import RepoFactory, obfuscate_qs |
|
38 | from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original | |
39 | from vcsserver.hgcompat import ( |
|
39 | from vcsserver.hgcompat import ( | |
40 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler) |
|
40 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler) | |
41 |
|
41 | from vcsserver.git_lfs.lib import LFSOidStore | ||
42 |
|
42 | |||
43 | DIR_STAT = stat.S_IFDIR |
|
43 | DIR_STAT = stat.S_IFDIR | |
44 | FILE_MODE = stat.S_IFMT |
|
44 | FILE_MODE = stat.S_IFMT | |
@@ -58,6 +58,14 b' def reraise_safe_exceptions(func):' | |||||
58 | raise exceptions.LookupException(e.message) |
|
58 | raise exceptions.LookupException(e.message) | |
59 | except (HangupException, UnexpectedCommandError) as e: |
|
59 | except (HangupException, UnexpectedCommandError) as e: | |
60 | raise exceptions.VcsException(e.message) |
|
60 | raise exceptions.VcsException(e.message) | |
|
61 | except Exception as e: | |||
|
62 | # NOTE(marcink): because of how dulwich handles some exceptions | |||
|
63 | # (KeyError on empty repos), we cannot track this and catch all | |||
|
64 | # exceptions, as they may be exceptions from other handlers | |||
|
65 | #if not hasattr(e, '_vcs_kind'): | |||
|
66 | #log.exception("Unhandled exception in git remote call") | |||
|
67 | #raise_from_original(exceptions.UnhandledException) | |||
|
68 | raise | |||
61 | return wrapper |
|
69 | return wrapper | |
62 |
|
70 | |||
63 |
|
71 | |||
@@ -97,6 +105,11 b' class GitRemote(object):' | |||||
97 | "_commit": self.revision, |
|
105 | "_commit": self.revision, | |
98 | } |
|
106 | } | |
99 |
|
107 | |||
|
108 | def _wire_to_config(self, wire): | |||
|
109 | if 'config' in wire: | |||
|
110 | return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']]) | |||
|
111 | return {} | |||
|
112 | ||||
100 | def _assign_ref(self, wire, ref, commit_id): |
|
113 | def _assign_ref(self, wire, ref, commit_id): | |
101 | repo = self._factory.repo(wire) |
|
114 | repo = self._factory.repo(wire) | |
102 | repo[ref] = commit_id |
|
115 | repo[ref] = commit_id | |
@@ -133,6 +146,56 b' class GitRemote(object):' | |||||
133 | blob = repo[sha] |
|
146 | blob = repo[sha] | |
134 | return blob.raw_length() |
|
147 | return blob.raw_length() | |
135 |
|
148 | |||
|
149 | def _parse_lfs_pointer(self, raw_content): | |||
|
150 | ||||
|
151 | spec_string = 'version https://git-lfs.github.com/spec' | |||
|
152 | if raw_content and raw_content.startswith(spec_string): | |||
|
153 | pattern = re.compile(r""" | |||
|
154 | (?:\n)? | |||
|
155 | ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n | |||
|
156 | ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n | |||
|
157 | ^size[ ](?P<oid_size>[0-9]+)\n | |||
|
158 | (?:\n)? | |||
|
159 | """, re.VERBOSE | re.MULTILINE) | |||
|
160 | match = pattern.match(raw_content) | |||
|
161 | if match: | |||
|
162 | return match.groupdict() | |||
|
163 | ||||
|
164 | return {} | |||
|
165 | ||||
|
166 | @reraise_safe_exceptions | |||
|
167 | def is_large_file(self, wire, sha): | |||
|
168 | repo = self._factory.repo(wire) | |||
|
169 | blob = repo[sha] | |||
|
170 | return self._parse_lfs_pointer(blob.as_raw_string()) | |||
|
171 | ||||
|
172 | @reraise_safe_exceptions | |||
|
173 | def in_largefiles_store(self, wire, oid): | |||
|
174 | repo = self._factory.repo(wire) | |||
|
175 | conf = self._wire_to_config(wire) | |||
|
176 | ||||
|
177 | store_location = conf.get('vcs_git_lfs_store_location') | |||
|
178 | if store_location: | |||
|
179 | repo_name = repo.path | |||
|
180 | store = LFSOidStore( | |||
|
181 | oid=oid, repo=repo_name, store_location=store_location) | |||
|
182 | return store.has_oid() | |||
|
183 | ||||
|
184 | return False | |||
|
185 | ||||
|
186 | @reraise_safe_exceptions | |||
|
187 | def store_path(self, wire, oid): | |||
|
188 | repo = self._factory.repo(wire) | |||
|
189 | conf = self._wire_to_config(wire) | |||
|
190 | ||||
|
191 | store_location = conf.get('vcs_git_lfs_store_location') | |||
|
192 | if store_location: | |||
|
193 | repo_name = repo.path | |||
|
194 | store = LFSOidStore( | |||
|
195 | oid=oid, repo=repo_name, store_location=store_location) | |||
|
196 | return store.oid_path | |||
|
197 | raise ValueError('Unable to fetch oid with path {}'.format(oid)) | |||
|
198 | ||||
136 | @reraise_safe_exceptions |
|
199 | @reraise_safe_exceptions | |
137 | def bulk_request(self, wire, rev, pre_load): |
|
200 | def bulk_request(self, wire, rev, pre_load): | |
138 | result = {} |
|
201 | result = {} |
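For reference, `_parse_lfs_pointer` above is written against the standard git-lfs pointer layout; an example pointer (hash and size are illustrative) from which the regex captures `spec_ver`, `oid_hash` and `oid_size`:

    version https://git-lfs.github.com/spec/v1
    oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393
    size 12345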
@@ -18,7 +18,6 b'' | |||||
18 | import io |
|
18 | import io | |
19 | import logging |
|
19 | import logging | |
20 | import stat |
|
20 | import stat | |
21 | import sys |
|
|||
22 | import urllib |
|
21 | import urllib | |
23 | import urllib2 |
|
22 | import urllib2 | |
24 |
|
23 | |||
@@ -26,9 +25,10 b' from hgext import largefiles, rebase' | |||||
26 | from hgext.strip import strip as hgext_strip |
|
25 | from hgext.strip import strip as hgext_strip | |
27 | from mercurial import commands |
|
26 | from mercurial import commands | |
28 | from mercurial import unionrepo |
|
27 | from mercurial import unionrepo | |
|
28 | from mercurial import verify | |||
29 |
|
29 | |||
30 | from vcsserver import exceptions |
|
30 | from vcsserver import exceptions | |
31 | from vcsserver.base import RepoFactory, obfuscate_qs |
|
31 | from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original | |
32 | from vcsserver.hgcompat import ( |
|
32 | from vcsserver.hgcompat import ( | |
33 | archival, bin, clone, config as hgconfig, diffopts, hex, |
|
33 | archival, bin, clone, config as hgconfig, diffopts, hex, | |
34 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler, |
|
34 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler, | |
@@ -91,17 +91,6 b' def reraise_safe_exceptions(func):' | |||||
91 | return wrapper |
|
91 | return wrapper | |
92 |
|
92 | |||
93 |
|
93 | |||
94 | def raise_from_original(new_type): |
|
|||
95 | """ |
|
|||
96 | Raise a new exception type with original args and traceback. |
|
|||
97 | """ |
|
|||
98 | _, original, traceback = sys.exc_info() |
|
|||
99 | try: |
|
|||
100 | raise new_type(*original.args), None, traceback |
|
|||
101 | finally: |
|
|||
102 | del traceback |
|
|||
103 |
|
||||
104 |
|
||||
105 | class MercurialFactory(RepoFactory): |
|
94 | class MercurialFactory(RepoFactory): | |
106 |
|
95 | |||
107 | def _create_config(self, config, hooks=True): |
|
96 | def _create_config(self, config, hooks=True): | |
@@ -496,7 +485,7 @@ class HgRemote(object):
         return largefiles.lfutil.isstandin(path)
 
     @reraise_safe_exceptions
-    def in_store(self, wire, sha):
+    def in_largefiles_store(self, wire, sha):
         repo = self._factory.repo(wire)
         return largefiles.lfutil.instore(repo, sha)
 
@@ -598,6 +587,21 @@ class HgRemote(object):
             repo.baseui, repo, ctx.node(), update=update, backup=backup)
 
     @reraise_safe_exceptions
+    def verify(self, wire,):
+        repo = self._factory.repo(wire)
+        baseui = self._factory._create_config(wire['config'])
+        baseui.setconfig('ui', 'quiet', 'false')
+        output = io.BytesIO()
+
+        def write(data, **unused_kwargs):
+            output.write(data)
+        baseui.write = write
+
+        repo.ui = baseui
+        verify.verify(repo)
+        return output.getvalue()
+
+    @reraise_safe_exceptions
     def tag(self, wire, name, revision, message, local, user,
             tag_time, tag_timezone):
         repo = self._factory.repo(wire)
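The new verify call captures Mercurial's console output by swapping the ui write function for an in-memory buffer, so the full `hg verify` report travels back over the wire as one string. A hedged sketch of what a caller would see; `hg_remote` and the contents of `wire` are set up by the caller and only assumed here:

    # `hg_remote` would be an HgRemote instance, `wire` the usual dict
    # carrying the repository path and config for the call.
    report = hg_remote.verify(wire)
    for line in report.splitlines():
        print(line)   # mercurial's own "checking changesets", "checking manifests", ... lines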
@@ -674,12 +678,10 @@ class HgRemote(object):
     @reraise_safe_exceptions
     def ancestor(self, wire, revision1, revision2):
         repo = self._factory.repo(wire)
-        baseui = self._factory._create_config(wire['config'])
-        output = io.BytesIO()
-        baseui.write = output.write
-        commands.debugancestor(baseui, repo, revision1, revision2)
-
-        return output.getvalue()
+        changelog = repo.changelog
+        lookup = repo.lookup
+        a = changelog.ancestor(lookup(revision1), lookup(revision2))
+        return hex(a)
 
     @reraise_safe_exceptions
     def push(self, wire, revisions, dest_path, hooks=True,
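Instead of running `commands.debugancestor` and scraping its printed output, the new code asks the changelog directly and returns the hex node. A rough standalone equivalent, assuming `repo` is an open Mercurial localrepo and the revision strings are illustrative:

    from mercurial.node import hex   # same helper the module gets via hgcompat

    node1 = repo.lookup('rev-or-hash-1')
    node2 = repo.lookup('rev-or-hash-2')
    ancestor = repo.changelog.ancestor(node1, node2)
    print(hex(ancestor))             # 40-char hex of the common ancestor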
@@ -17,12 +17,14 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+import io
+import sys
+import json
+import logging
 import collections
 import importlib
-import io
-import json
 import subprocess
-import sys
+
 from httplib import HTTPConnection
 
 
@@ -33,6 +35,8 @@ import simplejson as json
 
 from vcsserver import exceptions
 
+log = logging.getLogger(__name__)
+
 
 class HooksHttpClient(object):
     connection = None
@@ -105,6 +109,11 @@ class GitMessageWriter(RemoteMessageWrit
 
 def _handle_exception(result):
     exception_class = result.get('exception')
+    exception_traceback = result.get('exception_traceback')
+
+    if exception_traceback:
+        log.error('Got traceback from remote call:%s', exception_traceback)
+
     if exception_class == 'HTTPLockedRC':
         raise exceptions.RepositoryLockedException(*result['exception_args'])
     elif exception_class == 'RepositoryError':
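For orientation, a sketch of the kind of result dict _handle_exception now inspects; the keys match the ones read above, the values are invented:

    result = {
        'exception': 'HTTPLockedRC',                      # or 'RepositoryError', or absent
        'exception_args': ['repository is locked'],
        'exception_traceback': 'Traceback (most recent call last): ...',
    }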
@@ -152,26 +161,42 @@ def post_pull(ui, repo, **kwargs):
     return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
 
 
-def pre_push(ui, repo, **kwargs):
-    return _call_hook('pre_push', _extras_from_ui(ui), HgMessageWriter(ui))
+def pre_push(ui, repo, node=None, **kwargs):
+    extras = _extras_from_ui(ui)
+
+    rev_data = []
+    if node and kwargs.get('hooktype') == 'pretxnchangegroup':
+        branches = collections.defaultdict(list)
+        for commit_id, branch in _rev_range_hash(repo, node, with_branch=True):
+            branches[branch].append(commit_id)
+
+        for branch, commits in branches.iteritems():
+            old_rev = kwargs.get('node_last') or commits[0]
+            rev_data.append({
+                'old_rev': old_rev,
+                'new_rev': commits[-1],
+                'ref': '',
+                'type': 'branch',
+                'name': branch,
+            })
+
+    extras['commit_ids'] = rev_data
+    return _call_hook('pre_push', extras, HgMessageWriter(ui))
 
 
-# N.B.(skreft): the two functions below were taken and adapted from
-# rhodecode.lib.vcs.remote.handle_git_pre_receive
-# They are required to compute the commit_ids
-def _get_revs(repo, rev_opt):
-    revs = [rev for rev in mercurial.scmutil.revrange(repo, rev_opt)]
-    if len(revs) == 0:
-        return (mercurial.node.nullrev, mercurial.node.nullrev)
+def _rev_range_hash(repo, node, with_branch=False):
 
-    return max(revs), min(revs)
-
+    commits = []
+    for rev in xrange(repo[node], len(repo)):
+        ctx = repo[rev]
+        commit_id = mercurial.node.hex(ctx.node())
+        branch = ctx.branch()
+        if with_branch:
+            commits.append((commit_id, branch))
+        else:
+            commits.append(commit_id)
 
-def _rev_range_hash(repo, node):
-    stop, start = _get_revs(repo, [node + ':'])
-    revs = [mercurial.node.hex(repo[r].node()) for r in xrange(start, stop + 1)]
-
-    return revs
+    return commits
 
 
 def post_push(ui, repo, node, **kwargs):
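With this change a Mercurial pretxnchangegroup push reports its incoming commits grouped per branch. The resulting extras['commit_ids'] entry looks roughly like this; the keys come from the code above, the hashes are made up:

    extras['commit_ids'] = [
        {'old_rev': 'aaaa...', 'new_rev': 'bbbb...',
         'ref': '', 'type': 'branch', 'name': 'default'},
        {'old_rev': 'cccc...', 'new_rev': 'dddd...',
         'ref': '', 'type': 'branch', 'name': 'stable'},
    ]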
@@ -257,7 +282,23 @@ def git_post_pull(extras):
     return HookResponse(status, stdout.getvalue())
 
 
-def git_pre_receive(unused_repo_path, unused_revs, env):
+def _parse_git_ref_lines(revision_lines):
+    rev_data = []
+    for revision_line in revision_lines or []:
+        old_rev, new_rev, ref = revision_line.strip().split(' ')
+        ref_data = ref.split('/', 2)
+        if ref_data[1] in ('tags', 'heads'):
+            rev_data.append({
+                'old_rev': old_rev,
+                'new_rev': new_rev,
+                'ref': ref,
+                'type': ref_data[1],
+                'name': ref_data[2],
+            })
+    return rev_data
+
+
+def git_pre_receive(unused_repo_path, revision_lines, env):
     """
     Pre push hook.
 
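A quick illustration of the new helper on a single pre-receive stdin line; the SHAs are fabricated, the behaviour follows directly from the code above:

    line = ('0000000000000000000000000000000000000000 '
            '1111111111111111111111111111111111111111 refs/heads/master')
    print(_parse_git_ref_lines([line]))
    # [{'old_rev': '000...000', 'new_rev': '111...111',
    #   'ref': 'refs/heads/master', 'type': 'heads', 'name': 'master'}]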
@@ -268,8 +309,10 @@ def git_pre_receive(unused_repo_path, un
     :rtype: int
     """
     extras = json.loads(env['RC_SCM_DATA'])
+    rev_data = _parse_git_ref_lines(revision_lines)
     if 'push' not in extras['hooks']:
         return 0
+    extras['commit_ids'] = rev_data
     return _call_hook('pre_push', extras, GitMessageWriter())
 
 
@@ -277,7 +320,7 @@ def _run_command(arguments):
     """
     Run the specified command and return the stdout.
 
-    :param arguments: sequence of program ar
+    :param arguments: sequence of program arguments (including the program name)
     :type arguments: list[str]
     """
     # TODO(skreft): refactor this method and all the other similar ones.
@@ -308,18 +351,7 @@ def git_post_receive(unused_repo_path, r
     if 'push' not in extras['hooks']:
         return 0
 
-    rev_data = []
-    for revision_line in revision_lines:
-        old_rev, new_rev, ref = revision_line.strip().split(' ')
-        ref_data = ref.split('/', 2)
-        if ref_data[1] in ('tags', 'heads'):
-            rev_data.append({
-                'old_rev': old_rev,
-                'new_rev': new_rev,
-                'ref': ref,
-                'type': ref_data[1],
-                'name': ref_data[2],
-            })
+    rev_data = _parse_git_ref_lines(revision_lines)
 
     git_revs = []
 
@@ -339,7 +371,7 @@ def git_post_receive(unused_repo_path, r
         except Exception:
             cmd = ['git', 'symbolic-ref', 'HEAD',
                    'refs/heads/%s' % push_ref['name']]
-            print
+            print("Setting default branch to %s" % push_ref['name'])
             _run_command(cmd)
 
     cmd = ['git', 'for-each-ref', '--format=%(refname)',
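The command built in the except branch simply points HEAD of the bare repository at the branch that was just pushed. A sketch of the same call with a hypothetical branch name, using the module's _run_command helper:

    # equivalent shell invocation:  git symbolic-ref HEAD refs/heads/develop
    _run_command(['git', 'symbolic-ref', 'HEAD', 'refs/heads/develop'])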
@@ -30,6 +30,7 @@ from pyramid.config import Configurator
 from pyramid.wsgi import wsgiapp
 
 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
+from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
 from vcsserver.echo_stub.echo_app import EchoApp
 from vcsserver.exceptions import HTTPRepoLocked
@@ -40,11 +41,13 @@ try:
 except ImportError:
     GitFactory = None
     GitRemote = None
+
 try:
     from vcsserver.hg import MercurialFactory, HgRemote
 except ImportError:
     MercurialFactory = None
     HgRemote = None
+
 try:
     from vcsserver.svn import SubversionFactory, SvnRemote
 except ImportError:
@@ -153,8 +156,10 @@ class HTTPApplication(object):
     remote_wsgi = remote_wsgi
     _use_echo_app = False
 
-    def __init__(self, settings=None):
+    def __init__(self, settings=None, global_config=None):
         self.config = Configurator(settings=settings)
+        self.global_config = global_config
+
         locale = settings.get('locale', '') or 'en_US.UTF-8'
         vcs = VCS(locale=locale, cache_config=settings)
         self._remotes = {
@@ -209,12 +214,7 @@ class HTTPApplication(object):
             return {'status': '404 NOT FOUND'}
         self.config.add_notfound_view(notfound, renderer='json')
 
-        self.config.add_view(
-            self.handle_vcs_exception, context=Exception,
-            custom_predicates=[self.is_vcs_exception])
-
-        self.config.add_view(
-            self.general_error_handler, context=Exception)
+        self.config.add_view(self.handle_vcs_exception, context=Exception)
 
         self.config.add_tween(
             'vcsserver.tweens.RequestWrapperTween',
@@ -273,12 +273,26 @@ class HTTPApplication(object):
 
     def service_view(self, request):
         import vcsserver
+        import ConfigParser as configparser
+
         payload = msgpack.unpackb(request.body, use_list=True)
+
+        try:
+            path = self.global_config['__file__']
+            config = configparser.ConfigParser()
+            config.read(path)
+            parsed_ini = config
+            if parsed_ini.has_section('server:main'):
+                parsed_ini = dict(parsed_ini.items('server:main'))
+        except Exception:
+            log.exception('Failed to read .ini file for display')
+            parsed_ini = {}
+
         resp = {
             'id': payload.get('id'),
             'result': dict(
                 version=vcsserver.__version__,
-                config=
+                config=parsed_ini,
                 payload=payload,
             )
         }
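The net effect is that the service view response now echoes the `[server:main]` section of the .ini file the server was started from. The parsing step in isolation, with a hypothetical file name:

    import ConfigParser as configparser   # Python 2, as in the module above

    config = configparser.ConfigParser()
    config.read('vcsserver.ini')                          # hypothetical ini path
    if config.has_section('server:main'):
        parsed_ini = dict(config.items('server:main'))    # e.g. {'host': ..., 'port': ...}
    else:
        parsed_ini = {}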
@@ -351,9 +365,31 @@ class HTTPApplication(object):
             config = msgpack.unpackb(packed_config)
 
             environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
-            app = scm_app.create_git_wsgi_app(
-                repo_path, repo_name, config)
+            content_type = environ.get('CONTENT_TYPE', '')
+
+            path = environ['PATH_INFO']
+            is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
+            log.debug(
+                'LFS: Detecting if request `%s` is LFS server path based '
+                'on content type:`%s`, is_lfs:%s',
+                path, content_type, is_lfs_request)
+
+            if not is_lfs_request:
+                # fallback detection by path
+                if GIT_LFS_PROTO_PAT.match(path):
+                    is_lfs_request = True
+                    log.debug(
+                        'LFS: fallback detection by path of: `%s`, is_lfs:%s',
+                        path, is_lfs_request)
+
+            if is_lfs_request:
+                app = scm_app.create_git_lfs_wsgi_app(
+                    repo_path, repo_name, config)
+            else:
+                app = scm_app.create_git_wsgi_app(
+                    repo_path, repo_name, config)
             return app(environ, start_response)
+
         return _git_stream
 
     def is_vcs_view(self, context, request):
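Detection order matters here: the Git LFS content type wins, and the URL pattern is only consulted as a fallback. A standalone sketch of the same check, reusing the two constants imported at the top of this file; the request values in the comment are invented:

    from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT

    def is_lfs_request(path, content_type):
        # content-type check first, path pattern only as a fallback
        if GIT_LFS_CONTENT_TYPE in content_type:
            return True
        return bool(GIT_LFS_PROTO_PAT.match(path))

    # e.g. is_lfs_request('/myrepo/info/lfs/objects/batch', '') relies on the
    # path pattern; a real LFS batch request normally also carries the content type.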
@@ -364,27 +400,17 @@ class HTTPApplication(object):
         backend = request.matchdict.get('backend')
         return backend in self._remotes
 
-    def is_vcs_exception(self, context, request):
-        """
-        View predicate that returns true if the context object is a VCS
-        exception.
-        """
-        return hasattr(context, '_vcs_kind')
-
     def handle_vcs_exception(self, exception, request):
-        if exception._vcs_kind == 'repo_locked':
+        _vcs_kind = getattr(exception, '_vcs_kind', '')
+        if _vcs_kind == 'repo_locked':
             # Get custom repo-locked status code if present.
             status_code = request.headers.get('X-RC-Locked-Status-Code')
             return HTTPRepoLocked(
                 title=exception.message, status_code=status_code)
 
         # Re-raise exception if we can not handle it.
-        raise exception
-
-    def general_error_handler(self, exception, request):
         log.exception(
-            'error occurred handling this request for path: %s',
-            request.path)
+            'error occurred handling this request for path: %s', request.path)
         raise exception
 
 
@@ -404,5 +430,5 @@ def main(global_config, **settings):
     if MercurialFactory:
         hgpatches.patch_largefiles_capabilities()
         hgpatches.patch_subrepo_type_mapping()
-    app = HTTPApplication(settings=settings)
+    app = HTTPApplication(settings=settings, global_config=global_config)
     return app.wsgi_app()
@@ -15,8 +15,8 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+import os
 import logging
-import os
 
 import mercurial
 import mercurial.error
@@ -25,7 +25,7 @@ import mercurial.hgweb.hgweb_mod
 import mercurial.hgweb.protocol
 import webob.exc
 
-from vcsserver import pygrack, exceptions, settings
+from vcsserver import pygrack, exceptions, settings, git_lfs
 
 
 log = logging.getLogger(__name__)
@@ -132,6 +132,9 @@ def create_hg_wsgi_app(repo_path, repo_n
 
 
 class GitHandler(object):
+    """
+    Handler for Git operations like push/pull etc
+    """
     def __init__(self, repo_location, repo_name, git_path, update_server_info,
                  extras):
         if not os.path.isdir(repo_location):
@@ -172,3 +175,35 @@ def create_git_wsgi_app(repo_path, repo_
         repo_path, repo_name, git_path, update_server_info, config)
 
     return app
+
+
+class GitLFSHandler(object):
+    """
+    Handler for Git LFS operations
+    """
+
+    def __init__(self, repo_location, repo_name, git_path, update_server_info,
+                 extras):
+        if not os.path.isdir(repo_location):
+            raise OSError(repo_location)
+        self.content_path = repo_location
+        self.repo_name = repo_name
+        self.repo_location = repo_location
+        self.extras = extras
+        self.git_path = git_path
+        self.update_server_info = update_server_info
+
+    def get_app(self, git_lfs_enabled, git_lfs_store_path):
+        app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
+        return app
+
+
+def create_git_lfs_wsgi_app(repo_path, repo_name, config):
+    git_path = settings.GIT_EXECUTABLE
+    update_server_info = config.pop('git_update_server_info')
+    git_lfs_enabled = config.pop('git_lfs_enabled')
+    git_lfs_store_path = config.pop('git_lfs_store_path')
+    app = GitLFSHandler(
+        repo_path, repo_name, git_path, update_server_info, config)
+
+    return app.get_app(git_lfs_enabled, git_lfs_store_path)
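The new factory pops its LFS-specific settings off the per-request config before the remainder is stored as extras. A sketch of a call with the keys it expects; the paths and values are illustrative only:

    config = {
        'git_update_server_info': False,
        'git_lfs_enabled': True,
        'git_lfs_store_path': '/var/opt/lfs_store',   # hypothetical store location
        # any remaining keys end up in GitLFSHandler.extras
    }
    lfs_app = create_git_lfs_wsgi_app('/repos/myrepo.git', 'myrepo', config)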
@@ -33,7 +33,7 @@ import svn.repos
 
 from vcsserver import svn_diff
 from vcsserver import exceptions
-from vcsserver.base import RepoFactory
+from vcsserver.base import RepoFactory, raise_from_original
 
 
 log = logging.getLogger(__name__)
@@ -62,17 +62,6 @@ def reraise_safe_exceptions(func):
     return wrapper
 
 
-def raise_from_original(new_type):
-    """
-    Raise a new exception type with original args and traceback.
-    """
-    _, original, traceback = sys.exc_info()
-    try:
-        raise new_type(*original.args), None, traceback
-    finally:
-        del traceback
-
-
 class SubversionFactory(RepoFactory):
 
     def _create_repo(self, wire, create, compatible_version):
@@ -388,6 +377,10 @@ class SvnRemote(object):
                 "Path might not exist %s, %s" % (path1, path2))
         return ""
 
+    @reraise_safe_exceptions
+    def is_large_file(self, wire, path):
+        return False
+
 
 class SvnDiffer(object):
     """
@@ -16,8 +16,23 @@
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 
+def safe_int(val, default=None):
+    """
+    Returns int() of val if val is not convertable to int use default
+    instead
 
-# TODO: johbo: That's a copy from rhodecode
+    :param val:
+    :param default:
+    """
+
+    try:
+        val = int(val)
+    except (ValueError, TypeError):
+        val = default
+
+    return val
+
+
 def safe_str(unicode_, to_encoding=['utf8']):
     """
     safe str function. Does few trick to turn unicode_ into string
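Example behaviour of the new helper, following the definition above:

    safe_int('42')             # 42
    safe_int('4.2')            # None (int('4.2') raises ValueError)
    safe_int(None, default=0)  # 0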