##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r184:102735b3 merge stable
parent child Browse files
Show More
@@ -0,0 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19 from app import create_app
@@ -0,0 +1,276 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import re
19 import logging
20 from wsgiref.util import FileWrapper
21
22 import simplejson as json
23 from pyramid.config import Configurator
24 from pyramid.response import Response, FileIter
25 from pyramid.httpexceptions import (
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 HTTPUnprocessableEntity)
28
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.utils import safe_int
32
log = logging.getLogger(__name__)


# Content type advertised by the git-lfs protocol.
# NOTE(review): the spec uses 'application/vnd.git-lfs+json' for JSON
# payloads -- confirm whether the '+json' suffix should be appended here.
GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'
# Matches request paths of the form '/<repo>/info/lfs/<rest>'.
GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
39
def write_response_error(http_exception, text=None):
    """Build an HTTP error usable as a pyramid response.

    :param http_exception: pyramid ``HTTPException`` class to instantiate
    :param text: optional message, serialized as ``{"message": text}`` body
    :return: the instantiated exception object
    """
    mimetype = 'application/json'
    error = http_exception(content_type=mimetype)
    # set the attribute explicitly as well, mirroring the constructor kwarg
    error.content_type = mimetype
    if text:
        error.body = json.dumps({'message': text})
    log.debug('LFS: writing response of type %s to client with text:%s',
              http_exception, text)
    return error
49
50
class AuthHeaderRequired(object):
    """
    Decorator to check if request has proper auth-header
    """

    def __call__(self, func):
        # wrap the bound __wrapper so the decorated view stays
        # introspectable (and cython-compatible, see utils module)
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # NOTE(review): fargs[1] assumes the view is invoked with two
        # positional args, presumably (context, request) by pyramid's view
        # mapper -- confirm against the add_view registrations below
        request = fargs[1]
        auth = request.authorization
        if not auth:
            # no Authorization header present: refuse the request outright
            return write_response_error(HTTPForbidden)
        # drop the leading context argument before calling the real view
        return func(*fargs[1:], **fkwargs)
65
66
67 # views
68
def lfs_objects(request):
    """Deprecated v1 objects endpoint -- always answers 501."""
    msg = 'LFS: v1 api not supported'
    log.warning('LFS: v1 api not supported, reporting it back to client')
    return write_response_error(HTTPNotImplemented, msg)
73
74
@AuthHeaderRequired()
def lfs_objects_batch(request):
    """
    The client sends the following information to the Batch endpoint to transfer some objects:

    operation - Should be download or upload.
    transfers - An optional Array of String identifiers for transfer
        adapters that the client has configured. If omitted, the basic
        transfer adapter MUST be assumed by the server.
    objects - An Array of objects to download.
        oid - String OID of the LFS object.
        size - Integer byte size of the LFS object. Must be at least zero.
    """
    auth = request.authorization
    repo = request.matchdict.get('repo')
    data = request.json

    operation = data.get('operation')
    if operation not in ('download', 'upload'):
        log.debug('LFS: unsupported operation:%s', operation)
        return write_response_error(
            HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)

    if 'objects' not in data:
        log.debug('LFS: missing objects data')
        return write_response_error(
            HTTPBadRequest, 'missing objects data')

    log.debug('LFS: handling operation of type: %s', operation)

    objects = []
    for spec in data['objects']:
        # each object spec must carry both oid and size
        try:
            oid = spec['oid']
            obj_size = spec['size']
        except KeyError:
            log.exception('LFS, failed to extract data')
            return write_response_error(
                HTTPBadRequest, 'unsupported data in objects')

        obj_data = {'oid': oid}
        obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid)
        obj_verify_href = request.route_url('lfs_objects_verify', repo=repo)

        store = LFSOidStore(
            oid, repo, store_location=request.registry.git_lfs_store_path)
        handler = OidHandler(
            store, repo, auth, oid, obj_size, obj_data,
            obj_href, obj_verify_href)

        # exec_operation also verifies the OID against the store
        actions, errors = handler.exec_operation(operation)
        if errors:
            log.warning('LFS: got following errors: %s', errors)
            obj_data['errors'] = errors
        if actions:
            obj_data['actions'] = actions

        obj_data['size'] = obj_size
        obj_data['authenticated'] = True
        objects.append(obj_data)

    result = {'objects': objects, 'transfer': 'basic'}
    log.debug('LFS Response %s', safe_result(result))
    return result
143
144
def lfs_objects_oid_upload(request):
    """Stream the request body into the LFS store under the given oid."""
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)

    # copy in 64kB chunks to keep memory usage flat for large objects
    chunks = FileWrapper(request.body_file_seekable, blksize=64 * 1024)
    with store.get_engine(mode='wb') as destination:
        for chunk in chunks:
            destination.write(chunk)

    return {'upload': 'ok'}
157
158
def lfs_objects_oid_download(request):
    """Serve a stored LFS object as an octet-stream, or 404 if unknown."""
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)

    # TODO(marcink): support range header ?
    # Range: bytes=0-, `bytes=(\d+)\-.*`

    # FileIter takes ownership of the file object and streams it out
    oid_file = open(store.oid_path, 'rb')
    response = Response(
        content_type='application/octet-stream', app_iter=FileIter(oid_file))
    response.headers.add('X-RC-LFS-Response-Oid', str(oid))
    return response
178
179
def lfs_objects_verify(request):
    """Verify a previously uploaded object exists and matches the given size.

    Returns 400 when oid/size are missing from the request data, 404 when
    the oid is not in the store, and 422 on a size mismatch.
    """
    repo = request.matchdict.get('repo')

    data = request.json
    oid = data.get('oid')
    size = safe_int(data.get('size'))

    # NOTE: size may legitimately be 0 (empty objects are valid in LFS, the
    # batch spec says size "must be at least zero"); only reject when size
    # is absent/unparsable. The previous `not (oid and size)` check also
    # rejected zero-byte objects.
    if not oid or size is None:
        return write_response_error(
            HTTPBadRequest, 'missing oid and size in request data')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'oid `%s` does not exists in store' % oid)

    store_size = store.size_oid()
    if store_size != size:
        msg = 'requested file size mismatch store size:%s requested:%s' % (
            store_size, size)
        return write_response_error(
            HTTPUnprocessableEntity, msg)

    return {'message': {'size': 'ok', 'in_store': 'ok'}}
206
207
def lfs_objects_lock(request):
    """Locking API is not implemented -- always answers 501."""
    msg = 'GIT LFS locking api not supported'
    return write_response_error(HTTPNotImplemented, msg)
211
212
def not_found(request):
    """Catch-all 404 handler for unknown LFS API paths."""
    msg = 'request path not found'
    return write_response_error(HTTPNotFound, msg)
216
217
def lfs_disabled(request):
    """Reply used for every request when LFS is disabled for the repo."""
    msg = 'GIT LFS disabled for this repo'
    return write_response_error(HTTPNotImplemented, msg)
221
222
def git_lfs_app(config):
    """Pyramid ``includeme``-style hook wiring all git-lfs routes and views.

    The ``{repo:.*?[^/]}`` pattern captures the (possibly nested) repository
    path segment preceding the ``/info/lfs`` marker.
    """

    # v1 API deprecation endpoint
    config.add_route('lfs_objects',
                     '/{repo:.*?[^/]}/info/lfs/objects')
    config.add_view(lfs_objects, route_name='lfs_objects',
                    request_method='POST', renderer='json')

    # locking API (registered but answers 501, see lfs_objects_lock)
    config.add_route('lfs_objects_lock',
                     '/{repo:.*?[^/]}/info/lfs/locks')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
                    request_method=('POST', 'GET'), renderer='json')

    config.add_route('lfs_objects_lock_verify',
                     '/{repo:.*?[^/]}/info/lfs/locks/verify')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
                    request_method=('POST', 'GET'), renderer='json')

    # batch API
    config.add_route('lfs_objects_batch',
                     '/{repo:.*?[^/]}/info/lfs/objects/batch')
    config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
                    request_method='POST', renderer='json')

    # oid upload/download API (same route, dispatched by request method)
    config.add_route('lfs_objects_oid',
                     '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
    config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
                    request_method='PUT', renderer='json')
    config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
                    request_method='GET', renderer='json')

    # verification API
    config.add_route('lfs_objects_verify',
                     '/{repo:.*?[^/]}/info/lfs/verify')
    config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
                    request_method='POST', renderer='json')

    # not found handler for API
    config.add_notfound_view(not_found, renderer='json')
264
265
def create_app(git_lfs_enabled, git_lfs_store_path):
    """Build the git-lfs WSGI application.

    :param git_lfs_enabled: when falsy, every request gets a 501 reply
        reporting that LFS is disabled for this repo
    :param git_lfs_store_path: filesystem location for stored LFS objects
    :return: a WSGI application callable
    """
    config = Configurator()

    if not git_lfs_enabled:
        # not found handler for API, reporting disabled LFS support
        config.add_notfound_view(lfs_disabled, renderer='json')
        return config.make_wsgi_app()

    config.include(git_lfs_app)
    config.registry.git_lfs_store_path = git_lfs_store_path
    return config.make_wsgi_app()
@@ -0,0 +1,166 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import shutil
20 import logging
21 from collections import OrderedDict
22
23 log = logging.getLogger(__name__)
24
25
class OidHandler(object):
    """Builds download/upload/verify action descriptors for one LFS oid."""

    def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
                 obj_verify_href=None):
        self.current_store = store
        self.repo_name = repo_name
        self.auth = auth
        self.oid = oid
        self.obj_size = obj_size
        self.obj_data = obj_data
        self.obj_href = obj_href
        self.obj_verify_href = obj_verify_href

    def get_store(self, mode=None):
        # `mode` is accepted for interface symmetry but unused here
        return self.current_store

    def get_auth(self):
        """returns auth header for re-use in upload/download"""
        return " ".join(self.auth)

    def download(self):
        """Build the 'download' action, or a 404 error if the oid is absent."""
        store = self.get_store()
        response = None
        has_errors = None

        if not store.has_oid():
            # error reply back to client that something is wrong with dl
            err_msg = 'object: {} does not exist in store'.format(store.oid)
            has_errors = OrderedDict(
                error=OrderedDict(code=404, message=err_msg))

        action = OrderedDict(
            href=self.obj_href,
            header=OrderedDict([("Authorization", self.get_auth())]))
        if not has_errors:
            response = OrderedDict(download=action)
        return response, has_errors

    def upload(self, skip_existing=True):
        """
        Write upload action for git-lfs server
        """
        store = self.get_store()
        response = None
        has_errors = None

        # verify if we have the OID before, if we do, reply with empty
        if store.has_oid():
            log.debug('LFS: store already has oid %s', store.oid)
            if skip_existing:
                log.debug('LFS: skipping further action as oid is existing')
                return response, has_errors

        action = OrderedDict(
            href=self.obj_href,
            header=OrderedDict([("Authorization", self.get_auth())]))
        if not has_errors:
            response = OrderedDict(upload=action)
            # if specified in handler, return the verification endpoint
            if self.obj_verify_href:
                response['verify'] = OrderedDict(
                    href=self.obj_verify_href,
                    header=OrderedDict([("Authorization", self.get_auth())]))
        return response, has_errors

    def exec_operation(self, operation, *args, **kwargs):
        """Dispatch to the method named by *operation* ('download'/'upload')."""
        handler = getattr(self, operation)
        log.debug('LFS: handling request using %s handler', handler)
        return handler(*args, **kwargs)
105
106
class LFSOidStore(object):
    """Filesystem-backed storage for a single LFS object identified by oid."""

    def __init__(self, oid, repo, store_location=None):
        self.oid = oid
        self.repo = repo
        self.store_path = store_location or self.get_default_store()
        # writes go to a '.tmp' path first, then get renamed into place
        self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
        self.oid_path = os.path.join(self.store_path, oid)
        self.fd = None

    def get_engine(self, mode):
        """
        engine = .get_engine(mode='wb')
        with engine as f:
            f.write('...')
        """

        class StoreEngine(object):
            def __init__(self, mode, store_path, oid_path, tmp_oid_path):
                self.mode = mode
                self.store_path = store_path
                self.oid_path = oid_path
                self.tmp_oid_path = tmp_oid_path

            def __enter__(self):
                if not os.path.isdir(self.store_path):
                    os.makedirs(self.store_path)

                # TODO(marcink): maybe write metadata here with size/oid ?
                fd = open(self.tmp_oid_path, self.mode)
                self.fd = fd
                return fd

            def __exit__(self, exc_type, exc_value, traceback):
                # close tmp file, and rename to final destination
                self.fd.close()
                if exc_type is None:
                    # success: publish the fully written tmp file
                    shutil.move(self.tmp_oid_path, self.oid_path)
                else:
                    # bugfix: on error the partially written tmp file used to
                    # be moved into place anyway, corrupting the store; drop
                    # it instead and let the exception propagate
                    if os.path.exists(self.tmp_oid_path):
                        os.remove(self.tmp_oid_path)

        return StoreEngine(
            mode, self.store_path, self.oid_path, self.tmp_oid_path)

    def get_default_store(self):
        """
        Default store, consistent with defaults of Mercurial large files store
        which is /home/username/.cache/largefiles
        """
        user_home = os.path.expanduser("~")
        return os.path.join(user_home, '.cache', 'lfs-store')

    def has_oid(self):
        # True only once a write has been fully committed (renamed in place)
        return os.path.exists(os.path.join(self.store_path, self.oid))

    def size_oid(self):
        """Return the stored object's size in bytes, or -1 if not present."""
        size = -1

        if self.has_oid():
            oid = os.path.join(self.store_path, self.oid)
            size = os.stat(oid).st_size

        return size
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,237 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
21
22 from vcsserver.git_lfs.app import create_app
23
24
@pytest.fixture(scope='function')
def git_lfs_app(tmpdir):
    """Fresh LFS WSGI app per test, storing objects under *tmpdir*."""
    custom_app = WebObTestApp(create_app(
        git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
    # keep the store location around so tests can inspect written objects
    custom_app._store = str(tmpdir)
    return custom_app
31
32
@pytest.fixture()
def http_auth():
    """WSGI environ fragment carrying a dummy basic-auth header."""
    return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
36
37
class TestLFSApplication(object):
    """Integration tests driving the LFS app end-to-end via webtest.

    NOTE(review): file writes below pass str literals to files opened in
    'wb' mode -- presumably Python 2; under Python 3 these would need to
    be bytes. Confirm target interpreter before porting.
    """

    def test_app_wrong_path(self, git_lfs_app):
        git_lfs_app.get('/repo/info/lfs/xxx', status=404)

    def test_app_deprecated_endpoint(self, git_lfs_app):
        response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
        assert response.status_code == 501
        assert response.json == {u'message': u'LFS: v1 api not supported'}

    def test_app_lock_verify_api_not_available(self, git_lfs_app):
        response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
        assert response.status_code == 501
        assert response.json == {
            u'message': u'GIT LFS locking api not supported'}

    def test_app_lock_api_not_available(self, git_lfs_app):
        response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
        assert response.status_code == 501
        assert response.json == {
            u'message': u'GIT LFS locking api not supported'}

    def test_app_batch_api_missing_auth(self, git_lfs_app,):
        # batch endpoint is guarded by AuthHeaderRequired -> 403 without auth
        git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={}, status=403)

    def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={}, status=400,
            extra_environ=http_auth)
        assert response.json == {
            u'message': u'unsupported operation mode: `None`'}

    def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={'operation': 'download'},
            status=400, extra_environ=http_auth)
        assert response.json == {
            u'message': u'missing objects data'}

    def test_app_batch_api_unsupported_data_in_objects(
            self, git_lfs_app, http_auth):
        params = {'operation': 'download',
                  'objects': [{}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params, status=400,
            extra_environ=http_auth)
        assert response.json == {
            u'message': u'unsupported data in objects'}

    def test_app_batch_api_download_missing_object(
            self, git_lfs_app, http_auth):
        params = {'operation': 'download',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)

        expected_objects = [
            {u'authenticated': True,
             u'errors': {u'error': {
                 u'code': 404,
                 u'message': u'object: 123 does not exist in store'}},
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert response.json == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_download(self, git_lfs_app, http_auth):
        # seed the store with a stored object first
        oid = '456'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'operation': 'download',
                  'objects': [{'oid': oid, 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)

        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'download': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/objects/456'},
             },
             u'oid': u'456',
             u'size': u'1024'}
        ]
        assert response.json == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_upload(self, git_lfs_app, http_auth):
        params = {'operation': 'upload',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)
        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'upload': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/objects/123'},
                 u'verify': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/verify'}
             },
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert response.json == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_verify_api_missing_data(self, git_lfs_app):
        params = {'oid': 'missing',}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params,
            status=400)

        assert response.json == {
            u'message': u'missing oid and size in request data'}

    def test_app_verify_api_missing_obj(self, git_lfs_app):
        params = {'oid': 'missing', 'size': '1024'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params,
            status=404)

        assert response.json == {
            u'message': u'oid `missing` does not exists in store'}

    def test_app_verify_api_size_mismatch(self, git_lfs_app):
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        # stored content is 11 bytes, requested size is 1024 -> 422
        params = {'oid': oid, 'size': '1024'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params, status=422)

        assert response.json == {
            u'message': u'requested file size mismatch '
                        u'store size:11 requested:1024'}

    def test_app_verify_api(self, git_lfs_app):
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'oid': oid, 'size': 11}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params)

        assert response.json == {
            u'message': {u'size': u'ok', u'in_store': u'ok'}}

    def test_app_download_api_oid_not_existing(self, git_lfs_app):
        oid = 'missing'

        response = git_lfs_app.get(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)

        assert response.json == {
            u'message': u'requested file with oid `missing` not found in store'}

    def test_app_download_api(self, git_lfs_app):
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        response = git_lfs_app.get(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid))
        assert response

    def test_app_upload(self, git_lfs_app):
        oid = 'uploaded'

        response = git_lfs_app.put(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')

        assert response.json == {u'upload': u'ok'}

        # verify that we actually wrote that OID
        oid_path = os.path.join(git_lfs_app._store, oid)
        assert os.path.isfile(oid_path)
        assert 'CONTENT' == open(oid_path).read()
@@ -0,0 +1,123 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import pytest
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21
22
@pytest.fixture()
def lfs_store(tmpdir):
    """LFSOidStore with a fixed oid/repo, backed by the per-test tmpdir."""
    repo = 'test'
    oid = '123456789'
    store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
    return store
29
30
@pytest.fixture()
def oid_handler(lfs_store):
    """OidHandler bound to *lfs_store* with dummy auth and endpoint URLs."""
    store = lfs_store
    repo = store.repo
    oid = store.oid

    oid_handler = OidHandler(
        store=store, repo_name=repo, auth=('basic', 'xxxx'),
        oid=oid,
        obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
        obj_verify_href='http://localhost/verify')
    return oid_handler
43
44
class TestOidHandler(object):
    """Unit tests for OidHandler action generation and dispatch."""

    @pytest.mark.parametrize('exec_action', [
        'download',
        'upload',
    ])
    def test_exec_action(self, exec_action, oid_handler):
        handler = oid_handler.exec_operation(exec_action)
        assert handler

    def test_exec_action_undefined(self, oid_handler):
        # dispatch uses getattr, so unknown operations raise AttributeError
        with pytest.raises(AttributeError):
            oid_handler.exec_operation('wrong')

    def test_download_oid_not_existing(self, oid_handler):
        response, has_errors = oid_handler.exec_operation('download')

        assert response is None
        assert has_errors['error'] == {
            'code': 404,
            'message': 'object: 123456789 does not exist in store'}

    def test_download_oid(self, oid_handler):
        # seed the backing store with content for the fixture oid
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')

        response, has_errors = oid_handler.exec_operation('download')

        assert has_errors is None
        assert response['download'] == {
            'header': {'Authorization': 'basic xxxx'},
            'href': 'http://localhost/handle_oid'
        }

    def test_upload_oid_that_exists(self, oid_handler):
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')

        # existing oid with skip_existing default -> empty (None) response
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response is None

    def test_upload_oid(self, oid_handler):
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response['upload'] == {
            'header': {'Authorization': 'basic xxxx'},
            'href': 'http://localhost/handle_oid'
        }
102
103
class TestLFSStore(object):
    """Unit tests for LFSOidStore write and existence checks."""
    def test_write_oid(self, lfs_store):
        oid_location = lfs_store.oid_path

        assert not os.path.isfile(oid_location)

        # writing through the engine commits the file to oid_path on exit
        engine = lfs_store.get_engine(mode='wb')
        with engine as f:
            f.write('CONTENT')

        assert os.path.isfile(oid_location)

    def test_detect_has_oid(self, lfs_store):

        assert lfs_store.has_oid() is False
        engine = lfs_store.get_engine(mode='wb')
        with engine as f:
            f.write('CONTENT')

        assert lfs_store.has_oid() is True
@@ -0,0 +1,50 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import copy
18 from functools import wraps
19
20
def get_cython_compat_decorator(wrapper, func):
    """
    Build a cython-compatible decorator around *func*.

    The previously used decorator.decorator() helper proved incompatible
    with cython, hence this hand-rolled equivalent.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    @wraps(func)
    def compat_wrapper(*call_args, **call_kwargs):
        # delegate to the decorator's wrapper, passing the target first
        return wrapper(func, *call_args, **call_kwargs)

    compat_wrapper.__wrapped__ = func
    return compat_wrapper
34
35
def safe_result(result):
    """Return a deep copy of *result* with Authorization headers masked.

    Intended for logging; the input is never mutated. On any unexpected
    structure the original is returned unchanged (best effort only).
    """
    sanitized = copy.deepcopy(result)

    try:
        if 'objects' in sanitized:
            for oid_data in sanitized['objects']:
                if 'actions' not in oid_data:
                    continue
                for _name, action in oid_data['actions'].items():
                    if 'header' in action:
                        action['header'] = {'Authorization': '*****'}
    except Exception:
        return result

    return sanitized
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.6.1
2 current_version = 4.7.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.6.1
12 state = in_progress
13 version = 4.7.0
16 14
@@ -1,111 +1,109 b''
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 3 # #
4 4 ################################################################################
5 5
6 6
7 7 [server:main]
8 8 ## COMMON ##
9 9 host = 127.0.0.1
10 10 port = 9900
11 11
12 12
13 13 ##########################
14 14 ## GUNICORN WSGI SERVER ##
15 15 ##########################
16 16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 17 use = egg:gunicorn#main
18 ## Sets the number of process workers. You must set `instance_id = *`
19 ## when this option is set to more than one worker, recommended
18 ## Sets the number of process workers. Recommended
20 19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
21 ## The `instance_id = *` must be set in the [app:main] section below
22 20 workers = 2
23 21 ## process name
24 22 proc_name = rhodecode_vcsserver
25 23 ## type of worker class, one of sync, gevent
 26 24 ## recommended for bigger setups is using one of the workers other than sync
27 25 worker_class = sync
28 26 ## The maximum number of simultaneous clients. Valid only for Gevent
29 27 #worker_connections = 10
30 28 ## max number of requests that worker will handle before being gracefully
31 29 ## restarted, could prevent memory leaks
32 30 max_requests = 1000
33 31 max_requests_jitter = 30
34 32 ## amount of time a worker can spend with handling a request before it
35 33 ## gets killed and restarted. Set to 6hrs
36 34 timeout = 21600
37 35
38 36
39 37 [app:main]
40 38 use = egg:rhodecode-vcsserver
41 39
42 40 pyramid.default_locale_name = en
43 41 pyramid.includes =
44 42
45 43 ## default locale used by VCS systems
46 44 locale = en_US.UTF-8
47 45
48 46 # cache regions, please don't change
49 47 beaker.cache.regions = repo_object
50 48 beaker.cache.repo_object.type = memorylru
51 49 beaker.cache.repo_object.max_items = 100
52 50 # cache auto-expires after N seconds
53 51 beaker.cache.repo_object.expire = 300
54 52 beaker.cache.repo_object.enabled = true
55 53
56 54
57 55 ################################
58 56 ### LOGGING CONFIGURATION ####
59 57 ################################
60 58 [loggers]
61 59 keys = root, vcsserver, pyro4, beaker
62 60
63 61 [handlers]
64 62 keys = console
65 63
66 64 [formatters]
67 65 keys = generic
68 66
69 67 #############
70 68 ## LOGGERS ##
71 69 #############
72 70 [logger_root]
73 71 level = NOTSET
74 72 handlers = console
75 73
76 74 [logger_vcsserver]
77 75 level = DEBUG
78 76 handlers =
79 77 qualname = vcsserver
80 78 propagate = 1
81 79
82 80 [logger_beaker]
83 81 level = DEBUG
84 82 handlers =
85 83 qualname = beaker
86 84 propagate = 1
87 85
88 86 [logger_pyro4]
89 87 level = DEBUG
90 88 handlers =
91 89 qualname = Pyro4
92 90 propagate = 1
93 91
94 92
95 93 ##############
96 94 ## HANDLERS ##
97 95 ##############
98 96
99 97 [handler_console]
100 98 class = StreamHandler
101 99 args = (sys.stderr,)
102 100 level = DEBUG
103 101 formatter = generic
104 102
105 103 ################
106 104 ## FORMATTERS ##
107 105 ################
108 106
109 107 [formatter_generic]
110 108 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
111 109 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
@@ -1,153 +1,154 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 { pkgs ? (import <nixpkgs> {})
8 8 , pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? true
11 11 }:
12 12
13 13 let pkgs_ = pkgs; in
14 14
15 15 let
16 16 pkgs = pkgs_.overridePackages (self: super: {
17 17 # bump GIT version
18 18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 19 name = "git-2.9.3";
20 20 src = pkgs.fetchurl {
21 21 url = "https://www.kernel.org/pub/software/scm/git/git-2.9.3.tar.xz";
22 22 sha256 = "0qzs681a64k3shh5p0rg41l1z16fbk5sj0xga45k34hp1hsp654z";
23 23 };
24 24
25 25 });
26 26
27 27 # Override subversion derivation to
28 28 # - activate python bindings
29 29 subversion = let
30 30 subversionWithPython = super.subversion.override {
31 31 httpSupport = true;
32 32 pythonBindings = true;
33 33 python = self.python27Packages.python;
34 34 };
35 35
36 36 in
37 37
38 38 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
39 39 patches = (oldAttrs.patches or []) ++
40 40 pkgs.lib.optionals pkgs.stdenv.isDarwin [
41 41 # johbo: "import svn.client" fails on darwin currently.
42 42 ./pkgs/subversion-1.9.4-darwin.patch
43 43 ];
44 44 });
45 45
46 46 });
47 47
48 48 inherit (pkgs.lib) fix extends;
49 49 basePythonPackages = with builtins; if isAttrs pythonPackages
50 50 then pythonPackages
51 51 else getAttr pythonPackages pkgs;
52 52
53 53 elem = builtins.elem;
54 54 basename = path: with pkgs.lib; last (splitString "/" path);
55 55 startsWith = prefix: full: let
56 56 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
57 57 in actualPrefix == prefix;
58 58
59 59 src-filter = path: type: with pkgs.lib;
60 60 let
61 61 ext = last (splitString "." path);
62 62 in
63 63 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
64 64 "node_modules" "build" "data" "tmp"] &&
65 65 !elem ext ["egg-info" "pyc"] &&
66 66 !startsWith "result" path;
67 67
68 68 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
69 69
70 70 pythonGeneratedPackages = self: basePythonPackages.override (a: {
71 71 inherit self;
72 72 }) // (scopedImport {
73 73 self = self;
74 74 super = basePythonPackages;
75 75 inherit pkgs;
76 76 inherit (pkgs) fetchurl fetchgit;
77 77 } ./pkgs/python-packages.nix);
78 78
79 79 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
80 80 inherit basePythonPackages pkgs;
81 81 };
82 82
83 83 version = builtins.readFile ./vcsserver/VERSION;
84 84
85 85 pythonLocalOverrides = self: super: {
86 86 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
87 87 inherit doCheck version;
88 88
89 89 name = "rhodecode-vcsserver-${version}";
90 90 releaseName = "RhodeCodeVCSServer-${version}";
91 91 src = rhodecode-vcsserver-src;
92 dontStrip = true; # prevent strip, we don't need it.
92 93
93 94 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
94 95 pkgs.git
95 96 pkgs.subversion
96 97 ]);
97 98
98 99 # TODO: johbo: Make a nicer way to expose the parts. Maybe
99 100 # pkgs/default.nix?
100 101 passthru = {
101 102 pythonPackages = self;
102 103 };
103 104
104 105 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
105 106 preCheck = ''
106 107 export PATH="$out/bin:$PATH"
107 108 '';
108 109
109 110 # put custom attrs here
110 111 checkPhase = ''
111 112 runHook preCheck
112 113 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
113 114 runHook postCheck
114 115 '';
115 116
116 117 postInstall = ''
117 118 echo "Writing meta information for rccontrol to nix-support/rccontrol"
118 119 mkdir -p $out/nix-support/rccontrol
119 120 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
120 121 echo "DONE: Meta information for rccontrol written"
121 122
122 123 ln -s ${self.pyramid}/bin/* $out/bin/
123 124 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
124 125
125 126 # Symlink version control utilities
126 127 #
127 128 # We ensure that always the correct version is available as a symlink.
128 129 # So that users calling them via the profile path will always use the
129 130 # correct version.
130 131 ln -s ${pkgs.git}/bin/git $out/bin
131 132 ln -s ${self.mercurial}/bin/hg $out/bin
132 133 ln -s ${pkgs.subversion}/bin/svn* $out/bin
133 134
134 135 for file in $out/bin/*; do
135 136 wrapProgram $file \
136 137 --set PATH $PATH \
137 138 --set PYTHONPATH $PYTHONPATH \
138 139 --set PYTHONHASHSEED random
139 140 done
140 141 '';
141 142
142 143 });
143 144 };
144 145
145 146 # Apply all overrides and fix the final package set
146 147 myPythonPackages =
147 148 (fix
148 149 (extends pythonExternalOverrides
149 150 (extends pythonLocalOverrides
150 151 (extends pythonOverrides
151 152 pythonGeneratedPackages))));
152 153
153 154 in myPythonPackages.rhodecode-vcsserver
@@ -1,812 +1,812 b''
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Beaker = super.buildPythonPackage {
6 6 name = "Beaker-1.7.0";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 9 propagatedBuildInputs = with self; [];
10 10 src = fetchurl {
11 11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
12 12 md5 = "386be3f7fe427358881eee4622b428b3";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Jinja2 = super.buildPythonPackage {
19 19 name = "Jinja2-2.8";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [MarkupSafe];
23 23 src = fetchurl {
24 24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 25 md5 = "edb51693fe22c53cee5403775c71a99e";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 Mako = super.buildPythonPackage {
32 32 name = "Mako-1.0.6";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [MarkupSafe];
36 36 src = fetchurl {
37 37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
38 38 md5 = "a28e22a339080316b2acc352b9ee631c";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 MarkupSafe = super.buildPythonPackage {
45 45 name = "MarkupSafe-0.23";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
52 52 };
53 53 meta = {
54 54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 55 };
56 56 };
57 57 PasteDeploy = super.buildPythonPackage {
58 58 name = "PasteDeploy-1.5.2";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 64 md5 = "352b7205c78c8de4987578d19431af3b";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.mit ];
68 68 };
69 69 };
70 70 Pyro4 = super.buildPythonPackage {
71 71 name = "Pyro4-4.41";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [serpent];
75 75 src = fetchurl {
76 76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
77 77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.mit ];
81 81 };
82 82 };
83 83 WebOb = super.buildPythonPackage {
84 84 name = "WebOb-1.3.1";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 87 propagatedBuildInputs = with self; [];
88 88 src = fetchurl {
89 89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
90 90 md5 = "20918251c5726956ba8fef22d1556177";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 WebTest = super.buildPythonPackage {
97 97 name = "WebTest-1.4.3";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [WebOb];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
103 103 md5 = "631ce728bed92c681a4020a36adbc353";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.mit ];
107 107 };
108 108 };
109 109 backports.shutil-get-terminal-size = super.buildPythonPackage {
110 110 name = "backports.shutil-get-terminal-size-1.0.0";
111 111 buildInputs = with self; [];
112 112 doCheck = false;
113 113 propagatedBuildInputs = with self; [];
114 114 src = fetchurl {
115 115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
116 116 md5 = "03267762480bd86b50580dc19dff3c66";
117 117 };
118 118 meta = {
119 119 license = [ pkgs.lib.licenses.mit ];
120 120 };
121 121 };
122 122 configobj = super.buildPythonPackage {
123 123 name = "configobj-5.0.6";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [six];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 130 };
131 131 meta = {
132 132 license = [ pkgs.lib.licenses.bsdOriginal ];
133 133 };
134 134 };
135 135 cov-core = super.buildPythonPackage {
136 136 name = "cov-core-1.15.0";
137 137 buildInputs = with self; [];
138 138 doCheck = false;
139 139 propagatedBuildInputs = with self; [coverage];
140 140 src = fetchurl {
141 141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 coverage = super.buildPythonPackage {
149 149 name = "coverage-3.7.1";
150 150 buildInputs = with self; [];
151 151 doCheck = false;
152 152 propagatedBuildInputs = with self; [];
153 153 src = fetchurl {
154 154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
156 156 };
157 157 meta = {
158 158 license = [ pkgs.lib.licenses.bsdOriginal ];
159 159 };
160 160 };
161 161 decorator = super.buildPythonPackage {
162 name = "decorator-4.0.10";
162 name = "decorator-4.0.11";
163 163 buildInputs = with self; [];
164 164 doCheck = false;
165 165 propagatedBuildInputs = with self; [];
166 166 src = fetchurl {
167 url = "https://pypi.python.org/packages/13/8a/4eed41e338e8dcc13ca41c94b142d4d20c0de684ee5065523fee406ce76f/decorator-4.0.10.tar.gz";
168 md5 = "434b57fdc3230c500716c5aff8896100";
167 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
168 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 172 };
173 173 };
174 174 dulwich = super.buildPythonPackage {
175 175 name = "dulwich-0.13.0";
176 176 buildInputs = with self; [];
177 177 doCheck = false;
178 178 propagatedBuildInputs = with self; [];
179 179 src = fetchurl {
180 180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
181 181 md5 = "6dede0626657c2bd08f48ca1221eea91";
182 182 };
183 183 meta = {
184 184 license = [ pkgs.lib.licenses.gpl2Plus ];
185 185 };
186 186 };
187 187 enum34 = super.buildPythonPackage {
188 188 name = "enum34-1.1.6";
189 189 buildInputs = with self; [];
190 190 doCheck = false;
191 191 propagatedBuildInputs = with self; [];
192 192 src = fetchurl {
193 193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 194 md5 = "5f13a0841a61f7fc295c514490d120d0";
195 195 };
196 196 meta = {
197 197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 198 };
199 199 };
200 200 gevent = super.buildPythonPackage {
201 201 name = "gevent-1.1.2";
202 202 buildInputs = with self; [];
203 203 doCheck = false;
204 204 propagatedBuildInputs = with self; [greenlet];
205 205 src = fetchurl {
206 206 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
207 207 md5 = "bb32a2f852a4997138014d5007215c6e";
208 208 };
209 209 meta = {
210 210 license = [ pkgs.lib.licenses.mit ];
211 211 };
212 212 };
213 213 gprof2dot = super.buildPythonPackage {
214 214 name = "gprof2dot-2016.10.13";
215 215 buildInputs = with self; [];
216 216 doCheck = false;
217 217 propagatedBuildInputs = with self; [];
218 218 src = fetchurl {
219 219 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
220 220 md5 = "0125401f15fd2afe1df686a76c64a4fd";
221 221 };
222 222 meta = {
223 223 license = [ { fullName = "LGPL"; } ];
224 224 };
225 225 };
226 226 greenlet = super.buildPythonPackage {
227 227 name = "greenlet-0.4.10";
228 228 buildInputs = with self; [];
229 229 doCheck = false;
230 230 propagatedBuildInputs = with self; [];
231 231 src = fetchurl {
232 232 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
233 233 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
234 234 };
235 235 meta = {
236 236 license = [ pkgs.lib.licenses.mit ];
237 237 };
238 238 };
239 239 gunicorn = super.buildPythonPackage {
240 240 name = "gunicorn-19.6.0";
241 241 buildInputs = with self; [];
242 242 doCheck = false;
243 243 propagatedBuildInputs = with self; [];
244 244 src = fetchurl {
245 245 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
246 246 md5 = "338e5e8a83ea0f0625f768dba4597530";
247 247 };
248 248 meta = {
249 249 license = [ pkgs.lib.licenses.mit ];
250 250 };
251 251 };
252 252 hgsubversion = super.buildPythonPackage {
253 253 name = "hgsubversion-1.8.6";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [mercurial subvertpy];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
259 259 md5 = "9310cb266031cf8d0779885782a84a5b";
260 260 };
261 261 meta = {
262 262 license = [ pkgs.lib.licenses.gpl1 ];
263 263 };
264 264 };
265 265 infrae.cache = super.buildPythonPackage {
266 266 name = "infrae.cache-1.0.1";
267 267 buildInputs = with self; [];
268 268 doCheck = false;
269 269 propagatedBuildInputs = with self; [Beaker repoze.lru];
270 270 src = fetchurl {
271 271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
272 272 md5 = "b09076a766747e6ed2a755cc62088e32";
273 273 };
274 274 meta = {
275 275 license = [ pkgs.lib.licenses.zpt21 ];
276 276 };
277 277 };
278 278 ipdb = super.buildPythonPackage {
279 279 name = "ipdb-0.10.1";
280 280 buildInputs = with self; [];
281 281 doCheck = false;
282 282 propagatedBuildInputs = with self; [ipython setuptools];
283 283 src = fetchurl {
284 284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
285 285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
286 286 };
287 287 meta = {
288 288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 289 };
290 290 };
291 291 ipython = super.buildPythonPackage {
292 292 name = "ipython-5.1.0";
293 293 buildInputs = with self; [];
294 294 doCheck = false;
295 295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
296 296 src = fetchurl {
297 297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
298 298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.bsdOriginal ];
302 302 };
303 303 };
304 304 ipython-genutils = super.buildPythonPackage {
305 305 name = "ipython-genutils-0.1.0";
306 306 buildInputs = with self; [];
307 307 doCheck = false;
308 308 propagatedBuildInputs = with self; [];
309 309 src = fetchurl {
310 310 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
311 311 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
312 312 };
313 313 meta = {
314 314 license = [ pkgs.lib.licenses.bsdOriginal ];
315 315 };
316 316 };
317 317 mercurial = super.buildPythonPackage {
318 name = "mercurial-4.0.2";
318 name = "mercurial-4.1.2";
319 319 buildInputs = with self; [];
320 320 doCheck = false;
321 321 propagatedBuildInputs = with self; [];
322 322 src = fetchurl {
323 url = "https://pypi.python.org/packages/85/1b/0296aacd697228974a473d2508f013532f987ed6b1bacfe5abd6d5be6332/mercurial-4.0.2.tar.gz";
324 md5 = "fa72a08e2723e4fa2a21c4e66437f3fa";
323 url = "https://pypi.python.org/packages/88/c1/f0501fd67f5e69346da41ee0bd7b2619ce4bbc9854bb645074c418b9941f/mercurial-4.1.2.tar.gz";
324 md5 = "934c99808bdc8385e074b902d59b0d93";
325 325 };
326 326 meta = {
327 327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
328 328 };
329 329 };
330 330 mock = super.buildPythonPackage {
331 331 name = "mock-1.0.1";
332 332 buildInputs = with self; [];
333 333 doCheck = false;
334 334 propagatedBuildInputs = with self; [];
335 335 src = fetchurl {
336 336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
337 337 md5 = "869f08d003c289a97c1a6610faf5e913";
338 338 };
339 339 meta = {
340 340 license = [ pkgs.lib.licenses.bsdOriginal ];
341 341 };
342 342 };
343 343 msgpack-python = super.buildPythonPackage {
344 344 name = "msgpack-python-0.4.8";
345 345 buildInputs = with self; [];
346 346 doCheck = false;
347 347 propagatedBuildInputs = with self; [];
348 348 src = fetchurl {
349 349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
350 350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
351 351 };
352 352 meta = {
353 353 license = [ pkgs.lib.licenses.asl20 ];
354 354 };
355 355 };
356 356 pathlib2 = super.buildPythonPackage {
357 357 name = "pathlib2-2.1.0";
358 358 buildInputs = with self; [];
359 359 doCheck = false;
360 360 propagatedBuildInputs = with self; [six];
361 361 src = fetchurl {
362 362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
363 363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
364 364 };
365 365 meta = {
366 366 license = [ pkgs.lib.licenses.mit ];
367 367 };
368 368 };
369 369 pexpect = super.buildPythonPackage {
370 370 name = "pexpect-4.2.1";
371 371 buildInputs = with self; [];
372 372 doCheck = false;
373 373 propagatedBuildInputs = with self; [ptyprocess];
374 374 src = fetchurl {
375 375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
376 376 md5 = "3694410001a99dff83f0b500a1ca1c95";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
380 380 };
381 381 };
382 382 pickleshare = super.buildPythonPackage {
383 383 name = "pickleshare-0.7.4";
384 384 buildInputs = with self; [];
385 385 doCheck = false;
386 386 propagatedBuildInputs = with self; [pathlib2];
387 387 src = fetchurl {
388 388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
389 389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
390 390 };
391 391 meta = {
392 392 license = [ pkgs.lib.licenses.mit ];
393 393 };
394 394 };
395 395 prompt-toolkit = super.buildPythonPackage {
396 396 name = "prompt-toolkit-1.0.9";
397 397 buildInputs = with self; [];
398 398 doCheck = false;
399 399 propagatedBuildInputs = with self; [six wcwidth];
400 400 src = fetchurl {
401 401 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
402 402 md5 = "a39f91a54308fb7446b1a421c11f227c";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.bsdOriginal ];
406 406 };
407 407 };
408 408 ptyprocess = super.buildPythonPackage {
409 409 name = "ptyprocess-0.5.1";
410 410 buildInputs = with self; [];
411 411 doCheck = false;
412 412 propagatedBuildInputs = with self; [];
413 413 src = fetchurl {
414 414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
415 415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
416 416 };
417 417 meta = {
418 418 license = [ ];
419 419 };
420 420 };
421 421 py = super.buildPythonPackage {
422 422 name = "py-1.4.31";
423 423 buildInputs = with self; [];
424 424 doCheck = false;
425 425 propagatedBuildInputs = with self; [];
426 426 src = fetchurl {
427 427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
428 428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
429 429 };
430 430 meta = {
431 431 license = [ pkgs.lib.licenses.mit ];
432 432 };
433 433 };
434 434 pygments = super.buildPythonPackage {
435 435 name = "pygments-2.2.0";
436 436 buildInputs = with self; [];
437 437 doCheck = false;
438 438 propagatedBuildInputs = with self; [];
439 439 src = fetchurl {
440 440 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
441 441 md5 = "13037baca42f16917cbd5ad2fab50844";
442 442 };
443 443 meta = {
444 444 license = [ pkgs.lib.licenses.bsdOriginal ];
445 445 };
446 446 };
447 447 pyramid = super.buildPythonPackage {
448 name = "pyramid-1.6.1";
448 name = "pyramid-1.7.4";
449 449 buildInputs = with self; [];
450 450 doCheck = false;
451 451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
452 452 src = fetchurl {
453 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
454 md5 = "b18688ff3cc33efdbb098a35b45dd122";
453 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
454 md5 = "6ef1dfdcff9136d04490410757c4c446";
455 455 };
456 456 meta = {
457 457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
458 458 };
459 459 };
460 460 pyramid-jinja2 = super.buildPythonPackage {
461 461 name = "pyramid-jinja2-2.5";
462 462 buildInputs = with self; [];
463 463 doCheck = false;
464 464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
465 465 src = fetchurl {
466 466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
467 467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
468 468 };
469 469 meta = {
470 470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
471 471 };
472 472 };
473 473 pyramid-mako = super.buildPythonPackage {
474 474 name = "pyramid-mako-1.0.2";
475 475 buildInputs = with self; [];
476 476 doCheck = false;
477 477 propagatedBuildInputs = with self; [pyramid Mako];
478 478 src = fetchurl {
479 479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
480 480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
481 481 };
482 482 meta = {
483 483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
484 484 };
485 485 };
486 486 pytest = super.buildPythonPackage {
487 487 name = "pytest-3.0.5";
488 488 buildInputs = with self; [];
489 489 doCheck = false;
490 490 propagatedBuildInputs = with self; [py];
491 491 src = fetchurl {
492 492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
493 493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
494 494 };
495 495 meta = {
496 496 license = [ pkgs.lib.licenses.mit ];
497 497 };
498 498 };
499 499 pytest-catchlog = super.buildPythonPackage {
500 500 name = "pytest-catchlog-1.2.2";
501 501 buildInputs = with self; [];
502 502 doCheck = false;
503 503 propagatedBuildInputs = with self; [py pytest];
504 504 src = fetchurl {
505 505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
506 506 md5 = "09d890c54c7456c818102b7ff8c182c8";
507 507 };
508 508 meta = {
509 509 license = [ pkgs.lib.licenses.mit ];
510 510 };
511 511 };
512 512 pytest-cov = super.buildPythonPackage {
513 513 name = "pytest-cov-2.4.0";
514 514 buildInputs = with self; [];
515 515 doCheck = false;
516 516 propagatedBuildInputs = with self; [pytest coverage];
517 517 src = fetchurl {
518 518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
519 519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
520 520 };
521 521 meta = {
522 522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
523 523 };
524 524 };
525 525 pytest-profiling = super.buildPythonPackage {
526 526 name = "pytest-profiling-1.2.2";
527 527 buildInputs = with self; [];
528 528 doCheck = false;
529 529 propagatedBuildInputs = with self; [six pytest gprof2dot];
530 530 src = fetchurl {
531 531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
532 532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
533 533 };
534 534 meta = {
535 535 license = [ pkgs.lib.licenses.mit ];
536 536 };
537 537 };
538 538 pytest-runner = super.buildPythonPackage {
539 539 name = "pytest-runner-2.9";
540 540 buildInputs = with self; [];
541 541 doCheck = false;
542 542 propagatedBuildInputs = with self; [];
543 543 src = fetchurl {
544 544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
545 545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
546 546 };
547 547 meta = {
548 548 license = [ pkgs.lib.licenses.mit ];
549 549 };
550 550 };
551 551 pytest-sugar = super.buildPythonPackage {
552 552 name = "pytest-sugar-0.7.1";
553 553 buildInputs = with self; [];
554 554 doCheck = false;
555 555 propagatedBuildInputs = with self; [pytest termcolor];
556 556 src = fetchurl {
557 557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
558 558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
559 559 };
560 560 meta = {
561 561 license = [ pkgs.lib.licenses.bsdOriginal ];
562 562 };
563 563 };
564 564 pytest-timeout = super.buildPythonPackage {
565 565 name = "pytest-timeout-1.2.0";
566 566 buildInputs = with self; [];
567 567 doCheck = false;
568 568 propagatedBuildInputs = with self; [pytest];
569 569 src = fetchurl {
570 570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
571 571 md5 = "83607d91aa163562c7ee835da57d061d";
572 572 };
573 573 meta = {
574 574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
575 575 };
576 576 };
577 577 repoze.lru = super.buildPythonPackage {
578 578 name = "repoze.lru-0.6";
579 579 buildInputs = with self; [];
580 580 doCheck = false;
581 581 propagatedBuildInputs = with self; [];
582 582 src = fetchurl {
583 583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
584 584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
585 585 };
586 586 meta = {
587 587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
588 588 };
589 589 };
590 590 rhodecode-vcsserver = super.buildPythonPackage {
591 name = "rhodecode-vcsserver-4.6.1";
591 name = "rhodecode-vcsserver-4.7.0";
592 592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
593 593 doCheck = true;
594 594 propagatedBuildInputs = with self; [Beaker configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
595 595 src = ./.;
596 596 meta = {
597 597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
598 598 };
599 599 };
600 600 serpent = super.buildPythonPackage {
601 601 name = "serpent-1.15";
602 602 buildInputs = with self; [];
603 603 doCheck = false;
604 604 propagatedBuildInputs = with self; [];
605 605 src = fetchurl {
606 606 url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz";
607 607 md5 = "e27b1aad5c218e16442f52abb7c7053a";
608 608 };
609 609 meta = {
610 610 license = [ pkgs.lib.licenses.mit ];
611 611 };
612 612 };
613 613 setuptools = super.buildPythonPackage {
614 614 name = "setuptools-30.1.0";
615 615 buildInputs = with self; [];
616 616 doCheck = false;
617 617 propagatedBuildInputs = with self; [];
618 618 src = fetchurl {
619 619 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
620 620 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
621 621 };
622 622 meta = {
623 623 license = [ pkgs.lib.licenses.mit ];
624 624 };
625 625 };
626 626 simplegeneric = super.buildPythonPackage {
627 627 name = "simplegeneric-0.8.1";
628 628 buildInputs = with self; [];
629 629 doCheck = false;
630 630 propagatedBuildInputs = with self; [];
631 631 src = fetchurl {
632 632 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
633 633 md5 = "f9c1fab00fd981be588fc32759f474e3";
634 634 };
635 635 meta = {
636 636 license = [ pkgs.lib.licenses.zpt21 ];
637 637 };
638 638 };
639 639 simplejson = super.buildPythonPackage {
640 640 name = "simplejson-3.7.2";
641 641 buildInputs = with self; [];
642 642 doCheck = false;
643 643 propagatedBuildInputs = with self; [];
644 644 src = fetchurl {
645 645 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
646 646 md5 = "a5fc7d05d4cb38492285553def5d4b46";
647 647 };
648 648 meta = {
649 649 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
650 650 };
651 651 };
652 652 six = super.buildPythonPackage {
653 653 name = "six-1.9.0";
654 654 buildInputs = with self; [];
655 655 doCheck = false;
656 656 propagatedBuildInputs = with self; [];
657 657 src = fetchurl {
658 658 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
659 659 md5 = "476881ef4012262dfc8adc645ee786c4";
660 660 };
661 661 meta = {
662 662 license = [ pkgs.lib.licenses.mit ];
663 663 };
664 664 };
665 665 subprocess32 = super.buildPythonPackage {
666 666 name = "subprocess32-3.2.6";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 671 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
672 672 md5 = "754c5ab9f533e764f931136974b618f1";
673 673 };
674 674 meta = {
675 675 license = [ pkgs.lib.licenses.psfl ];
676 676 };
677 677 };
678 678 subvertpy = super.buildPythonPackage {
679 679 name = "subvertpy-0.9.3";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
685 685 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.lgpl21Plus ];
689 689 };
690 690 };
691 691 termcolor = super.buildPythonPackage {
692 692 name = "termcolor-1.1.0";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
698 698 md5 = "043e89644f8909d462fbbfa511c768df";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.mit ];
702 702 };
703 703 };
704 704 traitlets = super.buildPythonPackage {
705 705 name = "traitlets-4.3.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
711 711 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
712 712 };
713 713 meta = {
714 714 license = [ pkgs.lib.licenses.bsdOriginal ];
715 715 };
716 716 };
717 717 translationstring = super.buildPythonPackage {
718 718 name = "translationstring-1.3";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 723 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
724 724 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
725 725 };
726 726 meta = {
727 727 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
728 728 };
729 729 };
730 730 venusian = super.buildPythonPackage {
731 731 name = "venusian-1.0";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
737 737 md5 = "dccf2eafb7113759d60c86faf5538756";
738 738 };
739 739 meta = {
740 740 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
741 741 };
742 742 };
743 743 waitress = super.buildPythonPackage {
744 744 name = "waitress-1.0.1";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 749 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
750 750 md5 = "dda92358a7569669086155923a46e57c";
751 751 };
752 752 meta = {
753 753 license = [ pkgs.lib.licenses.zpt21 ];
754 754 };
755 755 };
756 756 wcwidth = super.buildPythonPackage {
757 757 name = "wcwidth-0.1.7";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
763 763 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.mit ];
767 767 };
768 768 };
769 769 wheel = super.buildPythonPackage {
770 770 name = "wheel-0.29.0";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
776 776 md5 = "555a67e4507cedee23a0deb9651e452f";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.mit ];
780 780 };
781 781 };
782 782 zope.deprecation = super.buildPythonPackage {
783 783 name = "zope.deprecation-4.1.2";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [setuptools];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
789 789 md5 = "e9a663ded58f4f9f7881beb56cae2782";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.zpt21 ];
793 793 };
794 794 };
795 795 zope.interface = super.buildPythonPackage {
796 796 name = "zope.interface-4.1.3";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [setuptools];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
802 802 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
803 803 };
804 804 meta = {
805 805 license = [ pkgs.lib.licenses.zpt21 ];
806 806 };
807 807 };
808 808
809 809 ### Test requirements
810 810
811 811
812 812 }
@@ -1,43 +1,43 b''
1 # core
1 ## core
2 2 setuptools==30.1.0
3 3
4 4 Beaker==1.7.0
5 5 configobj==5.0.6
6 decorator==4.0.11
6 7 dulwich==0.13.0
7 8 hgsubversion==1.8.6
8 9 infrae.cache==1.0.1
9 mercurial==4.0.2
10 mercurial==4.1.2
10 11 msgpack-python==0.4.8
11 pyramid==1.6.1
12 12 pyramid-jinja2==2.5
13 pyramid==1.7.4
13 14 pyramid-mako==1.0.2
14 15 repoze.lru==0.6
15 16 simplejson==3.7.2
16 17 subprocess32==3.2.6
17 18
18 19 # Custom subvertpy that is not available on pypi.
19 20 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
20 21
21 22 six==1.9.0
22 23 translationstring==1.3
23 24 WebOb==1.3.1
24 25 wheel==0.29.0
25 26 zope.deprecation==4.1.2
26 27 zope.interface==4.1.3
27 28
28 29 ## debug
29 30 ipdb==0.10.1
30 31 ipython==5.1.0
31
32 32 # http servers
33 33 gevent==1.1.2
34 34 greenlet==0.4.10
35 35 gunicorn==19.6.0
36 36 waitress==1.0.1
37 37
38 38 # Pyro/Deprecated TODO(Marcink): remove in 4.7 release.
39 39 Pyro4==4.41
40 40 serpent==1.15
41 41
42 42 ## test related requirements
43 43 -r requirements_test.txt
@@ -1,1 +1,1 b''
1 4.6.1 No newline at end of file
1 4.7.0 No newline at end of file
@@ -1,82 +1,98 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 import sys
19 import traceback
18 20 import logging
19 21 import urlparse
20 22
21 23 log = logging.getLogger(__name__)
22 24
23 25
24 26 class RepoFactory(object):
25 27 """
26 28 Utility to create instances of repository
27 29
28 30 It provides internal caching of the `repo` object based on
29 31 the :term:`call context`.
30 32 """
31 33
32 34 def __init__(self, repo_cache):
33 35 self._cache = repo_cache
34 36
35 37 def _create_config(self, path, config):
36 38 config = {}
37 39 return config
38 40
39 41 def _create_repo(self, wire, create):
40 42 raise NotImplementedError()
41 43
42 44 def repo(self, wire, create=False):
43 45 """
44 46 Get a repository instance for the given path.
45 47
46 48 Uses internally the low level beaker API since the decorators introduce
47 49 significant overhead.
48 50 """
49 51 def create_new_repo():
50 52 return self._create_repo(wire, create)
51 53
52 54 return self._repo(wire, create_new_repo)
53 55
54 56 def _repo(self, wire, createfunc):
55 57 context = wire.get('context', None)
56 58 cache = wire.get('cache', True)
57 59
58 60 if context and cache:
59 61 cache_key = (context, wire['path'])
60 62 log.debug(
61 63 'FETCH %s@%s repo object from cache. Context: %s',
62 64 self.__class__.__name__, wire['path'], context)
63 65 return self._cache.get(key=cache_key, createfunc=createfunc)
64 66 else:
65 67 log.debug(
66 68 'INIT %s@%s repo object based on wire %s. Context: %s',
67 69 self.__class__.__name__, wire['path'], wire, context)
68 70 return createfunc()
69 71
70 72
71 73 def obfuscate_qs(query_string):
72 74 if query_string is None:
73 75 return None
74 76
75 77 parsed = []
76 78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
77 79 if k in ['auth_token', 'api_key']:
78 80 v = "*****"
79 81 parsed.append((k, v))
80 82
81 83 return '&'.join('{}{}'.format(
82 84 k, '={}'.format(v) if v else '') for k, v in parsed)
85
86
87 def raise_from_original(new_type):
88 """
89 Raise a new exception type with original args and traceback.
90 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
92
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
95 try:
96 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
98 del exc_traceback
@@ -1,581 +1,644 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import urllib
24 24 import urllib2
25 25 from functools import wraps
26 26
27 27 from dulwich import index, objects
28 28 from dulwich.client import HttpGitClient, LocalGitClient
29 29 from dulwich.errors import (
30 30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 31 MissingCommitError, ObjectMissing, HangupException,
32 32 UnexpectedCommandError)
33 33 from dulwich.repo import Repo as DulwichRepo, Tag
34 34 from dulwich.server import update_server_info
35 35
36 36 from vcsserver import exceptions, settings, subprocessio
37 37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 39 from vcsserver.hgcompat import (
40 40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41
41 from vcsserver.git_lfs.lib import LFSOidStore
42 42
43 43 DIR_STAT = stat.S_IFDIR
44 44 FILE_MODE = stat.S_IFMT
45 45 GIT_LINK = objects.S_IFGITLINK
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def reraise_safe_exceptions(func):
51 51 """Converts Dulwich exceptions to something neutral."""
52 52 @wraps(func)
53 53 def wrapper(*args, **kwargs):
54 54 try:
55 55 return func(*args, **kwargs)
56 56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 57 ObjectMissing) as e:
58 58 raise exceptions.LookupException(e.message)
59 59 except (HangupException, UnexpectedCommandError) as e:
60 60 raise exceptions.VcsException(e.message)
61 except Exception as e:
62 # NOTE(marcink): becuase of how dulwich handles some exceptions
63 # (KeyError on empty repos), we cannot track this and catch all
64 # exceptions, it's an exceptions from other handlers
65 #if not hasattr(e, '_vcs_kind'):
66 #log.exception("Unhandled exception in git remote call")
67 #raise_from_original(exceptions.UnhandledException)
68 raise
61 69 return wrapper
62 70
63 71
64 72 class Repo(DulwichRepo):
65 73 """
66 74 A wrapper for dulwich Repo class.
67 75
68 76 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 77 "Too many open files" error. We need to close all opened file descriptors
70 78 once the repo object is destroyed.
71 79
72 80 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 81 to 0.12.0 +
74 82 """
75 83 def __del__(self):
76 84 if hasattr(self, 'object_store'):
77 85 self.close()
78 86
79 87
80 88 class GitFactory(RepoFactory):
81 89
82 90 def _create_repo(self, wire, create):
83 91 repo_path = str_to_dulwich(wire['path'])
84 92 return Repo(repo_path)
85 93
86 94
87 95 class GitRemote(object):
88 96
89 97 def __init__(self, factory):
90 98 self._factory = factory
91 99
92 100 self._bulk_methods = {
93 101 "author": self.commit_attribute,
94 102 "date": self.get_object_attrs,
95 103 "message": self.commit_attribute,
96 104 "parents": self.commit_attribute,
97 105 "_commit": self.revision,
98 106 }
99 107
108 def _wire_to_config(self, wire):
109 if 'config' in wire:
110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return {}
112
100 113 def _assign_ref(self, wire, ref, commit_id):
101 114 repo = self._factory.repo(wire)
102 115 repo[ref] = commit_id
103 116
104 117 @reraise_safe_exceptions
105 118 def add_object(self, wire, content):
106 119 repo = self._factory.repo(wire)
107 120 blob = objects.Blob()
108 121 blob.set_raw_string(content)
109 122 repo.object_store.add_object(blob)
110 123 return blob.id
111 124
112 125 @reraise_safe_exceptions
113 126 def assert_correct_path(self, wire):
114 127 try:
115 128 self._factory.repo(wire)
116 129 except NotGitRepository as e:
117 130 # Exception can contain unicode which we convert
118 131 raise exceptions.AbortException(repr(e))
119 132
120 133 @reraise_safe_exceptions
121 134 def bare(self, wire):
122 135 repo = self._factory.repo(wire)
123 136 return repo.bare
124 137
125 138 @reraise_safe_exceptions
126 139 def blob_as_pretty_string(self, wire, sha):
127 140 repo = self._factory.repo(wire)
128 141 return repo[sha].as_pretty_string()
129 142
130 143 @reraise_safe_exceptions
131 144 def blob_raw_length(self, wire, sha):
132 145 repo = self._factory.repo(wire)
133 146 blob = repo[sha]
134 147 return blob.raw_length()
135 148
149 def _parse_lfs_pointer(self, raw_content):
150
151 spec_string = 'version https://git-lfs.github.com/spec'
152 if raw_content and raw_content.startswith(spec_string):
153 pattern = re.compile(r"""
154 (?:\n)?
155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 ^size[ ](?P<oid_size>[0-9]+)\n
158 (?:\n)?
159 """, re.VERBOSE | re.MULTILINE)
160 match = pattern.match(raw_content)
161 if match:
162 return match.groupdict()
163
164 return {}
165
166 @reraise_safe_exceptions
167 def is_large_file(self, wire, sha):
168 repo = self._factory.repo(wire)
169 blob = repo[sha]
170 return self._parse_lfs_pointer(blob.as_raw_string())
171
172 @reraise_safe_exceptions
173 def in_largefiles_store(self, wire, oid):
174 repo = self._factory.repo(wire)
175 conf = self._wire_to_config(wire)
176
177 store_location = conf.get('vcs_git_lfs_store_location')
178 if store_location:
179 repo_name = repo.path
180 store = LFSOidStore(
181 oid=oid, repo=repo_name, store_location=store_location)
182 return store.has_oid()
183
184 return False
185
186 @reraise_safe_exceptions
187 def store_path(self, wire, oid):
188 repo = self._factory.repo(wire)
189 conf = self._wire_to_config(wire)
190
191 store_location = conf.get('vcs_git_lfs_store_location')
192 if store_location:
193 repo_name = repo.path
194 store = LFSOidStore(
195 oid=oid, repo=repo_name, store_location=store_location)
196 return store.oid_path
197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198
136 199 @reraise_safe_exceptions
137 200 def bulk_request(self, wire, rev, pre_load):
138 201 result = {}
139 202 for attr in pre_load:
140 203 try:
141 204 method = self._bulk_methods[attr]
142 205 args = [wire, rev]
143 206 if attr == "date":
144 207 args.extend(["commit_time", "commit_timezone"])
145 208 elif attr in ["author", "message", "parents"]:
146 209 args.append(attr)
147 210 result[attr] = method(*args)
148 211 except KeyError:
149 212 raise exceptions.VcsException(
150 213 "Unknown bulk attribute: %s" % attr)
151 214 return result
152 215
153 216 def _build_opener(self, url):
154 217 handlers = []
155 218 url_obj = url_parser(url)
156 219 _, authinfo = url_obj.authinfo()
157 220
158 221 if authinfo:
159 222 # create a password manager
160 223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 224 passmgr.add_password(*authinfo)
162 225
163 226 handlers.extend((httpbasicauthhandler(passmgr),
164 227 httpdigestauthhandler(passmgr)))
165 228
166 229 return urllib2.build_opener(*handlers)
167 230
168 231 @reraise_safe_exceptions
169 232 def check_url(self, url, config):
170 233 url_obj = url_parser(url)
171 234 test_uri, _ = url_obj.authinfo()
172 235 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
173 236 url_obj.query = obfuscate_qs(url_obj.query)
174 237 cleaned_uri = str(url_obj)
175 238 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
176 239
177 240 if not test_uri.endswith('info/refs'):
178 241 test_uri = test_uri.rstrip('/') + '/info/refs'
179 242
180 243 o = self._build_opener(url)
181 244 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
182 245
183 246 q = {"service": 'git-upload-pack'}
184 247 qs = '?%s' % urllib.urlencode(q)
185 248 cu = "%s%s" % (test_uri, qs)
186 249 req = urllib2.Request(cu, None, {})
187 250
188 251 try:
189 252 log.debug("Trying to open URL %s", cleaned_uri)
190 253 resp = o.open(req)
191 254 if resp.code != 200:
192 255 raise exceptions.URLError('Return Code is not 200')
193 256 except Exception as e:
194 257 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
195 258 # means it cannot be cloned
196 259 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
197 260
198 261 # now detect if it's proper git repo
199 262 gitdata = resp.read()
200 263 if 'service=git-upload-pack' in gitdata:
201 264 pass
202 265 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
203 266 # old style git can return some other format !
204 267 pass
205 268 else:
206 269 raise exceptions.URLError(
207 270 "url [%s] does not look like an git" % (cleaned_uri,))
208 271
209 272 return True
210 273
211 274 @reraise_safe_exceptions
212 275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
213 276 remote_refs = self.fetch(wire, url, apply_refs=False)
214 277 repo = self._factory.repo(wire)
215 278 if isinstance(valid_refs, list):
216 279 valid_refs = tuple(valid_refs)
217 280
218 281 for k in remote_refs:
219 282 # only parse heads/tags and skip so called deferred tags
220 283 if k.startswith(valid_refs) and not k.endswith(deferred):
221 284 repo[k] = remote_refs[k]
222 285
223 286 if update_after_clone:
224 287 # we want to checkout HEAD
225 288 repo["HEAD"] = remote_refs["HEAD"]
226 289 index.build_index_from_tree(repo.path, repo.index_path(),
227 290 repo.object_store, repo["HEAD"].tree)
228 291
229 292 # TODO: this is quite complex, check if that can be simplified
230 293 @reraise_safe_exceptions
231 294 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
232 295 repo = self._factory.repo(wire)
233 296 object_store = repo.object_store
234 297
235 298 # Create tree and populates it with blobs
236 299 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
237 300
238 301 for node in updated:
239 302 # Compute subdirs if needed
240 303 dirpath, nodename = vcspath.split(node['path'])
241 304 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
242 305 parent = commit_tree
243 306 ancestors = [('', parent)]
244 307
245 308 # Tries to dig for the deepest existing tree
246 309 while dirnames:
247 310 curdir = dirnames.pop(0)
248 311 try:
249 312 dir_id = parent[curdir][1]
250 313 except KeyError:
251 314 # put curdir back into dirnames and stops
252 315 dirnames.insert(0, curdir)
253 316 break
254 317 else:
255 318 # If found, updates parent
256 319 parent = repo[dir_id]
257 320 ancestors.append((curdir, parent))
258 321 # Now parent is deepest existing tree and we need to create
259 322 # subtrees for dirnames (in reverse order)
260 323 # [this only applies for nodes from added]
261 324 new_trees = []
262 325
263 326 blob = objects.Blob.from_string(node['content'])
264 327
265 328 if dirnames:
266 329 # If there are trees which should be created we need to build
267 330 # them now (in reverse order)
268 331 reversed_dirnames = list(reversed(dirnames))
269 332 curtree = objects.Tree()
270 333 curtree[node['node_path']] = node['mode'], blob.id
271 334 new_trees.append(curtree)
272 335 for dirname in reversed_dirnames[:-1]:
273 336 newtree = objects.Tree()
274 337 newtree[dirname] = (DIR_STAT, curtree.id)
275 338 new_trees.append(newtree)
276 339 curtree = newtree
277 340 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
278 341 else:
279 342 parent.add(
280 343 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
281 344
282 345 new_trees.append(parent)
283 346 # Update ancestors
284 347 reversed_ancestors = reversed(
285 348 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
286 349 for parent, tree, path in reversed_ancestors:
287 350 parent[path] = (DIR_STAT, tree.id)
288 351 object_store.add_object(tree)
289 352
290 353 object_store.add_object(blob)
291 354 for tree in new_trees:
292 355 object_store.add_object(tree)
293 356
294 357 for node_path in removed:
295 358 paths = node_path.split('/')
296 359 tree = commit_tree
297 360 trees = [tree]
298 361 # Traverse deep into the forest...
299 362 for path in paths:
300 363 try:
301 364 obj = repo[tree[path][1]]
302 365 if isinstance(obj, objects.Tree):
303 366 trees.append(obj)
304 367 tree = obj
305 368 except KeyError:
306 369 break
307 370 # Cut down the blob and all rotten trees on the way back...
308 371 for path, tree in reversed(zip(paths, trees)):
309 372 del tree[path]
310 373 if tree:
311 374 # This tree still has elements - don't remove it or any
312 375 # of it's parents
313 376 break
314 377
315 378 object_store.add_object(commit_tree)
316 379
317 380 # Create commit
318 381 commit = objects.Commit()
319 382 commit.tree = commit_tree.id
320 383 for k, v in commit_data.iteritems():
321 384 setattr(commit, k, v)
322 385 object_store.add_object(commit)
323 386
324 387 ref = 'refs/heads/%s' % branch
325 388 repo.refs[ref] = commit.id
326 389
327 390 return commit.id
328 391
329 392 @reraise_safe_exceptions
330 393 def fetch(self, wire, url, apply_refs=True, refs=None):
331 394 if url != 'default' and '://' not in url:
332 395 client = LocalGitClient(url)
333 396 else:
334 397 url_obj = url_parser(url)
335 398 o = self._build_opener(url)
336 399 url, _ = url_obj.authinfo()
337 400 client = HttpGitClient(base_url=url, opener=o)
338 401 repo = self._factory.repo(wire)
339 402
340 403 determine_wants = repo.object_store.determine_wants_all
341 404 if refs:
342 405 def determine_wants_requested(references):
343 406 return [references[r] for r in references if r in refs]
344 407 determine_wants = determine_wants_requested
345 408
346 409 try:
347 410 remote_refs = client.fetch(
348 411 path=url, target=repo, determine_wants=determine_wants)
349 412 except NotGitRepository as e:
350 413 log.warning(
351 414 'Trying to fetch from "%s" failed, not a Git repository.', url)
352 415 # Exception can contain unicode which we convert
353 416 raise exceptions.AbortException(repr(e))
354 417
355 418 # mikhail: client.fetch() returns all the remote refs, but fetches only
356 419 # refs filtered by `determine_wants` function. We need to filter result
357 420 # as well
358 421 if refs:
359 422 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
360 423
361 424 if apply_refs:
362 425 # TODO: johbo: Needs proper test coverage with a git repository
363 426 # that contains a tag object, so that we would end up with
364 427 # a peeled ref at this point.
365 428 PEELED_REF_MARKER = '^{}'
366 429 for k in remote_refs:
367 430 if k.endswith(PEELED_REF_MARKER):
368 431 log.info("Skipping peeled reference %s", k)
369 432 continue
370 433 repo[k] = remote_refs[k]
371 434
372 435 if refs:
373 436 # mikhail: explicitly set the head to the last ref.
374 437 repo['HEAD'] = remote_refs[refs[-1]]
375 438
376 439 # TODO: mikhail: should we return remote_refs here to be
377 440 # consistent?
378 441 else:
379 442 return remote_refs
380 443
381 444 @reraise_safe_exceptions
382 445 def get_remote_refs(self, wire, url):
383 446 repo = Repo(url)
384 447 return repo.get_refs()
385 448
386 449 @reraise_safe_exceptions
387 450 def get_description(self, wire):
388 451 repo = self._factory.repo(wire)
389 452 return repo.get_description()
390 453
391 454 @reraise_safe_exceptions
392 455 def get_file_history(self, wire, file_path, commit_id, limit):
393 456 repo = self._factory.repo(wire)
394 457 include = [commit_id]
395 458 paths = [file_path]
396 459
397 460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
398 461 return [x.commit.id for x in walker]
399 462
400 463 @reraise_safe_exceptions
401 464 def get_missing_revs(self, wire, rev1, rev2, path2):
402 465 repo = self._factory.repo(wire)
403 466 LocalGitClient(thin_packs=False).fetch(path2, repo)
404 467
405 468 wire_remote = wire.copy()
406 469 wire_remote['path'] = path2
407 470 repo_remote = self._factory.repo(wire_remote)
408 471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
409 472
410 473 revs = [
411 474 x.commit.id
412 475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
413 476 return revs
414 477
415 478 @reraise_safe_exceptions
416 479 def get_object(self, wire, sha):
417 480 repo = self._factory.repo(wire)
418 481 obj = repo.get_object(sha)
419 482 commit_id = obj.id
420 483
421 484 if isinstance(obj, Tag):
422 485 commit_id = obj.object[1]
423 486
424 487 return {
425 488 'id': obj.id,
426 489 'type': obj.type_name,
427 490 'commit_id': commit_id
428 491 }
429 492
430 493 @reraise_safe_exceptions
431 494 def get_object_attrs(self, wire, sha, *attrs):
432 495 repo = self._factory.repo(wire)
433 496 obj = repo.get_object(sha)
434 497 return list(getattr(obj, a) for a in attrs)
435 498
436 499 @reraise_safe_exceptions
437 500 def get_refs(self, wire):
438 501 repo = self._factory.repo(wire)
439 502 result = {}
440 503 for ref, sha in repo.refs.as_dict().items():
441 504 peeled_sha = repo.get_peeled(ref)
442 505 result[ref] = peeled_sha
443 506 return result
444 507
445 508 @reraise_safe_exceptions
446 509 def get_refs_path(self, wire):
447 510 repo = self._factory.repo(wire)
448 511 return repo.refs.path
449 512
450 513 @reraise_safe_exceptions
451 514 def head(self, wire):
452 515 repo = self._factory.repo(wire)
453 516 return repo.head()
454 517
455 518 @reraise_safe_exceptions
456 519 def init(self, wire):
457 520 repo_path = str_to_dulwich(wire['path'])
458 521 self.repo = Repo.init(repo_path)
459 522
460 523 @reraise_safe_exceptions
461 524 def init_bare(self, wire):
462 525 repo_path = str_to_dulwich(wire['path'])
463 526 self.repo = Repo.init_bare(repo_path)
464 527
465 528 @reraise_safe_exceptions
466 529 def revision(self, wire, rev):
467 530 repo = self._factory.repo(wire)
468 531 obj = repo[rev]
469 532 obj_data = {
470 533 'id': obj.id,
471 534 }
472 535 try:
473 536 obj_data['tree'] = obj.tree
474 537 except AttributeError:
475 538 pass
476 539 return obj_data
477 540
478 541 @reraise_safe_exceptions
479 542 def commit_attribute(self, wire, rev, attr):
480 543 repo = self._factory.repo(wire)
481 544 obj = repo[rev]
482 545 return getattr(obj, attr)
483 546
484 547 @reraise_safe_exceptions
485 548 def set_refs(self, wire, key, value):
486 549 repo = self._factory.repo(wire)
487 550 repo.refs[key] = value
488 551
489 552 @reraise_safe_exceptions
490 553 def remove_ref(self, wire, key):
491 554 repo = self._factory.repo(wire)
492 555 del repo.refs[key]
493 556
494 557 @reraise_safe_exceptions
495 558 def tree_changes(self, wire, source_id, target_id):
496 559 repo = self._factory.repo(wire)
497 560 source = repo[source_id].tree if source_id else None
498 561 target = repo[target_id].tree
499 562 result = repo.object_store.tree_changes(source, target)
500 563 return list(result)
501 564
502 565 @reraise_safe_exceptions
503 566 def tree_items(self, wire, tree_id):
504 567 repo = self._factory.repo(wire)
505 568 tree = repo[tree_id]
506 569
507 570 result = []
508 571 for item in tree.iteritems():
509 572 item_sha = item.sha
510 573 item_mode = item.mode
511 574
512 575 if FILE_MODE(item_mode) == GIT_LINK:
513 576 item_type = "link"
514 577 else:
515 578 item_type = repo[item_sha].type_name
516 579
517 580 result.append((item.path, item_mode, item_sha, item_type))
518 581 return result
519 582
520 583 @reraise_safe_exceptions
521 584 def update_server_info(self, wire):
522 585 repo = self._factory.repo(wire)
523 586 update_server_info(repo)
524 587
525 588 @reraise_safe_exceptions
526 589 def discover_git_version(self):
527 590 stdout, _ = self.run_git_command(
528 591 {}, ['--version'], _bare=True, _safe=True)
529 592 prefix = 'git version'
530 593 if stdout.startswith(prefix):
531 594 stdout = stdout[len(prefix):]
532 595 return stdout.strip()
533 596
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """Run the git executable with arguments *cmd*; return (stdout, stderr).

        Recognized pseudo-options (consumed, not passed to subprocess):
          _bare     -- skip the default ``-c core.quotepath=false`` flags
          _safe     -- on failure return ('', err) instead of raising
          extra_env -- dict merged into the subprocess environment

        Remaining *opts* are forwarded to SubprocessIOChunker.
        """
        path = wire.get('path', None)

        # run inside the repository working directory when it exists
        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        # keep the call independent of the user's global git config
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

        try:
            _opts = {'env': gitenv, 'shell': False}
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            # joining the chunker drains the process output streams
            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n" % (cmd, err))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException(tb_err)
575 638
576 639
def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    # decode the wire-level byte string with the configured encoding
    return value.decode(settings.WIRE_ENCODING)
@@ -1,725 +1,727 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 import sys
22 21 import urllib
23 22 import urllib2
24 23
25 24 from hgext import largefiles, rebase
26 25 from hgext.strip import strip as hgext_strip
27 26 from mercurial import commands
28 27 from mercurial import unionrepo
28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 37 InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
def make_ui_from_config(repo_config):
    """Build a quiet, single-threaded mercurial ui from (section, option, value) triples."""
    baseui = ui.ui()

    # start from empty overlay/user/trusted configs so nothing leaks in
    # from the host environment
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for cfg_section, cfg_option, cfg_value in repo_config:
        baseui.setconfig(cfg_section, cfg_option, cfg_value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly
    # disable it here. This overrides settings from repositories hgrc file.
    # Recent mercurial versions enable largefiles in hgrc on clone from
    # largefile repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
69 69
70 70
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired):
            raise_from_original(exceptions.AbortException)
        except RepoLookupError:
            # NOTE(review): ordering matters — the more specific lookup
            # errors must be handled before the broader RepoError below
            raise_from_original(exceptions.LookupException)
        except RequirementError:
            raise_from_original(exceptions.RequirementException)
        except RepoError:
            raise_from_original(exceptions.VcsException)
        except LookupError:
            raise_from_original(exceptions.LookupException)
        except Exception as e:
            # exceptions already translated by vcsserver carry _vcs_kind
            # and are re-raised untouched; everything else becomes an
            # UnhandledException
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
92 92
93 93
94 def raise_from_original(new_type):
95 """
96 Raise a new exception type with original args and traceback.
97 """
98 _, original, traceback = sys.exc_info()
99 try:
100 raise new_type(*original.args), None, traceback
101 finally:
102 del traceback
103
104
class MercurialFactory(RepoFactory):
    """RepoFactory that produces mercurial localrepository objects."""

    def _create_config(self, config, hooks=True):
        """Build a baseui; with hooks=False the rhodecode hook entries are stripped."""
        if not hooks:
            blacklisted = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [
                entry for entry in config
                if not (entry[0] == 'hooks' and entry[1] in blacklisted)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        """Instantiate the localrepository described by *wire*."""
        baseui = self._create_config(wire["config"])
        return localrepository(baseui, wire["path"], create)
125 114
126 115
127 116 class HgRemote(object):
128 117
129 118 def __init__(self, factory):
130 119 self._factory = factory
131 120
132 121 self._bulk_methods = {
133 122 "affected_files": self.ctx_files,
134 123 "author": self.ctx_user,
135 124 "branch": self.ctx_branch,
136 125 "children": self.ctx_children,
137 126 "date": self.ctx_date,
138 127 "message": self.ctx_description,
139 128 "parents": self.ctx_parents,
140 129 "status": self.ctx_status,
141 130 "_file_paths": self.ctx_list,
142 131 }
143 132
    @reraise_safe_exceptions
    def discover_hg_version(self):
        """Return the version string of the bundled mercurial library."""
        # local import: mercurial.util is only needed here
        from mercurial import util
        return util.version()
148 137
149 138 @reraise_safe_exceptions
150 139 def archive_repo(self, archive_path, mtime, file_info, kind):
151 140 if kind == "tgz":
152 141 archiver = archival.tarit(archive_path, mtime, "gz")
153 142 elif kind == "tbz2":
154 143 archiver = archival.tarit(archive_path, mtime, "bz2")
155 144 elif kind == 'zip':
156 145 archiver = archival.zipit(archive_path, mtime)
157 146 else:
158 147 raise exceptions.ArchiveException(
159 148 'Remote does not support: "%s".' % kind)
160 149
161 150 for f_path, f_mode, f_is_link, f_content in file_info:
162 151 archiver.addfile(f_path, f_mode, f_is_link, f_content)
163 152 archiver.done()
164 153
165 154 @reraise_safe_exceptions
166 155 def bookmarks(self, wire):
167 156 repo = self._factory.repo(wire)
168 157 return dict(repo._bookmarks)
169 158
170 159 @reraise_safe_exceptions
171 160 def branches(self, wire, normal, closed):
172 161 repo = self._factory.repo(wire)
173 162 iter_branches = repo.branchmap().iterbranches()
174 163 bt = {}
175 164 for branch_name, _heads, tip, is_closed in iter_branches:
176 165 if normal and not is_closed:
177 166 bt[branch_name] = tip
178 167 if closed and is_closed:
179 168 bt[branch_name] = tip
180 169
181 170 return bt
182 171
183 172 @reraise_safe_exceptions
184 173 def bulk_request(self, wire, rev, pre_load):
185 174 result = {}
186 175 for attr in pre_load:
187 176 try:
188 177 method = self._bulk_methods[attr]
189 178 result[attr] = method(wire, rev)
190 179 except KeyError:
191 180 raise exceptions.VcsException(
192 181 'Unknown bulk attribute: "%s"' % attr)
193 182 return result
194 183
195 184 @reraise_safe_exceptions
196 185 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
197 186 baseui = self._factory._create_config(wire["config"], hooks=hooks)
198 187 clone(baseui, source, dest, noupdate=not update_after_clone)
199 188
    @reraise_safe_exceptions
    def commitctx(
            self, wire, message, parents, commit_time, commit_timezone,
            user, files, extra, removed, updated):
        """Create a commit from in-memory file contents; return its hex id.

        *updated* is a list of dicts with 'path', 'content' and 'mode';
        *removed* is a list of paths to delete in the new commit.
        """

        def _filectxfn(_repo, memctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        # preserve the executable bit from the given mode
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copied=False,
                        memctx=memctx)

            raise exceptions.AbortException(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        repo = self._factory.repo(wire)

        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=message,
            files=files,
            filectxfn=_filectxfn,
            user=user,
            date=(commit_time, commit_timezone),
            extra=extra)

        n = repo.commitctx(commit_ctx)
        new_id = hex(n)

        return new_id
248 237
249 238 @reraise_safe_exceptions
250 239 def ctx_branch(self, wire, revision):
251 240 repo = self._factory.repo(wire)
252 241 ctx = repo[revision]
253 242 return ctx.branch()
254 243
255 244 @reraise_safe_exceptions
256 245 def ctx_children(self, wire, revision):
257 246 repo = self._factory.repo(wire)
258 247 ctx = repo[revision]
259 248 return [child.rev() for child in ctx.children()]
260 249
261 250 @reraise_safe_exceptions
262 251 def ctx_date(self, wire, revision):
263 252 repo = self._factory.repo(wire)
264 253 ctx = repo[revision]
265 254 return ctx.date()
266 255
267 256 @reraise_safe_exceptions
268 257 def ctx_description(self, wire, revision):
269 258 repo = self._factory.repo(wire)
270 259 ctx = repo[revision]
271 260 return ctx.description()
272 261
273 262 @reraise_safe_exceptions
274 263 def ctx_diff(
275 264 self, wire, revision, git=True, ignore_whitespace=True, context=3):
276 265 repo = self._factory.repo(wire)
277 266 ctx = repo[revision]
278 267 result = ctx.diff(
279 268 git=git, ignore_whitespace=ignore_whitespace, context=context)
280 269 return list(result)
281 270
282 271 @reraise_safe_exceptions
283 272 def ctx_files(self, wire, revision):
284 273 repo = self._factory.repo(wire)
285 274 ctx = repo[revision]
286 275 return ctx.files()
287 276
288 277 @reraise_safe_exceptions
289 278 def ctx_list(self, path, revision):
290 279 repo = self._factory.repo(path)
291 280 ctx = repo[revision]
292 281 return list(ctx)
293 282
294 283 @reraise_safe_exceptions
295 284 def ctx_parents(self, wire, revision):
296 285 repo = self._factory.repo(wire)
297 286 ctx = repo[revision]
298 287 return [parent.rev() for parent in ctx.parents()]
299 288
300 289 @reraise_safe_exceptions
301 290 def ctx_substate(self, wire, revision):
302 291 repo = self._factory.repo(wire)
303 292 ctx = repo[revision]
304 293 return ctx.substate
305 294
306 295 @reraise_safe_exceptions
307 296 def ctx_status(self, wire, revision):
308 297 repo = self._factory.repo(wire)
309 298 ctx = repo[revision]
310 299 status = repo[ctx.p1().node()].status(other=ctx.node())
311 300 # object of status (odd, custom named tuple in mercurial) is not
312 301 # correctly serializable via Pyro, we make it a list, as the underling
313 302 # API expects this to be a list
314 303 return list(status)
315 304
316 305 @reraise_safe_exceptions
317 306 def ctx_user(self, wire, revision):
318 307 repo = self._factory.repo(wire)
319 308 ctx = repo[revision]
320 309 return ctx.user()
321 310
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify that *url* is reachable and looks like a mercurial repo.

        Raises URLError when the URL cannot be opened or does not answer
        like a mercurial repository; returns True on success. Credentials
        and query strings are obfuscated in every log message.
        """
        # strip a scheme prefix such as "svn+" and remember it; svn URLs
        # skip the mercurial-repo verification below
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # issue a lightweight hg wire-protocol "between" request
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                httppeer(make_ui_from_config(config), url).lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
384 373
385 374 @reraise_safe_exceptions
386 375 def diff(
387 376 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
388 377 context):
389 378 repo = self._factory.repo(wire)
390 379
391 380 if file_filter:
392 381 match_filter = match(file_filter[0], '', [file_filter[1]])
393 382 else:
394 383 match_filter = file_filter
395 384 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
396 385
397 386 try:
398 387 return "".join(patch.diff(
399 388 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
400 389 except RepoLookupError:
401 390 raise exceptions.LookupException()
402 391
403 392 @reraise_safe_exceptions
404 393 def file_history(self, wire, revision, path, limit):
405 394 repo = self._factory.repo(wire)
406 395
407 396 ctx = repo[revision]
408 397 fctx = ctx.filectx(path)
409 398
410 399 def history_iter():
411 400 limit_rev = fctx.rev()
412 401 for obj in reversed(list(fctx.filelog())):
413 402 obj = fctx.filectx(obj)
414 403 if limit_rev >= obj.rev():
415 404 yield obj
416 405
417 406 history = []
418 407 for cnt, obj in enumerate(history_iter()):
419 408 if limit and cnt >= limit:
420 409 break
421 410 history.append(hex(obj.node()))
422 411
423 412 return [x for x in history]
424 413
425 414 @reraise_safe_exceptions
426 415 def file_history_untill(self, wire, revision, path, limit):
427 416 repo = self._factory.repo(wire)
428 417 ctx = repo[revision]
429 418 fctx = ctx.filectx(path)
430 419
431 420 file_log = list(fctx.filelog())
432 421 if limit:
433 422 # Limit to the last n items
434 423 file_log = file_log[-limit:]
435 424
436 425 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
437 426
438 427 @reraise_safe_exceptions
439 428 def fctx_annotate(self, wire, revision, path):
440 429 repo = self._factory.repo(wire)
441 430 ctx = repo[revision]
442 431 fctx = ctx.filectx(path)
443 432
444 433 result = []
445 434 for i, annotate_data in enumerate(fctx.annotate()):
446 435 ln_no = i + 1
447 436 node_info, content = annotate_data
448 437 sha = hex(node_info[0].node())
449 438 result.append((ln_no, sha, content))
450 439 return result
451 440
452 441 @reraise_safe_exceptions
453 442 def fctx_data(self, wire, revision, path):
454 443 repo = self._factory.repo(wire)
455 444 ctx = repo[revision]
456 445 fctx = ctx.filectx(path)
457 446 return fctx.data()
458 447
459 448 @reraise_safe_exceptions
460 449 def fctx_flags(self, wire, revision, path):
461 450 repo = self._factory.repo(wire)
462 451 ctx = repo[revision]
463 452 fctx = ctx.filectx(path)
464 453 return fctx.flags()
465 454
466 455 @reraise_safe_exceptions
467 456 def fctx_size(self, wire, revision, path):
468 457 repo = self._factory.repo(wire)
469 458 ctx = repo[revision]
470 459 fctx = ctx.filectx(path)
471 460 return fctx.size()
472 461
    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        """All commit hashes of the *name*-filtered repoview.

        NOTE(review): relies on Python 2 `map` returning a list so that
        `[:-1]` can drop the trailing null entry of the revlog index;
        entry[7] is presumably the node hash — verify against the
        mercurial revlog index layout.
        """
        repo = self._factory.repo(wire)
        revs = repo.filtered(name).changelog.index
        return map(lambda x: hex(x[7]), revs)[:-1]
478 467
479 468 @reraise_safe_exceptions
480 469 def get_config_value(self, wire, section, name, untrusted=False):
481 470 repo = self._factory.repo(wire)
482 471 return repo.ui.config(section, name, untrusted=untrusted)
483 472
484 473 @reraise_safe_exceptions
485 474 def get_config_bool(self, wire, section, name, untrusted=False):
486 475 repo = self._factory.repo(wire)
487 476 return repo.ui.configbool(section, name, untrusted=untrusted)
488 477
489 478 @reraise_safe_exceptions
490 479 def get_config_list(self, wire, section, name, untrusted=False):
491 480 repo = self._factory.repo(wire)
492 481 return repo.ui.configlist(section, name, untrusted=untrusted)
493 482
    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # a "standin" is the placeholder file tracked in place of the real
        # largefile content; *wire* is unused but kept for the uniform
        # remote-method signature
        return largefiles.lfutil.isstandin(path)
497 486
498 487 @reraise_safe_exceptions
499 def in_store(self, wire, sha):
488 def in_largefiles_store(self, wire, sha):
500 489 repo = self._factory.repo(wire)
501 490 return largefiles.lfutil.instore(repo, sha)
502 491
503 492 @reraise_safe_exceptions
504 493 def in_user_cache(self, wire, sha):
505 494 repo = self._factory.repo(wire)
506 495 return largefiles.lfutil.inusercache(repo.ui, sha)
507 496
508 497 @reraise_safe_exceptions
509 498 def store_path(self, wire, sha):
510 499 repo = self._factory.repo(wire)
511 500 return largefiles.lfutil.storepath(repo, sha)
512 501
513 502 @reraise_safe_exceptions
514 503 def link(self, wire, sha, path):
515 504 repo = self._factory.repo(wire)
516 505 largefiles.lfutil.link(
517 506 largefiles.lfutil.usercachepath(repo.ui, sha), path)
518 507
    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        # instantiate (and optionally create) the repository for its side
        # effects only; NOTE(review): no value is returned — callers appear
        # to use this as an existence/creation check, confirm upstream
        self._factory.repo(wire, create=create)
522 511
    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        """Resolve *revision* to its hex id; with both=True also return rev number."""
        # TODO Paris: Ugly hack to "deserialize" long for msgpack
        if isinstance(revision, float):
            revision = long(revision)
        repo = self._factory.repo(wire)
        try:
            ctx = repo[revision]
        except RepoLookupError:
            raise exceptions.LookupException(revision)
        except LookupError as e:
            raise exceptions.LookupException(e.name)

        if not both:
            return ctx.hex()

        # re-resolve via the hex id so the returned rev number matches it
        ctx = repo[ctx.hex()]
        return ctx.hex(), ctx.rev()
541 530
542 531 @reraise_safe_exceptions
543 532 def pull(self, wire, url, commit_ids=None):
544 533 repo = self._factory.repo(wire)
545 534 remote = peer(repo, {}, url)
546 535 if commit_ids:
547 536 commit_ids = [bin(commit_id) for commit_id in commit_ids]
548 537
549 538 return exchange.pull(
550 539 repo, remote, heads=commit_ids, force=None).cgresult
551 540
552 541 @reraise_safe_exceptions
553 542 def revision(self, wire, rev):
554 543 repo = self._factory.repo(wire)
555 544 ctx = repo[rev]
556 545 return ctx.rev()
557 546
558 547 @reraise_safe_exceptions
559 548 def rev_range(self, wire, filter):
560 549 repo = self._factory.repo(wire)
561 550 revisions = [rev for rev in revrange(repo, filter)]
562 551 return revisions
563 552
    @reraise_safe_exceptions
    def rev_range_hash(self, wire, node):
        """Hex hashes of every revision from *node* up to tip, ascending."""
        repo = self._factory.repo(wire)

        def get_revs(repo, rev_opt):
            # resolve a revset to its (max, min) revision pair; an empty
            # result maps to (nullrev, nullrev), no revset means whole repo
            if rev_opt:
                revs = revrange(repo, rev_opt)
                if len(revs) == 0:
                    return (nullrev, nullrev)
                return max(revs), min(revs)
            else:
                return len(repo) - 1, 0

        stop, start = get_revs(repo, [node + ':'])
        # NOTE: xrange — this module targets Python 2
        revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
        return revs
580 569
581 570 @reraise_safe_exceptions
582 571 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
583 572 other_path = kwargs.pop('other_path', None)
584 573
585 574 # case when we want to compare two independent repositories
586 575 if other_path and other_path != wire["path"]:
587 576 baseui = self._factory._create_config(wire["config"])
588 577 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
589 578 else:
590 579 repo = self._factory.repo(wire)
591 580 return list(repo.revs(rev_spec, *args))
592 581
593 582 @reraise_safe_exceptions
594 583 def strip(self, wire, revision, update, backup):
595 584 repo = self._factory.repo(wire)
596 585 ctx = repo[revision]
597 586 hgext_strip(
598 587 repo.baseui, repo, ctx.node(), update=update, backup=backup)
599 588
    @reraise_safe_exceptions
    def verify(self, wire,):
        """Run repository integrity verification and return its textual output."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # un-quiet the ui so verify's progress/report lines are emitted
        baseui.setconfig('ui', 'quiet', 'false')
        output = io.BytesIO()

        # capture everything the ui would print into the buffer
        def write(data, **unused_kwargs):
            output.write(data)
        baseui.write = write

        # swap the repo's ui for the capturing one before verification
        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()
603
604 @reraise_safe_exceptions
601 605 def tag(self, wire, name, revision, message, local, user,
602 606 tag_time, tag_timezone):
603 607 repo = self._factory.repo(wire)
604 608 ctx = repo[revision]
605 609 node = ctx.node()
606 610
607 611 date = (tag_time, tag_timezone)
608 612 try:
609 613 repo.tag(name, node, message, local, user, date)
610 614 except Abort as e:
611 615 log.exception("Tag operation aborted")
612 616 # Exception can contain unicode which we convert
613 617 raise exceptions.AbortException(repr(e))
614 618
615 619 @reraise_safe_exceptions
616 620 def tags(self, wire):
617 621 repo = self._factory.repo(wire)
618 622 return repo.tags()
619 623
620 624 @reraise_safe_exceptions
621 625 def update(self, wire, node=None, clean=False):
622 626 repo = self._factory.repo(wire)
623 627 baseui = self._factory._create_config(wire['config'])
624 628 commands.update(baseui, repo, node=node, clean=clean)
625 629
626 630 @reraise_safe_exceptions
627 631 def identify(self, wire):
628 632 repo = self._factory.repo(wire)
629 633 baseui = self._factory._create_config(wire['config'])
630 634 output = io.BytesIO()
631 635 baseui.write = output.write
632 636 # This is required to get a full node id
633 637 baseui.debugflag = True
634 638 commands.identify(baseui, repo, id=True)
635 639
636 640 return output.getvalue()
637 641
638 642 @reraise_safe_exceptions
639 643 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
640 644 hooks=True):
641 645 repo = self._factory.repo(wire)
642 646 baseui = self._factory._create_config(wire['config'], hooks=hooks)
643 647
644 648 # Mercurial internally has a lot of logic that checks ONLY if
645 649 # option is defined, we just pass those if they are defined then
646 650 opts = {}
647 651 if bookmark:
648 652 opts['bookmark'] = bookmark
649 653 if branch:
650 654 opts['branch'] = branch
651 655 if revision:
652 656 opts['rev'] = revision
653 657
654 658 commands.pull(baseui, repo, source, **opts)
655 659
656 660 @reraise_safe_exceptions
657 661 def heads(self, wire, branch=None):
658 662 repo = self._factory.repo(wire)
659 663 baseui = self._factory._create_config(wire['config'])
660 664 output = io.BytesIO()
661 665
662 666 def write(data, **unused_kwargs):
663 667 output.write(data)
664 668
665 669 baseui.write = write
666 670 if branch:
667 671 args = [branch]
668 672 else:
669 673 args = []
670 674 commands.heads(baseui, repo, template='{node} ', *args)
671 675
672 676 return output.getvalue()
673 677
674 678 @reraise_safe_exceptions
675 679 def ancestor(self, wire, revision1, revision2):
676 680 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'])
678 output = io.BytesIO()
679 baseui.write = output.write
680 commands.debugancestor(baseui, repo, revision1, revision2)
681
682 return output.getvalue()
681 changelog = repo.changelog
682 lookup = repo.lookup
683 a = changelog.ancestor(lookup(revision1), lookup(revision2))
684 return hex(a)
683 685
684 686 @reraise_safe_exceptions
685 687 def push(self, wire, revisions, dest_path, hooks=True,
686 688 push_branches=False):
687 689 repo = self._factory.repo(wire)
688 690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
689 691 commands.push(baseui, repo, dest=dest_path, rev=revisions,
690 692 new_branch=push_branches)
691 693
692 694 @reraise_safe_exceptions
693 695 def merge(self, wire, revision):
694 696 repo = self._factory.repo(wire)
695 697 baseui = self._factory._create_config(wire['config'])
696 698 repo.ui.setconfig('ui', 'merge', 'internal:dump')
697 699
698 700 # In case of sub repositories are used mercurial prompts the user in
699 701 # case of merge conflicts or different sub repository sources. By
700 702 # setting the interactive flag to `False` mercurial doesn't prompt the
701 703 # used but instead uses a default value.
702 704 repo.ui.setconfig('ui', 'interactive', False)
703 705
704 706 commands.merge(baseui, repo, rev=revision)
705 707
706 708 @reraise_safe_exceptions
707 709 def commit(self, wire, message, username):
708 710 repo = self._factory.repo(wire)
709 711 baseui = self._factory._create_config(wire['config'])
710 712 repo.ui.setconfig('ui', 'username', username)
711 713 commands.commit(baseui, repo, message=message)
712 714
713 715 @reraise_safe_exceptions
714 716 def rebase(self, wire, source=None, dest=None, abort=False):
715 717 repo = self._factory.repo(wire)
716 718 baseui = self._factory._create_config(wire['config'])
717 719 repo.ui.setconfig('ui', 'merge', 'internal:dump')
718 720 rebase.rebase(
719 721 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
720 722
721 723 @reraise_safe_exceptions
722 724 def bookmark(self, wire, bookmark, revision=None):
723 725 repo = self._factory.repo(wire)
724 726 baseui = self._factory._create_config(wire['config'])
725 727 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,372 +1,404 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2017 RodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 import io
21 import sys
22 import json
23 import logging
20 24 import collections
21 25 import importlib
22 import io
23 import json
24 26 import subprocess
25 import sys
27
26 28 from httplib import HTTPConnection
27 29
28 30
29 31 import mercurial.scmutil
30 32 import mercurial.node
31 33 import Pyro4
32 34 import simplejson as json
33 35
34 36 from vcsserver import exceptions
35 37
38 log = logging.getLogger(__name__)
39
36 40
37 41 class HooksHttpClient(object):
38 42 connection = None
39 43
40 44 def __init__(self, hooks_uri):
41 45 self.hooks_uri = hooks_uri
42 46
43 47 def __call__(self, method, extras):
44 48 connection = HTTPConnection(self.hooks_uri)
45 49 body = self._serialize(method, extras)
46 50 connection.request('POST', '/', body)
47 51 response = connection.getresponse()
48 52 return json.loads(response.read())
49 53
50 54 def _serialize(self, hook_name, extras):
51 55 data = {
52 56 'method': hook_name,
53 57 'extras': extras
54 58 }
55 59 return json.dumps(data)
56 60
57 61
58 62 class HooksDummyClient(object):
59 63 def __init__(self, hooks_module):
60 64 self._hooks_module = importlib.import_module(hooks_module)
61 65
62 66 def __call__(self, hook_name, extras):
63 67 with self._hooks_module.Hooks() as hooks:
64 68 return getattr(hooks, hook_name)(extras)
65 69
66 70
67 71 class HooksPyro4Client(object):
68 72 def __init__(self, hooks_uri):
69 73 self.hooks_uri = hooks_uri
70 74
71 75 def __call__(self, hook_name, extras):
72 76 with Pyro4.Proxy(self.hooks_uri) as hooks:
73 77 return getattr(hooks, hook_name)(extras)
74 78
75 79
76 80 class RemoteMessageWriter(object):
77 81 """Writer base class."""
78 82 def write(message):
79 83 raise NotImplementedError()
80 84
81 85
82 86 class HgMessageWriter(RemoteMessageWriter):
83 87 """Writer that knows how to send messages to mercurial clients."""
84 88
85 89 def __init__(self, ui):
86 90 self.ui = ui
87 91
88 92 def write(self, message):
89 93 # TODO: Check why the quiet flag is set by default.
90 94 old = self.ui.quiet
91 95 self.ui.quiet = False
92 96 self.ui.status(message.encode('utf-8'))
93 97 self.ui.quiet = old
94 98
95 99
96 100 class GitMessageWriter(RemoteMessageWriter):
97 101 """Writer that knows how to send messages to git clients."""
98 102
99 103 def __init__(self, stdout=None):
100 104 self.stdout = stdout or sys.stdout
101 105
102 106 def write(self, message):
103 107 self.stdout.write(message.encode('utf-8'))
104 108
105 109
106 110 def _handle_exception(result):
107 111 exception_class = result.get('exception')
112 exception_traceback = result.get('exception_traceback')
113
114 if exception_traceback:
115 log.error('Got traceback from remote call:%s', exception_traceback)
116
108 117 if exception_class == 'HTTPLockedRC':
109 118 raise exceptions.RepositoryLockedException(*result['exception_args'])
110 119 elif exception_class == 'RepositoryError':
111 120 raise exceptions.VcsException(*result['exception_args'])
112 121 elif exception_class:
113 122 raise Exception('Got remote exception "%s" with args "%s"' %
114 123 (exception_class, result['exception_args']))
115 124
116 125
117 126 def _get_hooks_client(extras):
118 127 if 'hooks_uri' in extras:
119 128 protocol = extras.get('hooks_protocol')
120 129 return (
121 130 HooksHttpClient(extras['hooks_uri'])
122 131 if protocol == 'http'
123 132 else HooksPyro4Client(extras['hooks_uri'])
124 133 )
125 134 else:
126 135 return HooksDummyClient(extras['hooks_module'])
127 136
128 137
129 138 def _call_hook(hook_name, extras, writer):
130 139 hooks = _get_hooks_client(extras)
131 140 result = hooks(hook_name, extras)
132 141 writer.write(result['output'])
133 142 _handle_exception(result)
134 143
135 144 return result['status']
136 145
137 146
138 147 def _extras_from_ui(ui):
139 148 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
140 149 return extras
141 150
142 151
143 152 def repo_size(ui, repo, **kwargs):
144 153 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
145 154
146 155
147 156 def pre_pull(ui, repo, **kwargs):
148 157 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
149 158
150 159
151 160 def post_pull(ui, repo, **kwargs):
152 161 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
153 162
154 163
155 def pre_push(ui, repo, **kwargs):
156 return _call_hook('pre_push', _extras_from_ui(ui), HgMessageWriter(ui))
164 def pre_push(ui, repo, node=None, **kwargs):
165 extras = _extras_from_ui(ui)
166
167 rev_data = []
168 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
169 branches = collections.defaultdict(list)
170 for commit_id, branch in _rev_range_hash(repo, node, with_branch=True):
171 branches[branch].append(commit_id)
172
173 for branch, commits in branches.iteritems():
174 old_rev = kwargs.get('node_last') or commits[0]
175 rev_data.append({
176 'old_rev': old_rev,
177 'new_rev': commits[-1],
178 'ref': '',
179 'type': 'branch',
180 'name': branch,
181 })
182
183 extras['commit_ids'] = rev_data
184 return _call_hook('pre_push', extras, HgMessageWriter(ui))
157 185
158 186
159 # N.B.(skreft): the two functions below were taken and adapted from
160 # rhodecode.lib.vcs.remote.handle_git_pre_receive
161 # They are required to compute the commit_ids
162 def _get_revs(repo, rev_opt):
163 revs = [rev for rev in mercurial.scmutil.revrange(repo, rev_opt)]
164 if len(revs) == 0:
165 return (mercurial.node.nullrev, mercurial.node.nullrev)
187 def _rev_range_hash(repo, node, with_branch=False):
166 188
167 return max(revs), min(revs)
168
189 commits = []
190 for rev in xrange(repo[node], len(repo)):
191 ctx = repo[rev]
192 commit_id = mercurial.node.hex(ctx.node())
193 branch = ctx.branch()
194 if with_branch:
195 commits.append((commit_id, branch))
196 else:
197 commits.append(commit_id)
169 198
170 def _rev_range_hash(repo, node):
171 stop, start = _get_revs(repo, [node + ':'])
172 revs = [mercurial.node.hex(repo[r].node()) for r in xrange(start, stop + 1)]
173
174 return revs
199 return commits
175 200
176 201
177 202 def post_push(ui, repo, node, **kwargs):
178 203 commit_ids = _rev_range_hash(repo, node)
179 204
180 205 extras = _extras_from_ui(ui)
181 206 extras['commit_ids'] = commit_ids
182 207
183 208 return _call_hook('post_push', extras, HgMessageWriter(ui))
184 209
185 210
186 211 # backward compat
187 212 log_pull_action = post_pull
188 213
189 214 # backward compat
190 215 log_push_action = post_push
191 216
192 217
193 218 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
194 219 """
195 220 Old hook name: keep here for backward compatibility.
196 221
197 222 This is only required when the installed git hooks are not upgraded.
198 223 """
199 224 pass
200 225
201 226
202 227 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
203 228 """
204 229 Old hook name: keep here for backward compatibility.
205 230
206 231 This is only required when the installed git hooks are not upgraded.
207 232 """
208 233 pass
209 234
210 235
211 236 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
212 237
213 238
214 239 def git_pre_pull(extras):
215 240 """
216 241 Pre pull hook.
217 242
218 243 :param extras: dictionary containing the keys defined in simplevcs
219 244 :type extras: dict
220 245
221 246 :return: status code of the hook. 0 for success.
222 247 :rtype: int
223 248 """
224 249 if 'pull' not in extras['hooks']:
225 250 return HookResponse(0, '')
226 251
227 252 stdout = io.BytesIO()
228 253 try:
229 254 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
230 255 except Exception as error:
231 256 status = 128
232 257 stdout.write('ERROR: %s\n' % str(error))
233 258
234 259 return HookResponse(status, stdout.getvalue())
235 260
236 261
237 262 def git_post_pull(extras):
238 263 """
239 264 Post pull hook.
240 265
241 266 :param extras: dictionary containing the keys defined in simplevcs
242 267 :type extras: dict
243 268
244 269 :return: status code of the hook. 0 for success.
245 270 :rtype: int
246 271 """
247 272 if 'pull' not in extras['hooks']:
248 273 return HookResponse(0, '')
249 274
250 275 stdout = io.BytesIO()
251 276 try:
252 277 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
253 278 except Exception as error:
254 279 status = 128
255 280 stdout.write('ERROR: %s\n' % error)
256 281
257 282 return HookResponse(status, stdout.getvalue())
258 283
259 284
260 def git_pre_receive(unused_repo_path, unused_revs, env):
285 def _parse_git_ref_lines(revision_lines):
286 rev_data = []
287 for revision_line in revision_lines or []:
288 old_rev, new_rev, ref = revision_line.strip().split(' ')
289 ref_data = ref.split('/', 2)
290 if ref_data[1] in ('tags', 'heads'):
291 rev_data.append({
292 'old_rev': old_rev,
293 'new_rev': new_rev,
294 'ref': ref,
295 'type': ref_data[1],
296 'name': ref_data[2],
297 })
298 return rev_data
299
300
301 def git_pre_receive(unused_repo_path, revision_lines, env):
261 302 """
262 303 Pre push hook.
263 304
264 305 :param extras: dictionary containing the keys defined in simplevcs
265 306 :type extras: dict
266 307
267 308 :return: status code of the hook. 0 for success.
268 309 :rtype: int
269 310 """
270 311 extras = json.loads(env['RC_SCM_DATA'])
312 rev_data = _parse_git_ref_lines(revision_lines)
271 313 if 'push' not in extras['hooks']:
272 314 return 0
315 extras['commit_ids'] = rev_data
273 316 return _call_hook('pre_push', extras, GitMessageWriter())
274 317
275 318
276 319 def _run_command(arguments):
277 320 """
278 321 Run the specified command and return the stdout.
279 322
280 :param arguments: sequence of program arugments (including the program name)
323 :param arguments: sequence of program arguments (including the program name)
281 324 :type arguments: list[str]
282 325 """
283 326 # TODO(skreft): refactor this method and all the other similar ones.
284 327 # Probably this should be using subprocessio.
285 328 process = subprocess.Popen(
286 329 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
287 330 stdout, _ = process.communicate()
288 331
289 332 if process.returncode != 0:
290 333 raise Exception(
291 334 'Command %s exited with exit code %s' % (arguments,
292 335 process.returncode))
293 336
294 337 return stdout
295 338
296 339
297 340 def git_post_receive(unused_repo_path, revision_lines, env):
298 341 """
299 342 Post push hook.
300 343
301 344 :param extras: dictionary containing the keys defined in simplevcs
302 345 :type extras: dict
303 346
304 347 :return: status code of the hook. 0 for success.
305 348 :rtype: int
306 349 """
307 350 extras = json.loads(env['RC_SCM_DATA'])
308 351 if 'push' not in extras['hooks']:
309 352 return 0
310 353
311 rev_data = []
312 for revision_line in revision_lines:
313 old_rev, new_rev, ref = revision_line.strip().split(' ')
314 ref_data = ref.split('/', 2)
315 if ref_data[1] in ('tags', 'heads'):
316 rev_data.append({
317 'old_rev': old_rev,
318 'new_rev': new_rev,
319 'ref': ref,
320 'type': ref_data[1],
321 'name': ref_data[2],
322 })
354 rev_data = _parse_git_ref_lines(revision_lines)
323 355
324 356 git_revs = []
325 357
326 358 # N.B.(skreft): it is ok to just call git, as git before calling a
327 359 # subcommand sets the PATH environment variable so that it point to the
328 360 # correct version of the git executable.
329 361 empty_commit_id = '0' * 40
330 362 for push_ref in rev_data:
331 363 type_ = push_ref['type']
332 364 if type_ == 'heads':
333 365 if push_ref['old_rev'] == empty_commit_id:
334 366
335 367 # Fix up head revision if needed
336 368 cmd = ['git', 'show', 'HEAD']
337 369 try:
338 370 _run_command(cmd)
339 371 except Exception:
340 372 cmd = ['git', 'symbolic-ref', 'HEAD',
341 373 'refs/heads/%s' % push_ref['name']]
342 print "Setting default branch to %s" % push_ref['name']
374 print("Setting default branch to %s" % push_ref['name'])
343 375 _run_command(cmd)
344 376
345 377 cmd = ['git', 'for-each-ref', '--format=%(refname)',
346 378 'refs/heads/*']
347 379 heads = _run_command(cmd)
348 380 heads = heads.replace(push_ref['ref'], '')
349 381 heads = ' '.join(head for head in heads.splitlines() if head)
350 382 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
351 383 '--', push_ref['new_rev'], '--not', heads]
352 384 git_revs.extend(_run_command(cmd).splitlines())
353 385 elif push_ref['new_rev'] == empty_commit_id:
354 386 # delete branch case
355 387 git_revs.append('delete_branch=>%s' % push_ref['name'])
356 388 else:
357 389 cmd = ['git', 'log',
358 390 '{old_rev}..{new_rev}'.format(**push_ref),
359 391 '--reverse', '--pretty=format:%H']
360 392 git_revs.extend(_run_command(cmd).splitlines())
361 393 elif type_ == 'tags':
362 394 git_revs.append('tag=>%s' % push_ref['name'])
363 395
364 396 extras['commit_ids'] = git_revs
365 397
366 398 if 'repo_size' in extras['hooks']:
367 399 try:
368 400 _call_hook('repo_size', extras, GitMessageWriter())
369 401 except:
370 402 pass
371 403
372 404 return _call_hook('post_push', extras, GitMessageWriter())
@@ -1,408 +1,434 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import base64
19 19 import locale
20 20 import logging
21 21 import uuid
22 22 import wsgiref.util
23 23 import traceback
24 24 from itertools import chain
25 25
26 26 import msgpack
27 27 from beaker.cache import CacheManager
28 28 from beaker.util import parse_cache_config_options
29 29 from pyramid.config import Configurator
30 30 from pyramid.wsgi import wsgiapp
31 31
32 32 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
33 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
33 34 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
34 35 from vcsserver.echo_stub.echo_app import EchoApp
35 36 from vcsserver.exceptions import HTTPRepoLocked
36 37 from vcsserver.server import VcsServer
37 38
38 39 try:
39 40 from vcsserver.git import GitFactory, GitRemote
40 41 except ImportError:
41 42 GitFactory = None
42 43 GitRemote = None
44
43 45 try:
44 46 from vcsserver.hg import MercurialFactory, HgRemote
45 47 except ImportError:
46 48 MercurialFactory = None
47 49 HgRemote = None
50
48 51 try:
49 52 from vcsserver.svn import SubversionFactory, SvnRemote
50 53 except ImportError:
51 54 SubversionFactory = None
52 55 SvnRemote = None
53 56
54 57 log = logging.getLogger(__name__)
55 58
56 59
57 60 class VCS(object):
58 61 def __init__(self, locale=None, cache_config=None):
59 62 self.locale = locale
60 63 self.cache_config = cache_config
61 64 self._configure_locale()
62 65 self._initialize_cache()
63 66
64 67 if GitFactory and GitRemote:
65 68 git_repo_cache = self.cache.get_cache_region(
66 69 'git', region='repo_object')
67 70 git_factory = GitFactory(git_repo_cache)
68 71 self._git_remote = GitRemote(git_factory)
69 72 else:
70 73 log.info("Git client import failed")
71 74
72 75 if MercurialFactory and HgRemote:
73 76 hg_repo_cache = self.cache.get_cache_region(
74 77 'hg', region='repo_object')
75 78 hg_factory = MercurialFactory(hg_repo_cache)
76 79 self._hg_remote = HgRemote(hg_factory)
77 80 else:
78 81 log.info("Mercurial client import failed")
79 82
80 83 if SubversionFactory and SvnRemote:
81 84 svn_repo_cache = self.cache.get_cache_region(
82 85 'svn', region='repo_object')
83 86 svn_factory = SubversionFactory(svn_repo_cache)
84 87 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
85 88 else:
86 89 log.info("Subversion client import failed")
87 90
88 91 self._vcsserver = VcsServer()
89 92
90 93 def _initialize_cache(self):
91 94 cache_config = parse_cache_config_options(self.cache_config)
92 95 log.info('Initializing beaker cache: %s' % cache_config)
93 96 self.cache = CacheManager(**cache_config)
94 97
95 98 def _configure_locale(self):
96 99 if self.locale:
97 100 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
98 101 else:
99 102 log.info(
100 103 'Configuring locale subsystem based on environment variables')
101 104 try:
102 105 # If self.locale is the empty string, then the locale
103 106 # module will use the environment variables. See the
104 107 # documentation of the package `locale`.
105 108 locale.setlocale(locale.LC_ALL, self.locale)
106 109
107 110 language_code, encoding = locale.getlocale()
108 111 log.info(
109 112 'Locale set to language code "%s" with encoding "%s".',
110 113 language_code, encoding)
111 114 except locale.Error:
112 115 log.exception(
113 116 'Cannot set locale, not configuring the locale system')
114 117
115 118
116 119 class WsgiProxy(object):
117 120 def __init__(self, wsgi):
118 121 self.wsgi = wsgi
119 122
120 123 def __call__(self, environ, start_response):
121 124 input_data = environ['wsgi.input'].read()
122 125 input_data = msgpack.unpackb(input_data)
123 126
124 127 error = None
125 128 try:
126 129 data, status, headers = self.wsgi.handle(
127 130 input_data['environment'], input_data['input_data'],
128 131 *input_data['args'], **input_data['kwargs'])
129 132 except Exception as e:
130 133 data, status, headers = [], None, None
131 134 error = {
132 135 'message': str(e),
133 136 '_vcs_kind': getattr(e, '_vcs_kind', None)
134 137 }
135 138
136 139 start_response(200, {})
137 140 return self._iterator(error, status, headers, data)
138 141
139 142 def _iterator(self, error, status, headers, data):
140 143 initial_data = [
141 144 error,
142 145 status,
143 146 headers,
144 147 ]
145 148
146 149 for d in chain(initial_data, data):
147 150 yield msgpack.packb(d)
148 151
149 152
150 153 class HTTPApplication(object):
151 154 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
152 155
153 156 remote_wsgi = remote_wsgi
154 157 _use_echo_app = False
155 158
156 def __init__(self, settings=None):
159 def __init__(self, settings=None, global_config=None):
157 160 self.config = Configurator(settings=settings)
161 self.global_config = global_config
162
158 163 locale = settings.get('locale', '') or 'en_US.UTF-8'
159 164 vcs = VCS(locale=locale, cache_config=settings)
160 165 self._remotes = {
161 166 'hg': vcs._hg_remote,
162 167 'git': vcs._git_remote,
163 168 'svn': vcs._svn_remote,
164 169 'server': vcs._vcsserver,
165 170 }
166 171 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
167 172 self._use_echo_app = True
168 173 log.warning("Using EchoApp for VCS operations.")
169 174 self.remote_wsgi = remote_wsgi_stub
170 175 self._configure_settings(settings)
171 176 self._configure()
172 177
173 178 def _configure_settings(self, app_settings):
174 179 """
175 180 Configure the settings module.
176 181 """
177 182 git_path = app_settings.get('git_path', None)
178 183 if git_path:
179 184 settings.GIT_EXECUTABLE = git_path
180 185
181 186 def _configure(self):
182 187 self.config.add_renderer(
183 188 name='msgpack',
184 189 factory=self._msgpack_renderer_factory)
185 190
186 191 self.config.add_route('service', '/_service')
187 192 self.config.add_route('status', '/status')
188 193 self.config.add_route('hg_proxy', '/proxy/hg')
189 194 self.config.add_route('git_proxy', '/proxy/git')
190 195 self.config.add_route('vcs', '/{backend}')
191 196 self.config.add_route('stream_git', '/stream/git/*repo_name')
192 197 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
193 198
194 199 self.config.add_view(
195 200 self.status_view, route_name='status', renderer='json')
196 201 self.config.add_view(
197 202 self.service_view, route_name='service', renderer='msgpack')
198 203
199 204 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
200 205 self.config.add_view(self.git_proxy(), route_name='git_proxy')
201 206 self.config.add_view(
202 207 self.vcs_view, route_name='vcs', renderer='msgpack',
203 208 custom_predicates=[self.is_vcs_view])
204 209
205 210 self.config.add_view(self.hg_stream(), route_name='stream_hg')
206 211 self.config.add_view(self.git_stream(), route_name='stream_git')
207 212
208 213 def notfound(request):
209 214 return {'status': '404 NOT FOUND'}
210 215 self.config.add_notfound_view(notfound, renderer='json')
211 216
212 self.config.add_view(
213 self.handle_vcs_exception, context=Exception,
214 custom_predicates=[self.is_vcs_exception])
215
216 self.config.add_view(
217 self.general_error_handler, context=Exception)
217 self.config.add_view(self.handle_vcs_exception, context=Exception)
218 218
219 219 self.config.add_tween(
220 220 'vcsserver.tweens.RequestWrapperTween',
221 221 )
222 222
223 223 def wsgi_app(self):
224 224 return self.config.make_wsgi_app()
225 225
226 226 def vcs_view(self, request):
227 227 remote = self._remotes[request.matchdict['backend']]
228 228 payload = msgpack.unpackb(request.body, use_list=True)
229 229 method = payload.get('method')
230 230 params = payload.get('params')
231 231 wire = params.get('wire')
232 232 args = params.get('args')
233 233 kwargs = params.get('kwargs')
234 234 if wire:
235 235 try:
236 236 wire['context'] = uuid.UUID(wire['context'])
237 237 except KeyError:
238 238 pass
239 239 args.insert(0, wire)
240 240
241 241 log.debug('method called:%s with kwargs:%s', method, kwargs)
242 242 try:
243 243 resp = getattr(remote, method)(*args, **kwargs)
244 244 except Exception as e:
245 245 tb_info = traceback.format_exc()
246 246
247 247 type_ = e.__class__.__name__
248 248 if type_ not in self.ALLOWED_EXCEPTIONS:
249 249 type_ = None
250 250
251 251 resp = {
252 252 'id': payload.get('id'),
253 253 'error': {
254 254 'message': e.message,
255 255 'traceback': tb_info,
256 256 'type': type_
257 257 }
258 258 }
259 259 try:
260 260 resp['error']['_vcs_kind'] = e._vcs_kind
261 261 except AttributeError:
262 262 pass
263 263 else:
264 264 resp = {
265 265 'id': payload.get('id'),
266 266 'result': resp
267 267 }
268 268
269 269 return resp
270 270
271 271 def status_view(self, request):
272 272 return {'status': 'OK'}
273 273
274 274 def service_view(self, request):
275 275 import vcsserver
276 import ConfigParser as configparser
277
276 278 payload = msgpack.unpackb(request.body, use_list=True)
279
280 try:
281 path = self.global_config['__file__']
282 config = configparser.ConfigParser()
283 config.read(path)
284 parsed_ini = config
285 if parsed_ini.has_section('server:main'):
286 parsed_ini = dict(parsed_ini.items('server:main'))
287 except Exception:
288 log.exception('Failed to read .ini file for display')
289 parsed_ini = {}
290
277 291 resp = {
278 292 'id': payload.get('id'),
279 293 'result': dict(
280 294 version=vcsserver.__version__,
281 config={},
295 config=parsed_ini,
282 296 payload=payload,
283 297 )
284 298 }
285 299 return resp
286 300
287 301 def _msgpack_renderer_factory(self, info):
288 302 def _render(value, system):
289 303 value = msgpack.packb(value)
290 304 request = system.get('request')
291 305 if request is not None:
292 306 response = request.response
293 307 ct = response.content_type
294 308 if ct == response.default_content_type:
295 309 response.content_type = 'application/x-msgpack'
296 310 return value
297 311 return _render
298 312
299 313 def hg_proxy(self):
300 314 @wsgiapp
301 315 def _hg_proxy(environ, start_response):
302 316 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
303 317 return app(environ, start_response)
304 318 return _hg_proxy
305 319
306 320 def git_proxy(self):
307 321 @wsgiapp
308 322 def _git_proxy(environ, start_response):
309 323 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
310 324 return app(environ, start_response)
311 325 return _git_proxy
312 326
313 327 def hg_stream(self):
314 328 if self._use_echo_app:
315 329 @wsgiapp
316 330 def _hg_stream(environ, start_response):
317 331 app = EchoApp('fake_path', 'fake_name', None)
318 332 return app(environ, start_response)
319 333 return _hg_stream
320 334 else:
321 335 @wsgiapp
322 336 def _hg_stream(environ, start_response):
323 337 repo_path = environ['HTTP_X_RC_REPO_PATH']
324 338 repo_name = environ['HTTP_X_RC_REPO_NAME']
325 339 packed_config = base64.b64decode(
326 340 environ['HTTP_X_RC_REPO_CONFIG'])
327 341 config = msgpack.unpackb(packed_config)
328 342 app = scm_app.create_hg_wsgi_app(
329 343 repo_path, repo_name, config)
330 344
331 345 # Consitent path information for hgweb
332 346 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
333 347 environ['REPO_NAME'] = repo_name
334 348 return app(environ, ResponseFilter(start_response))
335 349 return _hg_stream
336 350
337 351 def git_stream(self):
338 352 if self._use_echo_app:
339 353 @wsgiapp
340 354 def _git_stream(environ, start_response):
341 355 app = EchoApp('fake_path', 'fake_name', None)
342 356 return app(environ, start_response)
343 357 return _git_stream
344 358 else:
345 359 @wsgiapp
346 360 def _git_stream(environ, start_response):
347 361 repo_path = environ['HTTP_X_RC_REPO_PATH']
348 362 repo_name = environ['HTTP_X_RC_REPO_NAME']
349 363 packed_config = base64.b64decode(
350 364 environ['HTTP_X_RC_REPO_CONFIG'])
351 365 config = msgpack.unpackb(packed_config)
352 366
353 367 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
368 content_type = environ.get('CONTENT_TYPE', '')
369
370 path = environ['PATH_INFO']
371 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
372 log.debug(
373 'LFS: Detecting if request `%s` is LFS server path based '
374 'on content type:`%s`, is_lfs:%s',
375 path, content_type, is_lfs_request)
376
377 if not is_lfs_request:
378 # fallback detection by path
379 if GIT_LFS_PROTO_PAT.match(path):
380 is_lfs_request = True
381 log.debug(
382 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
383 path, is_lfs_request)
384
385 if is_lfs_request:
386 app = scm_app.create_git_lfs_wsgi_app(
387 repo_path, repo_name, config)
388 else:
354 389 app = scm_app.create_git_wsgi_app(
355 390 repo_path, repo_name, config)
356 391 return app(environ, start_response)
392
357 393 return _git_stream
358 394
359 395 def is_vcs_view(self, context, request):
360 396 """
361 397 View predicate that returns true if given backend is supported by
362 398 defined remotes.
363 399 """
364 400 backend = request.matchdict.get('backend')
365 401 return backend in self._remotes
366 402
367 def is_vcs_exception(self, context, request):
368 """
369 View predicate that returns true if the context object is a VCS
370 exception.
371 """
372 return hasattr(context, '_vcs_kind')
373
374 403 def handle_vcs_exception(self, exception, request):
375 if exception._vcs_kind == 'repo_locked':
404 _vcs_kind = getattr(exception, '_vcs_kind', '')
405 if _vcs_kind == 'repo_locked':
376 406 # Get custom repo-locked status code if present.
377 407 status_code = request.headers.get('X-RC-Locked-Status-Code')
378 408 return HTTPRepoLocked(
379 409 title=exception.message, status_code=status_code)
380 410
381 411 # Re-raise exception if we can not handle it.
382 raise exception
383
384 def general_error_handler(self, exception, request):
385 412 log.exception(
386 'error occurred handling this request for path: %s',
387 request.path)
413 'error occurred handling this request for path: %s', request.path)
388 414 raise exception
389 415
390 416
391 417 class ResponseFilter(object):
392 418
393 419 def __init__(self, start_response):
394 420 self._start_response = start_response
395 421
396 422 def __call__(self, status, response_headers, exc_info=None):
397 423 headers = tuple(
398 424 (h, v) for h, v in response_headers
399 425 if not wsgiref.util.is_hop_by_hop(h))
400 426 return self._start_response(status, headers, exc_info)
401 427
402 428
403 429 def main(global_config, **settings):
404 430 if MercurialFactory:
405 431 hgpatches.patch_largefiles_capabilities()
406 432 hgpatches.patch_subrepo_type_mapping()
407 app = HTTPApplication(settings=settings)
433 app = HTTPApplication(settings=settings, global_config=global_config)
408 434 return app.wsgi_app()
@@ -1,174 +1,209 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 import os
18 19 import logging
19 import os
20 20
21 21 import mercurial
22 22 import mercurial.error
23 23 import mercurial.hgweb.common
24 24 import mercurial.hgweb.hgweb_mod
25 25 import mercurial.hgweb.protocol
26 26 import webob.exc
27 27
28 from vcsserver import pygrack, exceptions, settings
28 from vcsserver import pygrack, exceptions, settings, git_lfs
29 29
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 34 # propagated from mercurial documentation
35 35 HG_UI_SECTIONS = [
36 36 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 37 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 38 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 39 ]
40 40
41 41
42 42 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 43 """Extension of hgweb that simplifies some functions."""
44 44
45 45 def _get_view(self, repo):
46 46 """Views are not supported."""
47 47 return repo
48 48
49 49 def loadsubweb(self):
50 50 """The result is only used in the templater method which is not used."""
51 51 return None
52 52
53 53 def run(self):
54 54 """Unused function so raise an exception if accidentally called."""
55 55 raise NotImplementedError
56 56
57 57 def templater(self, req):
58 58 """Function used in an unreachable code path.
59 59
60 60 This code is unreachable because we guarantee that the HTTP request,
61 61 corresponds to a Mercurial command. See the is_hg method. So, we are
62 62 never going to get a user-visible url.
63 63 """
64 64 raise NotImplementedError
65 65
66 66 def archivelist(self, nodeid):
67 67 """Unused function so raise an exception if accidentally called."""
68 68 raise NotImplementedError
69 69
70 70 def run_wsgi(self, req):
71 71 """Check the request has a valid command, failing fast otherwise."""
72 72 cmd = req.form.get('cmd', [''])[0]
73 73 if not mercurial.hgweb.protocol.iscmd(cmd):
74 74 req.respond(
75 75 mercurial.hgweb.common.ErrorResponse(
76 76 mercurial.hgweb.common.HTTP_BAD_REQUEST),
77 77 mercurial.hgweb.protocol.HGTYPE
78 78 )
79 79 return ['']
80 80
81 81 return super(HgWeb, self).run_wsgi(req)
82 82
83 83
84 84 def make_hg_ui_from_config(repo_config):
85 85 baseui = mercurial.ui.ui()
86 86
87 87 # clean the baseui object
88 88 baseui._ocfg = mercurial.config.config()
89 89 baseui._ucfg = mercurial.config.config()
90 90 baseui._tcfg = mercurial.config.config()
91 91
92 92 for section, option, value in repo_config:
93 93 baseui.setconfig(section, option, value)
94 94
95 95 # make our hgweb quiet so it doesn't print output
96 96 baseui.setconfig('ui', 'quiet', 'true')
97 97
98 98 return baseui
99 99
100 100
101 101 def update_hg_ui_from_hgrc(baseui, repo_path):
102 102 path = os.path.join(repo_path, '.hg', 'hgrc')
103 103
104 104 if not os.path.isfile(path):
105 105 log.debug('hgrc file is not present at %s, skipping...', path)
106 106 return
107 107 log.debug('reading hgrc from %s', path)
108 108 cfg = mercurial.config.config()
109 109 cfg.read(path)
110 110 for section in HG_UI_SECTIONS:
111 111 for k, v in cfg.items(section):
112 112 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
113 113 baseui.setconfig(section, k, v)
114 114
115 115
116 116 def create_hg_wsgi_app(repo_path, repo_name, config):
117 117 """
118 118 Prepares a WSGI application to handle Mercurial requests.
119 119
120 120 :param config: is a list of 3-item tuples representing a ConfigObject
121 121 (it is the serialized version of the config object).
122 122 """
123 123 log.debug("Creating Mercurial WSGI application")
124 124
125 125 baseui = make_hg_ui_from_config(config)
126 126 update_hg_ui_from_hgrc(baseui, repo_path)
127 127
128 128 try:
129 129 return HgWeb(repo_path, name=repo_name, baseui=baseui)
130 130 except mercurial.error.RequirementError as exc:
131 131 raise exceptions.RequirementException(exc)
132 132
133 133
134 134 class GitHandler(object):
135 """
136 Handler for Git operations like push/pull etc
137 """
135 138 def __init__(self, repo_location, repo_name, git_path, update_server_info,
136 139 extras):
137 140 if not os.path.isdir(repo_location):
138 141 raise OSError(repo_location)
139 142 self.content_path = repo_location
140 143 self.repo_name = repo_name
141 144 self.repo_location = repo_location
142 145 self.extras = extras
143 146 self.git_path = git_path
144 147 self.update_server_info = update_server_info
145 148
146 149 def __call__(self, environ, start_response):
147 150 app = webob.exc.HTTPNotFound()
148 151 candidate_paths = (
149 152 self.content_path, os.path.join(self.content_path, '.git'))
150 153
151 154 for content_path in candidate_paths:
152 155 try:
153 156 app = pygrack.GitRepository(
154 157 self.repo_name, content_path, self.git_path,
155 158 self.update_server_info, self.extras)
156 159 break
157 160 except OSError:
158 161 continue
159 162
160 163 return app(environ, start_response)
161 164
162 165
163 166 def create_git_wsgi_app(repo_path, repo_name, config):
164 167 """
165 168 Creates a WSGI application to handle Git requests.
166 169
167 170 :param config: is a dictionary holding the extras.
168 171 """
169 172 git_path = settings.GIT_EXECUTABLE
170 173 update_server_info = config.pop('git_update_server_info')
171 174 app = GitHandler(
172 175 repo_path, repo_name, git_path, update_server_info, config)
173 176
174 177 return app
178
179
180 class GitLFSHandler(object):
181 """
182 Handler for Git LFS operations
183 """
184
185 def __init__(self, repo_location, repo_name, git_path, update_server_info,
186 extras):
187 if not os.path.isdir(repo_location):
188 raise OSError(repo_location)
189 self.content_path = repo_location
190 self.repo_name = repo_name
191 self.repo_location = repo_location
192 self.extras = extras
193 self.git_path = git_path
194 self.update_server_info = update_server_info
195
196 def get_app(self, git_lfs_enabled, git_lfs_store_path):
197 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
198 return app
199
200
201 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
202 git_path = settings.GIT_EXECUTABLE
203 update_server_info = config.pop('git_update_server_info')
204 git_lfs_enabled = config.pop('git_lfs_enabled')
205 git_lfs_store_path = config.pop('git_lfs_store_path')
206 app = GitLFSHandler(
207 repo_path, repo_name, git_path, update_server_info, config)
208
209 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,651 +1,644 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 from urllib2 import URLError
21 21 import logging
22 22 import posixpath as vcspath
23 23 import StringIO
24 24 import subprocess
25 25 import urllib
26 26
27 27 import svn.client
28 28 import svn.core
29 29 import svn.delta
30 30 import svn.diff
31 31 import svn.fs
32 32 import svn.repos
33 33
34 34 from vcsserver import svn_diff
35 35 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory
36 from vcsserver.base import RepoFactory, raise_from_original
37 37
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 # Set of svn compatible version flags.
43 43 # Compare with subversion/svnadmin/svnadmin.c
44 44 svn_compatible_versions = set([
45 45 'pre-1.4-compatible',
46 46 'pre-1.5-compatible',
47 47 'pre-1.6-compatible',
48 48 'pre-1.8-compatible',
49 49 ])
50 50
51 51
52 52 def reraise_safe_exceptions(func):
53 53 """Decorator for converting svn exceptions to something neutral."""
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except Exception as e:
58 58 if not hasattr(e, '_vcs_kind'):
59 59 log.exception("Unhandled exception in hg remote call")
60 60 raise_from_original(exceptions.UnhandledException)
61 61 raise
62 62 return wrapper
63 63
64 64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
76 65 class SubversionFactory(RepoFactory):
77 66
78 67 def _create_repo(self, wire, create, compatible_version):
79 68 path = svn.core.svn_path_canonicalize(wire['path'])
80 69 if create:
81 70 fs_config = {}
82 71 if compatible_version:
83 72 if compatible_version not in svn_compatible_versions:
84 73 raise Exception('Unknown SVN compatible version "{}"'
85 74 .format(compatible_version))
86 75 log.debug('Create SVN repo with compatible version "%s"',
87 76 compatible_version)
88 77 fs_config[compatible_version] = '1'
89 78 repo = svn.repos.create(path, "", "", None, fs_config)
90 79 else:
91 80 repo = svn.repos.open(path)
92 81 return repo
93 82
94 83 def repo(self, wire, create=False, compatible_version=None):
95 84 def create_new_repo():
96 85 return self._create_repo(wire, create, compatible_version)
97 86
98 87 return self._repo(wire, create_new_repo)
99 88
100 89
101 90
102 91 NODE_TYPE_MAPPING = {
103 92 svn.core.svn_node_file: 'file',
104 93 svn.core.svn_node_dir: 'dir',
105 94 }
106 95
107 96
108 97 class SvnRemote(object):
109 98
110 99 def __init__(self, factory, hg_factory=None):
111 100 self._factory = factory
112 101 # TODO: Remove once we do not use internal Mercurial objects anymore
113 102 # for subversion
114 103 self._hg_factory = hg_factory
115 104
116 105 @reraise_safe_exceptions
117 106 def discover_svn_version(self):
118 107 try:
119 108 import svn.core
120 109 svn_ver = svn.core.SVN_VERSION
121 110 except ImportError:
122 111 svn_ver = None
123 112 return svn_ver
124 113
125 114 def check_url(self, url, config_items):
126 115 # this can throw exception if not installed, but we detect this
127 116 from hgsubversion import svnrepo
128 117
129 118 baseui = self._hg_factory._create_config(config_items)
130 119 # uuid function get's only valid UUID from proper repo, else
131 120 # throws exception
132 121 try:
133 122 svnrepo.svnremoterepo(baseui, url).svn.uuid
134 123 except:
135 124 log.debug("Invalid svn url: %s", url)
136 125 raise URLError(
137 126 '"%s" is not a valid Subversion source url.' % (url, ))
138 127 return True
139 128
140 129 def is_path_valid_repository(self, wire, path):
141 130 try:
142 131 svn.repos.open(path)
143 132 except svn.core.SubversionException:
144 133 log.debug("Invalid Subversion path %s", path)
145 134 return False
146 135 return True
147 136
148 137 def lookup(self, wire, revision):
149 138 if revision not in [-1, None, 'HEAD']:
150 139 raise NotImplementedError
151 140 repo = self._factory.repo(wire)
152 141 fs_ptr = svn.repos.fs(repo)
153 142 head = svn.fs.youngest_rev(fs_ptr)
154 143 return head
155 144
156 145 def lookup_interval(self, wire, start_ts, end_ts):
157 146 repo = self._factory.repo(wire)
158 147 fsobj = svn.repos.fs(repo)
159 148 start_rev = None
160 149 end_rev = None
161 150 if start_ts:
162 151 start_ts_svn = apr_time_t(start_ts)
163 152 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
164 153 else:
165 154 start_rev = 1
166 155 if end_ts:
167 156 end_ts_svn = apr_time_t(end_ts)
168 157 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
169 158 else:
170 159 end_rev = svn.fs.youngest_rev(fsobj)
171 160 return start_rev, end_rev
172 161
173 162 def revision_properties(self, wire, revision):
174 163 repo = self._factory.repo(wire)
175 164 fs_ptr = svn.repos.fs(repo)
176 165 return svn.fs.revision_proplist(fs_ptr, revision)
177 166
178 167 def revision_changes(self, wire, revision):
179 168
180 169 repo = self._factory.repo(wire)
181 170 fsobj = svn.repos.fs(repo)
182 171 rev_root = svn.fs.revision_root(fsobj, revision)
183 172
184 173 editor = svn.repos.ChangeCollector(fsobj, rev_root)
185 174 editor_ptr, editor_baton = svn.delta.make_editor(editor)
186 175 base_dir = ""
187 176 send_deltas = False
188 177 svn.repos.replay2(
189 178 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
190 179 editor_ptr, editor_baton, None)
191 180
192 181 added = []
193 182 changed = []
194 183 removed = []
195 184
196 185 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
197 186 for path, change in editor.changes.iteritems():
198 187 # TODO: Decide what to do with directory nodes. Subversion can add
199 188 # empty directories.
200 189
201 190 if change.item_kind == svn.core.svn_node_dir:
202 191 continue
203 192 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
204 193 added.append(path)
205 194 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
206 195 svn.repos.CHANGE_ACTION_REPLACE]:
207 196 changed.append(path)
208 197 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
209 198 removed.append(path)
210 199 else:
211 200 raise NotImplementedError(
212 201 "Action %s not supported on path %s" % (
213 202 change.action, path))
214 203
215 204 changes = {
216 205 'added': added,
217 206 'changed': changed,
218 207 'removed': removed,
219 208 }
220 209 return changes
221 210
222 211 def node_history(self, wire, path, revision, limit):
223 212 cross_copies = False
224 213 repo = self._factory.repo(wire)
225 214 fsobj = svn.repos.fs(repo)
226 215 rev_root = svn.fs.revision_root(fsobj, revision)
227 216
228 217 history_revisions = []
229 218 history = svn.fs.node_history(rev_root, path)
230 219 history = svn.fs.history_prev(history, cross_copies)
231 220 while history:
232 221 __, node_revision = svn.fs.history_location(history)
233 222 history_revisions.append(node_revision)
234 223 if limit and len(history_revisions) >= limit:
235 224 break
236 225 history = svn.fs.history_prev(history, cross_copies)
237 226 return history_revisions
238 227
239 228 def node_properties(self, wire, path, revision):
240 229 repo = self._factory.repo(wire)
241 230 fsobj = svn.repos.fs(repo)
242 231 rev_root = svn.fs.revision_root(fsobj, revision)
243 232 return svn.fs.node_proplist(rev_root, path)
244 233
245 234 def file_annotate(self, wire, path, revision):
246 235 abs_path = 'file://' + urllib.pathname2url(
247 236 vcspath.join(wire['path'], path))
248 237 file_uri = svn.core.svn_path_canonicalize(abs_path)
249 238
250 239 start_rev = svn_opt_revision_value_t(0)
251 240 peg_rev = svn_opt_revision_value_t(revision)
252 241 end_rev = peg_rev
253 242
254 243 annotations = []
255 244
256 245 def receiver(line_no, revision, author, date, line, pool):
257 246 annotations.append((line_no, revision, line))
258 247
259 248 # TODO: Cannot use blame5, missing typemap function in the swig code
260 249 try:
261 250 svn.client.blame2(
262 251 file_uri, peg_rev, start_rev, end_rev,
263 252 receiver, svn.client.create_context())
264 253 except svn.core.SubversionException as exc:
265 254 log.exception("Error during blame operation.")
266 255 raise Exception(
267 256 "Blame not supported or file does not exist at path %s. "
268 257 "Error %s." % (path, exc))
269 258
270 259 return annotations
271 260
272 261 def get_node_type(self, wire, path, rev=None):
273 262 repo = self._factory.repo(wire)
274 263 fs_ptr = svn.repos.fs(repo)
275 264 if rev is None:
276 265 rev = svn.fs.youngest_rev(fs_ptr)
277 266 root = svn.fs.revision_root(fs_ptr, rev)
278 267 node = svn.fs.check_path(root, path)
279 268 return NODE_TYPE_MAPPING.get(node, None)
280 269
281 270 def get_nodes(self, wire, path, revision=None):
282 271 repo = self._factory.repo(wire)
283 272 fsobj = svn.repos.fs(repo)
284 273 if revision is None:
285 274 revision = svn.fs.youngest_rev(fsobj)
286 275 root = svn.fs.revision_root(fsobj, revision)
287 276 entries = svn.fs.dir_entries(root, path)
288 277 result = []
289 278 for entry_path, entry_info in entries.iteritems():
290 279 result.append(
291 280 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
292 281 return result
293 282
294 283 def get_file_content(self, wire, path, rev=None):
295 284 repo = self._factory.repo(wire)
296 285 fsobj = svn.repos.fs(repo)
297 286 if rev is None:
298 287 rev = svn.fs.youngest_revision(fsobj)
299 288 root = svn.fs.revision_root(fsobj, rev)
300 289 content = svn.core.Stream(svn.fs.file_contents(root, path))
301 290 return content.read()
302 291
303 292 def get_file_size(self, wire, path, revision=None):
304 293 repo = self._factory.repo(wire)
305 294 fsobj = svn.repos.fs(repo)
306 295 if revision is None:
307 296 revision = svn.fs.youngest_revision(fsobj)
308 297 root = svn.fs.revision_root(fsobj, revision)
309 298 size = svn.fs.file_length(root, path)
310 299 return size
311 300
312 301 def create_repository(self, wire, compatible_version=None):
313 302 log.info('Creating Subversion repository in path "%s"', wire['path'])
314 303 self._factory.repo(wire, create=True,
315 304 compatible_version=compatible_version)
316 305
317 306 def import_remote_repository(self, wire, src_url):
318 307 repo_path = wire['path']
319 308 if not self.is_path_valid_repository(wire, repo_path):
320 309 raise Exception(
321 310 "Path %s is not a valid Subversion repository." % repo_path)
322 311 # TODO: johbo: URL checks ?
323 312 rdump = subprocess.Popen(
324 313 ['svnrdump', 'dump', '--non-interactive', src_url],
325 314 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
326 315 load = subprocess.Popen(
327 316 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
328 317
329 318 # TODO: johbo: This can be a very long operation, might be better
330 319 # to track some kind of status and provide an api to check if the
331 320 # import is done.
332 321 rdump.wait()
333 322 load.wait()
334 323
335 324 if rdump.returncode != 0:
336 325 errors = rdump.stderr.read()
337 326 log.error('svnrdump dump failed: statuscode %s: message: %s',
338 327 rdump.returncode, errors)
339 328 reason = 'UNKNOWN'
340 329 if 'svnrdump: E230001:' in errors:
341 330 reason = 'INVALID_CERTIFICATE'
342 331 raise Exception(
343 332 'Failed to dump the remote repository from %s.' % src_url,
344 333 reason)
345 334 if load.returncode != 0:
346 335 raise Exception(
347 336 'Failed to load the dump of remote repository from %s.' %
348 337 (src_url, ))
349 338
350 339 def commit(self, wire, message, author, timestamp, updated, removed):
351 340 assert isinstance(message, str)
352 341 assert isinstance(author, str)
353 342
354 343 repo = self._factory.repo(wire)
355 344 fsobj = svn.repos.fs(repo)
356 345
357 346 rev = svn.fs.youngest_rev(fsobj)
358 347 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
359 348 txn_root = svn.fs.txn_root(txn)
360 349
361 350 for node in updated:
362 351 TxnNodeProcessor(node, txn_root).update()
363 352 for node in removed:
364 353 TxnNodeProcessor(node, txn_root).remove()
365 354
366 355 commit_id = svn.repos.fs_commit_txn(repo, txn)
367 356
368 357 if timestamp:
369 358 apr_time = apr_time_t(timestamp)
370 359 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
371 360 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
372 361
373 362 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
374 363 return commit_id
375 364
376 365 def diff(self, wire, rev1, rev2, path1=None, path2=None,
377 366 ignore_whitespace=False, context=3):
378 367
379 368 wire.update(cache=False)
380 369 repo = self._factory.repo(wire)
381 370 diff_creator = SvnDiffer(
382 371 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
383 372 try:
384 373 return diff_creator.generate_diff()
385 374 except svn.core.SubversionException as e:
386 375 log.exception(
387 376 "Error during diff operation operation. "
388 377 "Path might not exist %s, %s" % (path1, path2))
389 378 return ""
390 379
380 @reraise_safe_exceptions
381 def is_large_file(self, wire, path):
382 return False
383
391 384
392 385 class SvnDiffer(object):
393 386 """
394 387 Utility to create diffs based on difflib and the Subversion api
395 388 """
396 389
397 390 binary_content = False
398 391
399 392 def __init__(
400 393 self, repo, src_rev, src_path, tgt_rev, tgt_path,
401 394 ignore_whitespace, context):
402 395 self.repo = repo
403 396 self.ignore_whitespace = ignore_whitespace
404 397 self.context = context
405 398
406 399 fsobj = svn.repos.fs(repo)
407 400
408 401 self.tgt_rev = tgt_rev
409 402 self.tgt_path = tgt_path or ''
410 403 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
411 404 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
412 405
413 406 self.src_rev = src_rev
414 407 self.src_path = src_path or self.tgt_path
415 408 self.src_root = svn.fs.revision_root(fsobj, src_rev)
416 409 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
417 410
418 411 self._validate()
419 412
420 413 def _validate(self):
421 414 if (self.tgt_kind != svn.core.svn_node_none and
422 415 self.src_kind != svn.core.svn_node_none and
423 416 self.src_kind != self.tgt_kind):
424 417 # TODO: johbo: proper error handling
425 418 raise Exception(
426 419 "Source and target are not compatible for diff generation. "
427 420 "Source type: %s, target type: %s" %
428 421 (self.src_kind, self.tgt_kind))
429 422
430 423 def generate_diff(self):
431 424 buf = StringIO.StringIO()
432 425 if self.tgt_kind == svn.core.svn_node_dir:
433 426 self._generate_dir_diff(buf)
434 427 else:
435 428 self._generate_file_diff(buf)
436 429 return buf.getvalue()
437 430
438 431 def _generate_dir_diff(self, buf):
439 432 editor = DiffChangeEditor()
440 433 editor_ptr, editor_baton = svn.delta.make_editor(editor)
441 434 svn.repos.dir_delta2(
442 435 self.src_root,
443 436 self.src_path,
444 437 '', # src_entry
445 438 self.tgt_root,
446 439 self.tgt_path,
447 440 editor_ptr, editor_baton,
448 441 authorization_callback_allow_all,
449 442 False, # text_deltas
450 443 svn.core.svn_depth_infinity, # depth
451 444 False, # entry_props
452 445 False, # ignore_ancestry
453 446 )
454 447
455 448 for path, __, change in sorted(editor.changes):
456 449 self._generate_node_diff(
457 450 buf, change, path, self.tgt_path, path, self.src_path)
458 451
459 452 def _generate_file_diff(self, buf):
460 453 change = None
461 454 if self.src_kind == svn.core.svn_node_none:
462 455 change = "add"
463 456 elif self.tgt_kind == svn.core.svn_node_none:
464 457 change = "delete"
465 458 tgt_base, tgt_path = vcspath.split(self.tgt_path)
466 459 src_base, src_path = vcspath.split(self.src_path)
467 460 self._generate_node_diff(
468 461 buf, change, tgt_path, tgt_base, src_path, src_base)
469 462
470 463 def _generate_node_diff(
471 464 self, buf, change, tgt_path, tgt_base, src_path, src_base):
472 465
473 466 if self.src_rev == self.tgt_rev and tgt_base == src_base:
474 467 # makes consistent behaviour with git/hg to return empty diff if
475 468 # we compare same revisions
476 469 return
477 470
478 471 tgt_full_path = vcspath.join(tgt_base, tgt_path)
479 472 src_full_path = vcspath.join(src_base, src_path)
480 473
481 474 self.binary_content = False
482 475 mime_type = self._get_mime_type(tgt_full_path)
483 476
484 477 if mime_type and not mime_type.startswith('text'):
485 478 self.binary_content = True
486 479 buf.write("=" * 67 + '\n')
487 480 buf.write("Cannot display: file marked as a binary type.\n")
488 481 buf.write("svn:mime-type = %s\n" % mime_type)
489 482 buf.write("Index: %s\n" % (tgt_path, ))
490 483 buf.write("=" * 67 + '\n')
491 484 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
492 485 'tgt_path': tgt_path})
493 486
494 487 if change == 'add':
495 488 # TODO: johbo: SVN is missing a zero here compared to git
496 489 buf.write("new file mode 10644\n")
497 490
498 491 #TODO(marcink): intro to binary detection of svn patches
499 492 # if self.binary_content:
500 493 # buf.write('GIT binary patch\n')
501 494
502 495 buf.write("--- /dev/null\t(revision 0)\n")
503 496 src_lines = []
504 497 else:
505 498 if change == 'delete':
506 499 buf.write("deleted file mode 10644\n")
507 500
508 501 #TODO(marcink): intro to binary detection of svn patches
509 502 # if self.binary_content:
510 503 # buf.write('GIT binary patch\n')
511 504
512 505 buf.write("--- a/%s\t(revision %s)\n" % (
513 506 src_path, self.src_rev))
514 507 src_lines = self._svn_readlines(self.src_root, src_full_path)
515 508
516 509 if change == 'delete':
517 510 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
518 511 tgt_lines = []
519 512 else:
520 513 buf.write("+++ b/%s\t(revision %s)\n" % (
521 514 tgt_path, self.tgt_rev))
522 515 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
523 516
524 517 if not self.binary_content:
525 518 udiff = svn_diff.unified_diff(
526 519 src_lines, tgt_lines, context=self.context,
527 520 ignore_blank_lines=self.ignore_whitespace,
528 521 ignore_case=False,
529 522 ignore_space_changes=self.ignore_whitespace)
530 523 buf.writelines(udiff)
531 524
532 525 def _get_mime_type(self, path):
533 526 try:
534 527 mime_type = svn.fs.node_prop(
535 528 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
536 529 except svn.core.SubversionException:
537 530 mime_type = svn.fs.node_prop(
538 531 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
539 532 return mime_type
540 533
541 534 def _svn_readlines(self, fs_root, node_path):
542 535 if self.binary_content:
543 536 return []
544 537 node_kind = svn.fs.check_path(fs_root, node_path)
545 538 if node_kind not in (
546 539 svn.core.svn_node_file, svn.core.svn_node_symlink):
547 540 return []
548 541 content = svn.core.Stream(
549 542 svn.fs.file_contents(fs_root, node_path)).read()
550 543 return content.splitlines(True)
551 544
552 545
553 546 class DiffChangeEditor(svn.delta.Editor):
554 547 """
555 548 Records changes between two given revisions
556 549 """
557 550
558 551 def __init__(self):
559 552 self.changes = []
560 553
561 554 def delete_entry(self, path, revision, parent_baton, pool=None):
562 555 self.changes.append((path, None, 'delete'))
563 556
564 557 def add_file(
565 558 self, path, parent_baton, copyfrom_path, copyfrom_revision,
566 559 file_pool=None):
567 560 self.changes.append((path, 'file', 'add'))
568 561
569 562 def open_file(self, path, parent_baton, base_revision, file_pool=None):
570 563 self.changes.append((path, 'file', 'change'))
571 564
572 565
573 566 def authorization_callback_allow_all(root, path, pool):
574 567 return True
575 568
576 569
577 570 class TxnNodeProcessor(object):
578 571 """
579 572 Utility to process the change of one node within a transaction root.
580 573
581 574 It encapsulates the knowledge of how to add, update or remove
582 575 a node for a given transaction root. The purpose is to support the method
583 576 `SvnRemote.commit`.
584 577 """
585 578
586 579 def __init__(self, node, txn_root):
587 580 assert isinstance(node['path'], str)
588 581
589 582 self.node = node
590 583 self.txn_root = txn_root
591 584
592 585 def update(self):
593 586 self._ensure_parent_dirs()
594 587 self._add_file_if_node_does_not_exist()
595 588 self._update_file_content()
596 589 self._update_file_properties()
597 590
598 591 def remove(self):
599 592 svn.fs.delete(self.txn_root, self.node['path'])
600 593 # TODO: Clean up directory if empty
601 594
602 595 def _ensure_parent_dirs(self):
603 596 curdir = vcspath.dirname(self.node['path'])
604 597 dirs_to_create = []
605 598 while not self._svn_path_exists(curdir):
606 599 dirs_to_create.append(curdir)
607 600 curdir = vcspath.dirname(curdir)
608 601
609 602 for curdir in reversed(dirs_to_create):
610 603 log.debug('Creating missing directory "%s"', curdir)
611 604 svn.fs.make_dir(self.txn_root, curdir)
612 605
613 606 def _svn_path_exists(self, path):
614 607 path_status = svn.fs.check_path(self.txn_root, path)
615 608 return path_status != svn.core.svn_node_none
616 609
617 610 def _add_file_if_node_does_not_exist(self):
618 611 kind = svn.fs.check_path(self.txn_root, self.node['path'])
619 612 if kind == svn.core.svn_node_none:
620 613 svn.fs.make_file(self.txn_root, self.node['path'])
621 614
622 615 def _update_file_content(self):
623 616 assert isinstance(self.node['content'], str)
624 617 handler, baton = svn.fs.apply_textdelta(
625 618 self.txn_root, self.node['path'], None, None)
626 619 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
627 620
628 621 def _update_file_properties(self):
629 622 properties = self.node.get('properties', {})
630 623 for key, value in properties.iteritems():
631 624 svn.fs.change_node_prop(
632 625 self.txn_root, self.node['path'], key, value)
633 626
634 627
635 628 def apr_time_t(timestamp):
636 629 """
637 630 Convert a Python timestamp into APR timestamp type apr_time_t
638 631 """
639 632 return timestamp * 1E6
640 633
641 634
642 635 def svn_opt_revision_value_t(num):
643 636 """
644 637 Put `num` into a `svn_opt_revision_value_t` structure.
645 638 """
646 639 value = svn.core.svn_opt_revision_value_t()
647 640 value.number = num
648 641 revision = svn.core.svn_opt_revision_t()
649 642 revision.kind = svn.core.svn_opt_revision_number
650 643 revision.value = value
651 644 return revision
@@ -1,57 +1,72 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 def safe_int(val, default=None):
20 """
21 Returns int() of val if val is not convertable to int use default
22 instead
19 23
20 # TODO: johbo: That's a copy from rhodecode
24 :param val:
25 :param default:
26 """
27
28 try:
29 val = int(val)
30 except (ValueError, TypeError):
31 val = default
32
33 return val
34
35
21 36 def safe_str(unicode_, to_encoding=['utf8']):
22 37 """
23 38 safe str function. Does few trick to turn unicode_ into string
24 39
25 40 In case of UnicodeEncodeError, we try to return it with encoding detected
26 41 by chardet library if it fails fallback to string with errors replaced
27 42
28 43 :param unicode_: unicode to encode
29 44 :rtype: str
30 45 :returns: str object
31 46 """
32 47
33 48 # if it's not basestr cast to str
34 49 if not isinstance(unicode_, basestring):
35 50 return str(unicode_)
36 51
37 52 if isinstance(unicode_, str):
38 53 return unicode_
39 54
40 55 if not isinstance(to_encoding, (list, tuple)):
41 56 to_encoding = [to_encoding]
42 57
43 58 for enc in to_encoding:
44 59 try:
45 60 return unicode_.encode(enc)
46 61 except UnicodeEncodeError:
47 62 pass
48 63
49 64 try:
50 65 import chardet
51 66 encoding = chardet.detect(unicode_)['encoding']
52 67 if encoding is None:
53 68 raise UnicodeEncodeError()
54 69
55 70 return unicode_.encode(encoding)
56 71 except (ImportError, UnicodeEncodeError):
57 72 return unicode_.encode(to_encoding[0], 'replace')
General Comments 0
You need to be logged in to leave comments. Login now