##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r184:102735b3 merge stable
parent child Browse files
Show More
@@ -0,0 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19 from app import create_app
@@ -0,0 +1,276 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import re
19 import logging
20 from wsgiref.util import FileWrapper
21
22 import simplejson as json
23 from pyramid.config import Configurator
24 from pyramid.response import Response, FileIter
25 from pyramid.httpexceptions import (
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 HTTPUnprocessableEntity)
28
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.utils import safe_int
32
log = logging.getLogger(__name__)


# Content type used by git-lfs; the spec's batch API content type is
# 'application/vnd.git-lfs+json' — TODO confirm whether '+json' belongs here.
GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
# Splits an incoming path into (repo, info/lfs/<rest>, <rest>) groups.
GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
39
def write_response_error(http_exception, text=None):
    """
    Build a JSON error response from the given pyramid HTTP exception class.

    :param http_exception: pyramid ``HTTPException`` subclass to instantiate
    :param text: optional message, serialized as ``{"message": text}``
    :return: the exception instance (pyramid exceptions double as responses)
    """
    content_type = 'application/json'
    response = http_exception(content_type=content_type)
    # set again explicitly; the constructor kwarg alone is not trusted here
    response.content_type = content_type
    if text:
        response.body = json.dumps({'message': text})
    log.debug('LFS: writing response of type %s to client with text:%s',
              http_exception, text)
    return response
49
50
class AuthHeaderRequired(object):
    """
    Decorator to check if request has proper auth-header; requests without
    an Authorization header are rejected with 403.
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # views are invoked as (context, request) — the request is second
        request = fargs[1]
        if not request.authorization:
            return write_response_error(HTTPForbidden)
        # drop the context argument; the wrapped view expects (request,)
        return func(*fargs[1:], **fkwargs)
65
66
67 # views
68
def lfs_objects(request):
    """Deprecated v1 objects endpoint — always answers 501."""
    message = 'LFS: v1 api not supported'
    log.warning('LFS: v1 api not supported, reporting it back to client')
    return write_response_error(HTTPNotImplemented, message)
73
74
@AuthHeaderRequired()
def lfs_objects_batch(request):
    """
    Batch API endpoint. The client sends:

    operation - Should be download or upload.
    transfers - An optional Array of String identifiers for transfer
        adapters that the client has configured. If omitted, the basic
        transfer adapter MUST be assumed by the server.
    objects - An Array of objects to download/upload.
        oid - String OID of the LFS object.
        size - Integer byte size of the LFS object. Must be at least zero.
    """
    auth = request.authorization
    repo = request.matchdict.get('repo')
    data = request.json

    operation = data.get('operation')
    if operation not in ('download', 'upload'):
        log.debug('LFS: unsupported operation:%s', operation)
        return write_response_error(
            HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)

    if 'objects' not in data:
        log.debug('LFS: missing objects data')
        return write_response_error(
            HTTPBadRequest, 'missing objects data')

    log.debug('LFS: handling operation of type: %s', operation)

    objects = []
    for requested in data['objects']:
        try:
            oid = requested['oid']
            obj_size = requested['size']
        except KeyError:
            log.exception('LFS, failed to extract data')
            return write_response_error(
                HTTPBadRequest, 'unsupported data in objects')

        obj_data = {'oid': oid}
        obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid)
        obj_verify_href = request.route_url('lfs_objects_verify', repo=repo)
        store = LFSOidStore(
            oid, repo, store_location=request.registry.git_lfs_store_path)
        handler = OidHandler(
            store, repo, auth, oid, obj_size, obj_data,
            obj_href, obj_verify_href)

        # exec_operation also verifies the OID against the store
        actions, errors = handler.exec_operation(operation)
        if errors:
            log.warning('LFS: got following errors: %s', errors)
            obj_data['errors'] = errors
        if actions:
            obj_data['actions'] = actions

        obj_data['size'] = obj_size
        obj_data['authenticated'] = True
        objects.append(obj_data)

    result = {'objects': objects, 'transfer': 'basic'}
    log.debug('LFS Response %s', safe_result(result))
    return result
143
144
def lfs_objects_oid_upload(request):
    """Stream the request body into the LFS store under the given oid."""
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')
    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
    # copy in 64kb chunks so large objects never sit fully in memory
    with store.get_engine(mode='wb') as f:
        for chunk in FileWrapper(request.body_file_seekable, blksize=64 * 1024):
            f.write(chunk)
    return {'upload': 'ok'}
157
158
def lfs_objects_oid_download(request):
    """Serve the raw LFS object content for the requested oid, or 404."""
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)

    # TODO(marcink): support range header ?
    # Range: bytes=0-, `bytes=(\d+)\-.*`

    # FileIter takes ownership of the handle and streams it to the client
    oid_file = open(store.oid_path, 'rb')
    response = Response(
        content_type='application/octet-stream', app_iter=FileIter(oid_file))
    response.headers.add('X-RC-LFS-Response-Oid', str(oid))
    return response
178
179
def lfs_objects_verify(request):
    """
    Verify that a previously uploaded object exists in the store and that
    its stored size matches the client-reported size.

    Returns 400 when oid/size are missing, 404 when the oid is unknown,
    and 422 on a size mismatch.
    """
    repo = request.matchdict.get('repo')

    data = request.json
    oid = data.get('oid')
    size = safe_int(data.get('size'))

    # NOTE: size may legitimately be 0 (empty LFS object, spec says
    # "at least zero"), so test for None explicitly instead of truthiness
    if not oid or size is None:
        return write_response_error(
            HTTPBadRequest, 'missing oid and size in request data')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'oid `%s` does not exists in store' % oid)

    store_size = store.size_oid()
    if store_size != size:
        msg = 'requested file size mismatch store size:%s requested:%s' % (
            store_size, size)
        return write_response_error(
            HTTPUnprocessableEntity, msg)

    return {'message': {'size': 'ok', 'in_store': 'ok'}}
206
207
def lfs_objects_lock(request):
    """Locking API is not implemented — always answer 501."""
    message = 'GIT LFS locking api not supported'
    return write_response_error(HTTPNotImplemented, message)
211
212
def not_found(request):
    """Fallback view for unmatched LFS API paths."""
    message = 'request path not found'
    return write_response_error(HTTPNotFound, message)
216
217
def lfs_disabled(request):
    """Catch-all view used when LFS support is switched off for a repo."""
    message = 'GIT LFS disabled for this repo'
    return write_response_error(HTTPNotImplemented, message)
221
222
def git_lfs_app(config):
    """Register all git-lfs routes and views on the given Configurator.

    NOTE: registration order matters — 'lfs_objects_batch' is added before
    the generic 'lfs_objects_oid' route so that '/objects/batch' is matched
    as the batch endpoint and not captured as an oid.
    """

    # v1 API deprecation endpoint
    config.add_route('lfs_objects',
                     '/{repo:.*?[^/]}/info/lfs/objects')
    config.add_view(lfs_objects, route_name='lfs_objects',
                    request_method='POST', renderer='json')

    # locking API — registered but answers 501 (see lfs_objects_lock)
    config.add_route('lfs_objects_lock',
                     '/{repo:.*?[^/]}/info/lfs/locks')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
                    request_method=('POST', 'GET'), renderer='json')

    config.add_route('lfs_objects_lock_verify',
                     '/{repo:.*?[^/]}/info/lfs/locks/verify')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
                    request_method=('POST', 'GET'), renderer='json')

    # batch API
    config.add_route('lfs_objects_batch',
                     '/{repo:.*?[^/]}/info/lfs/objects/batch')
    config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
                    request_method='POST', renderer='json')

    # oid upload/download API
    config.add_route('lfs_objects_oid',
                     '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
    config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
                    request_method='PUT', renderer='json')
    config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
                    request_method='GET', renderer='json')

    # verification API
    config.add_route('lfs_objects_verify',
                     '/{repo:.*?[^/]}/info/lfs/verify')
    config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
                    request_method='POST', renderer='json')

    # not found handler for API
    config.add_notfound_view(not_found, renderer='json')
264
265
def create_app(git_lfs_enabled, git_lfs_store_path):
    """Build the git-lfs WSGI application.

    :param git_lfs_enabled: when False, every request answers that LFS
        is disabled for this repo (501)
    :param git_lfs_store_path: filesystem root for stored LFS objects
    """
    config = Configurator()
    if git_lfs_enabled:
        config.include(git_lfs_app)
        config.registry.git_lfs_store_path = git_lfs_store_path
    else:
        # not found handler for API, reporting disabled LFS support
        config.add_notfound_view(lfs_disabled, renderer='json')
    return config.make_wsgi_app()
@@ -0,0 +1,166 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import shutil
20 import logging
21 from collections import OrderedDict
22
23 log = logging.getLogger(__name__)
24
25
class OidHandler(object):
    """Builds LFS batch-API action descriptions (download/upload/verify)
    for a single object id backed by a store."""

    def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
                 obj_verify_href=None):
        self.current_store = store
        self.repo_name = repo_name
        self.auth = auth
        self.oid = oid
        self.obj_size = obj_size
        self.obj_data = obj_data
        self.obj_href = obj_href
        self.obj_verify_href = obj_verify_href

    def get_store(self, mode=None):
        """Return the store backing this oid (``mode`` is unused)."""
        return self.current_store

    def get_auth(self):
        """returns auth header for re-use in upload/download"""
        return " ".join(self.auth)

    def _make_action(self, href):
        # common shape of every action entry: target href + auth header
        return OrderedDict(
            href=href,
            header=OrderedDict([("Authorization", self.get_auth())]))

    def download(self):
        """Describe a download action; report a 404 error if the oid
        is not present in the store."""
        store = self.get_store()

        if not store.has_oid():
            # error reply back to client that something is wrong with dl
            err_msg = 'object: {} does not exist in store'.format(store.oid)
            has_errors = OrderedDict(
                error=OrderedDict(
                    code=404,
                    message=err_msg,
                )
            )
            return None, has_errors

        return OrderedDict(download=self._make_action(self.obj_href)), None

    def upload(self, skip_existing=True):
        """
        Write upload action for git-lfs server
        """
        store = self.get_store()

        # verify if we have the OID before, if we do, reply with empty
        if store.has_oid():
            log.debug('LFS: store already has oid %s', store.oid)
            if skip_existing:
                log.debug('LFS: skipping further action as oid is existing')
                return None, None

        response = OrderedDict(upload=self._make_action(self.obj_href))
        # if specified in handler, return the verification endpoint
        if self.obj_verify_href:
            response['verify'] = self._make_action(self.obj_verify_href)
        return response, None

    def exec_operation(self, operation, *args, **kwargs):
        """Dispatch to the method named by ``operation`` (download/upload)."""
        handler = getattr(self, operation)
        log.debug('LFS: handling request using %s handler', handler)
        return handler(*args, **kwargs)
105
106
class LFSOidStore(object):
    """Filesystem-backed storage for a single LFS object (oid)."""

    def __init__(self, oid, repo, store_location=None):
        self.oid = oid
        self.repo = repo
        self.store_path = store_location or self.get_default_store()
        # writes go to a tmp path first and are renamed into place on success
        self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
        self.oid_path = os.path.join(self.store_path, oid)
        self.fd = None

    def get_engine(self, mode):
        """
        engine = .get_engine(mode='wb')
        with engine as f:
            f.write('...')
        """

        class StoreEngine(object):
            """Context manager writing to a tmp file that is promoted to
            its final oid path only on a clean exit."""

            def __init__(self, mode, store_path, oid_path, tmp_oid_path):
                self.mode = mode
                self.store_path = store_path
                self.oid_path = oid_path
                self.tmp_oid_path = tmp_oid_path

            def __enter__(self):
                if not os.path.isdir(self.store_path):
                    os.makedirs(self.store_path)

                # TODO(marcink): maybe write metadata here with size/oid ?
                fd = open(self.tmp_oid_path, self.mode)
                self.fd = fd
                return fd

            def __exit__(self, exc_type, exc_value, traceback):
                # always close the tmp file first
                self.fd.close()
                if exc_type is None:
                    # clean exit: rename tmp file to final destination
                    shutil.move(self.tmp_oid_path, self.oid_path)
                else:
                    # write failed: drop the partial tmp file instead of
                    # publishing a truncated object under the real oid
                    if os.path.exists(self.tmp_oid_path):
                        os.remove(self.tmp_oid_path)

        return StoreEngine(
            mode, self.store_path, self.oid_path, self.tmp_oid_path)

    def get_default_store(self):
        """
        Default store, consistent with defaults of Mercurial large files store
        which is /home/username/.cache/largefiles
        """
        user_home = os.path.expanduser("~")
        return os.path.join(user_home, '.cache', 'lfs-store')

    def has_oid(self):
        """Return True if the object exists in the store."""
        return os.path.exists(os.path.join(self.store_path, self.oid))

    def size_oid(self):
        """Return the stored object's size in bytes, or -1 if missing."""
        size = -1

        if self.has_oid():
            oid = os.path.join(self.store_path, self.oid)
            size = os.stat(oid).st_size

        return size
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,237 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
21
22 from vcsserver.git_lfs.app import create_app
23
24
@pytest.fixture(scope='function')
def git_lfs_app(tmpdir):
    """WebTest wrapper around an LFS-enabled app, store rooted at tmpdir."""
    app = WebObTestApp(create_app(
        git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
    # remember the store path so tests can seed/inspect objects directly
    app._store = str(tmpdir)
    return app
31
32
@pytest.fixture()
def http_auth():
    """Extra WSGI environ entry carrying a dummy basic-auth header."""
    return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
36
37
class TestLFSApplication(object):
    """Integration tests for the git-lfs WSGI app, driven through WebTest.

    Object content is seeded by writing files directly into the
    ``git_lfs_app._store`` directory rather than going through the API.
    NOTE(review): file writes pass native str to 'wb'-mode handles —
    py2-era code; verify under py3.
    """

    def test_app_wrong_path(self, git_lfs_app):
        git_lfs_app.get('/repo/info/lfs/xxx', status=404)

    def test_app_deprecated_endpoint(self, git_lfs_app):
        # v1 objects endpoint answers 501 (see lfs_objects view)
        response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
        assert response.status_code == 501
        assert response.json == {u'message': u'LFS: v1 api not supported'}

    def test_app_lock_verify_api_not_available(self, git_lfs_app):
        response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
        assert response.status_code == 501
        assert response.json == {
            u'message': u'GIT LFS locking api not supported'}

    def test_app_lock_api_not_available(self, git_lfs_app):
        response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
        assert response.status_code == 501
        assert response.json == {
            u'message': u'GIT LFS locking api not supported'}

    def test_app_batch_api_missing_auth(self, git_lfs_app,):
        # batch endpoint requires an Authorization header -> 403
        git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={}, status=403)

    def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={}, status=400,
            extra_environ=http_auth)
        assert response.json == {
            u'message': u'unsupported operation mode: `None`'}

    def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={'operation': 'download'},
            status=400, extra_environ=http_auth)
        assert response.json == {
            u'message': u'missing objects data'}

    def test_app_batch_api_unsupported_data_in_objects(
            self, git_lfs_app, http_auth):
        params = {'operation': 'download',
                  'objects': [{}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params, status=400,
            extra_environ=http_auth)
        assert response.json == {
            u'message': u'unsupported data in objects'}

    def test_app_batch_api_download_missing_object(
            self, git_lfs_app, http_auth):
        params = {'operation': 'download',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)

        # a missing oid is reported per-object, not as an HTTP-level error
        expected_objects = [
            {u'authenticated': True,
             u'errors': {u'error': {
                 u'code': 404,
                 u'message': u'object: 123 does not exist in store'}},
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert response.json == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_download(self, git_lfs_app, http_auth):
        oid = '456'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'operation': 'download',
                  'objects': [{'oid': oid, 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)

        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'download': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/objects/456'},
             },
             u'oid': u'456',
             u'size': u'1024'}
        ]
        assert response.json == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_upload(self, git_lfs_app, http_auth):
        params = {'operation': 'upload',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)
        # upload replies include both the upload and verify endpoints
        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'upload': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/objects/123'},
                 u'verify': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/verify'}
             },
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert response.json == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_verify_api_missing_data(self, git_lfs_app):
        params = {'oid': 'missing',}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params,
            status=400)

        assert response.json == {
            u'message': u'missing oid and size in request data'}

    def test_app_verify_api_missing_obj(self, git_lfs_app):
        params = {'oid': 'missing', 'size': '1024'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params,
            status=404)

        assert response.json == {
            u'message': u'oid `missing` does not exists in store'}

    def test_app_verify_api_size_mismatch(self, git_lfs_app):
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        # stored content is 11 bytes ('OID_CONTENT') but 1024 was claimed
        params = {'oid': oid, 'size': '1024'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params, status=422)

        assert response.json == {
            u'message': u'requested file size mismatch '
                        u'store size:11 requested:1024'}

    def test_app_verify_api(self, git_lfs_app):
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'oid': oid, 'size': 11}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params)

        assert response.json == {
            u'message': {u'size': u'ok', u'in_store': u'ok'}}

    def test_app_download_api_oid_not_existing(self, git_lfs_app):
        oid = 'missing'

        response = git_lfs_app.get(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)

        assert response.json == {
            u'message': u'requested file with oid `missing` not found in store'}

    def test_app_download_api(self, git_lfs_app):
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        response = git_lfs_app.get(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid))
        assert response

    def test_app_upload(self, git_lfs_app):
        oid = 'uploaded'

        response = git_lfs_app.put(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')

        assert response.json == {u'upload': u'ok'}

        # verify that we actually wrote that OID
        oid_path = os.path.join(git_lfs_app._store, oid)
        assert os.path.isfile(oid_path)
        assert 'CONTENT' == open(oid_path).read()
@@ -0,0 +1,123 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import pytest
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21
22
@pytest.fixture()
def lfs_store(tmpdir):
    """Store for a fixed test oid, rooted in the pytest tmpdir."""
    return LFSOidStore(oid='123456789', repo='test', store_location=str(tmpdir))
29
30
@pytest.fixture()
def oid_handler(lfs_store):
    """OidHandler wired to the lfs_store fixture with dummy auth/hrefs."""
    return OidHandler(
        store=lfs_store, repo_name=lfs_store.repo, auth=('basic', 'xxxx'),
        oid=lfs_store.oid,
        obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
        obj_verify_href='http://localhost/verify')
43
44
class TestOidHandler(object):
    """Unit tests for OidHandler download/upload action generation."""

    @pytest.mark.parametrize('exec_action', [
        'download',
        'upload',
    ])
    def test_exec_action(self, exec_action, oid_handler):
        handler = oid_handler.exec_operation(exec_action)
        assert handler

    def test_exec_action_undefined(self, oid_handler):
        # exec_operation dispatches via getattr, so unknown ops raise
        with pytest.raises(AttributeError):
            oid_handler.exec_operation('wrong')

    def test_download_oid_not_existing(self, oid_handler):
        response, has_errors = oid_handler.exec_operation('download')

        assert response is None
        assert has_errors['error'] == {
            'code': 404,
            'message': 'object: 123456789 does not exist in store'}

    def test_download_oid(self, oid_handler):
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')

        response, has_errors = oid_handler.exec_operation('download')

        assert has_errors is None
        assert response['download'] == {
            'header': {'Authorization': 'basic xxxx'},
            'href': 'http://localhost/handle_oid'
        }

    def test_upload_oid_that_exists(self, oid_handler):
        # existing oid + default skip_existing=True -> empty reply
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')

        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response is None

    def test_upload_oid(self, oid_handler):
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response['upload'] == {
            'header': {'Authorization': 'basic xxxx'},
            'href': 'http://localhost/handle_oid'
        }
102
103
class TestLFSStore(object):
    """Unit tests for LFSOidStore write/query behaviour."""
    def test_write_oid(self, lfs_store):
        oid_location = lfs_store.oid_path

        assert not os.path.isfile(oid_location)

        engine = lfs_store.get_engine(mode='wb')
        with engine as f:
            f.write('CONTENT')

        # the engine renames the tmp file into place on context exit
        assert os.path.isfile(oid_location)

    def test_detect_has_oid(self, lfs_store):

        assert lfs_store.has_oid() is False
        engine = lfs_store.get_engine(mode='wb')
        with engine as f:
            f.write('CONTENT')

        assert lfs_store.has_oid() is True
@@ -0,0 +1,50 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import copy
18 from functools import wraps
19
20
def get_cython_compat_decorator(wrapper, func):
    """
    Creates a cython compatible decorator. The previously used
    decorator.decorator() function seems to be incompatible with cython.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    @wraps(func)
    def _decorated(*args, **kwds):
        return wrapper(func, *args, **kwds)
    # expose the original callable, mirroring functools conventions
    _decorated.__wrapped__ = func
    return _decorated
34
35
def safe_result(result):
    """clean result for better representation in logs"""
    clean_copy = copy.deepcopy(result)

    try:
        # mask auth headers on every action of every object entry
        for oid_data in clean_copy.get('objects', []):
            for data in oid_data.get('actions', {}).values():
                if 'header' in data:
                    data['header'] = {'Authorization': '*****'}
    except Exception:
        # best-effort: fall back to the raw result on any shape mismatch
        return result

    return clean_copy
@@ -1,6 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.6.1
2 current_version = 4.7.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
6
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.6.1
13 version = 4.7.0
16
14
@@ -1,111 +1,109 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
3 # #
4 ################################################################################
4 ################################################################################
5
5
6
6
7 [server:main]
7 [server:main]
8 ## COMMON ##
8 ## COMMON ##
9 host = 127.0.0.1
9 host = 127.0.0.1
10 port = 9900
10 port = 9900
11
11
12
12
13 ##########################
13 ##########################
14 ## GUNICORN WSGI SERVER ##
14 ## GUNICORN WSGI SERVER ##
15 ##########################
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. You must set `instance_id = *`
18 ## Sets the number of process workers. Recommended
19 ## when this option is set to more than one worker, recommended
20 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
21 ## The `instance_id = *` must be set in the [app:main] section below
22 workers = 2
20 workers = 2
23 ## process name
21 ## process name
24 proc_name = rhodecode_vcsserver
22 proc_name = rhodecode_vcsserver
25 ## type of worker class, one of sync, gevent
23 ## type of worker class, one of sync, gevent
26 ## recommended for bigger setup is using of of other than sync one
24 ## recommended for bigger setup is using of of other than sync one
27 worker_class = sync
25 worker_class = sync
28 ## The maximum number of simultaneous clients. Valid only for Gevent
26 ## The maximum number of simultaneous clients. Valid only for Gevent
29 #worker_connections = 10
27 #worker_connections = 10
30 ## max number of requests that worker will handle before being gracefully
28 ## max number of requests that worker will handle before being gracefully
31 ## restarted, could prevent memory leaks
29 ## restarted, could prevent memory leaks
32 max_requests = 1000
30 max_requests = 1000
33 max_requests_jitter = 30
31 max_requests_jitter = 30
34 ## amount of time a worker can spend with handling a request before it
32 ## amount of time a worker can spend with handling a request before it
35 ## gets killed and restarted. Set to 6hrs
33 ## gets killed and restarted. Set to 6hrs
36 timeout = 21600
34 timeout = 21600
37
35
38
36
39 [app:main]
37 [app:main]
40 use = egg:rhodecode-vcsserver
38 use = egg:rhodecode-vcsserver
41
39
42 pyramid.default_locale_name = en
40 pyramid.default_locale_name = en
43 pyramid.includes =
41 pyramid.includes =
44
42
45 ## default locale used by VCS systems
43 ## default locale used by VCS systems
46 locale = en_US.UTF-8
44 locale = en_US.UTF-8
47
45
48 # cache regions, please don't change
46 # cache regions, please don't change
49 beaker.cache.regions = repo_object
47 beaker.cache.regions = repo_object
50 beaker.cache.repo_object.type = memorylru
48 beaker.cache.repo_object.type = memorylru
51 beaker.cache.repo_object.max_items = 100
49 beaker.cache.repo_object.max_items = 100
52 # cache auto-expires after N seconds
50 # cache auto-expires after N seconds
53 beaker.cache.repo_object.expire = 300
51 beaker.cache.repo_object.expire = 300
54 beaker.cache.repo_object.enabled = true
52 beaker.cache.repo_object.enabled = true
55
53
56
54
57 ################################
55 ################################
58 ### LOGGING CONFIGURATION ####
56 ### LOGGING CONFIGURATION ####
59 ################################
57 ################################
60 [loggers]
58 [loggers]
61 keys = root, vcsserver, pyro4, beaker
59 keys = root, vcsserver, pyro4, beaker
62
60
63 [handlers]
61 [handlers]
64 keys = console
62 keys = console
65
63
66 [formatters]
64 [formatters]
67 keys = generic
65 keys = generic
68
66
69 #############
67 #############
70 ## LOGGERS ##
68 ## LOGGERS ##
71 #############
69 #############
72 [logger_root]
70 [logger_root]
73 level = NOTSET
71 level = NOTSET
74 handlers = console
72 handlers = console
75
73
76 [logger_vcsserver]
74 [logger_vcsserver]
77 level = DEBUG
75 level = DEBUG
78 handlers =
76 handlers =
79 qualname = vcsserver
77 qualname = vcsserver
80 propagate = 1
78 propagate = 1
81
79
82 [logger_beaker]
80 [logger_beaker]
83 level = DEBUG
81 level = DEBUG
84 handlers =
82 handlers =
85 qualname = beaker
83 qualname = beaker
86 propagate = 1
84 propagate = 1
87
85
88 [logger_pyro4]
86 [logger_pyro4]
89 level = DEBUG
87 level = DEBUG
90 handlers =
88 handlers =
91 qualname = Pyro4
89 qualname = Pyro4
92 propagate = 1
90 propagate = 1
93
91
94
92
95 ##############
93 ##############
96 ## HANDLERS ##
94 ## HANDLERS ##
97 ##############
95 ##############
98
96
99 [handler_console]
97 [handler_console]
100 class = StreamHandler
98 class = StreamHandler
101 args = (sys.stderr,)
99 args = (sys.stderr,)
102 level = DEBUG
100 level = DEBUG
103 formatter = generic
101 formatter = generic
104
102
105 ################
103 ################
106 ## FORMATTERS ##
104 ## FORMATTERS ##
107 ################
105 ################
108
106
109 [formatter_generic]
107 [formatter_generic]
110 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
108 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
111 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
109 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,153 +1,154 b''
1 # Nix environment for the community edition
1 # Nix environment for the community edition
2 #
2 #
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6
6
7 { pkgs ? (import <nixpkgs> {})
7 { pkgs ? (import <nixpkgs> {})
8 , pythonPackages ? "python27Packages"
8 , pythonPackages ? "python27Packages"
9 , pythonExternalOverrides ? self: super: {}
9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? true
10 , doCheck ? true
11 }:
11 }:
12
12
13 let pkgs_ = pkgs; in
13 let pkgs_ = pkgs; in
14
14
15 let
15 let
16 pkgs = pkgs_.overridePackages (self: super: {
16 pkgs = pkgs_.overridePackages (self: super: {
17 # bump GIT version
17 # bump GIT version
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 name = "git-2.9.3";
19 name = "git-2.9.3";
20 src = pkgs.fetchurl {
20 src = pkgs.fetchurl {
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.9.3.tar.xz";
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.9.3.tar.xz";
22 sha256 = "0qzs681a64k3shh5p0rg41l1z16fbk5sj0xga45k34hp1hsp654z";
22 sha256 = "0qzs681a64k3shh5p0rg41l1z16fbk5sj0xga45k34hp1hsp654z";
23 };
23 };
24
24
25 });
25 });
26
26
27 # Override subversion derivation to
27 # Override subversion derivation to
28 # - activate python bindings
28 # - activate python bindings
29 subversion = let
29 subversion = let
30 subversionWithPython = super.subversion.override {
30 subversionWithPython = super.subversion.override {
31 httpSupport = true;
31 httpSupport = true;
32 pythonBindings = true;
32 pythonBindings = true;
33 python = self.python27Packages.python;
33 python = self.python27Packages.python;
34 };
34 };
35
35
36 in
36 in
37
37
38 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
38 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
39 patches = (oldAttrs.patches or []) ++
39 patches = (oldAttrs.patches or []) ++
40 pkgs.lib.optionals pkgs.stdenv.isDarwin [
40 pkgs.lib.optionals pkgs.stdenv.isDarwin [
41 # johbo: "import svn.client" fails on darwin currently.
41 # johbo: "import svn.client" fails on darwin currently.
42 ./pkgs/subversion-1.9.4-darwin.patch
42 ./pkgs/subversion-1.9.4-darwin.patch
43 ];
43 ];
44 });
44 });
45
45
46 });
46 });
47
47
48 inherit (pkgs.lib) fix extends;
48 inherit (pkgs.lib) fix extends;
49 basePythonPackages = with builtins; if isAttrs pythonPackages
49 basePythonPackages = with builtins; if isAttrs pythonPackages
50 then pythonPackages
50 then pythonPackages
51 else getAttr pythonPackages pkgs;
51 else getAttr pythonPackages pkgs;
52
52
53 elem = builtins.elem;
53 elem = builtins.elem;
54 basename = path: with pkgs.lib; last (splitString "/" path);
54 basename = path: with pkgs.lib; last (splitString "/" path);
55 startsWith = prefix: full: let
55 startsWith = prefix: full: let
56 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
56 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
57 in actualPrefix == prefix;
57 in actualPrefix == prefix;
58
58
59 src-filter = path: type: with pkgs.lib;
59 src-filter = path: type: with pkgs.lib;
60 let
60 let
61 ext = last (splitString "." path);
61 ext = last (splitString "." path);
62 in
62 in
63 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
63 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
64 "node_modules" "build" "data" "tmp"] &&
64 "node_modules" "build" "data" "tmp"] &&
65 !elem ext ["egg-info" "pyc"] &&
65 !elem ext ["egg-info" "pyc"] &&
66 !startsWith "result" path;
66 !startsWith "result" path;
67
67
68 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
68 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
69
69
70 pythonGeneratedPackages = self: basePythonPackages.override (a: {
70 pythonGeneratedPackages = self: basePythonPackages.override (a: {
71 inherit self;
71 inherit self;
72 }) // (scopedImport {
72 }) // (scopedImport {
73 self = self;
73 self = self;
74 super = basePythonPackages;
74 super = basePythonPackages;
75 inherit pkgs;
75 inherit pkgs;
76 inherit (pkgs) fetchurl fetchgit;
76 inherit (pkgs) fetchurl fetchgit;
77 } ./pkgs/python-packages.nix);
77 } ./pkgs/python-packages.nix);
78
78
79 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
79 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
80 inherit basePythonPackages pkgs;
80 inherit basePythonPackages pkgs;
81 };
81 };
82
82
83 version = builtins.readFile ./vcsserver/VERSION;
83 version = builtins.readFile ./vcsserver/VERSION;
84
84
85 pythonLocalOverrides = self: super: {
85 pythonLocalOverrides = self: super: {
86 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
86 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
87 inherit doCheck version;
87 inherit doCheck version;
88
88
89 name = "rhodecode-vcsserver-${version}";
89 name = "rhodecode-vcsserver-${version}";
90 releaseName = "RhodeCodeVCSServer-${version}";
90 releaseName = "RhodeCodeVCSServer-${version}";
91 src = rhodecode-vcsserver-src;
91 src = rhodecode-vcsserver-src;
92 dontStrip = true; # prevent strip, we don't need it.
92
93
93 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
94 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
94 pkgs.git
95 pkgs.git
95 pkgs.subversion
96 pkgs.subversion
96 ]);
97 ]);
97
98
98 # TODO: johbo: Make a nicer way to expose the parts. Maybe
99 # TODO: johbo: Make a nicer way to expose the parts. Maybe
99 # pkgs/default.nix?
100 # pkgs/default.nix?
100 passthru = {
101 passthru = {
101 pythonPackages = self;
102 pythonPackages = self;
102 };
103 };
103
104
104 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
105 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
105 preCheck = ''
106 preCheck = ''
106 export PATH="$out/bin:$PATH"
107 export PATH="$out/bin:$PATH"
107 '';
108 '';
108
109
109 # put custom attrs here
110 # put custom attrs here
110 checkPhase = ''
111 checkPhase = ''
111 runHook preCheck
112 runHook preCheck
112 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
113 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
113 runHook postCheck
114 runHook postCheck
114 '';
115 '';
115
116
116 postInstall = ''
117 postInstall = ''
117 echo "Writing meta information for rccontrol to nix-support/rccontrol"
118 echo "Writing meta information for rccontrol to nix-support/rccontrol"
118 mkdir -p $out/nix-support/rccontrol
119 mkdir -p $out/nix-support/rccontrol
119 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
120 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
120 echo "DONE: Meta information for rccontrol written"
121 echo "DONE: Meta information for rccontrol written"
121
122
122 ln -s ${self.pyramid}/bin/* $out/bin/
123 ln -s ${self.pyramid}/bin/* $out/bin/
123 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
124 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
124
125
125 # Symlink version control utilities
126 # Symlink version control utilities
126 #
127 #
127 # We ensure that always the correct version is available as a symlink.
128 # We ensure that always the correct version is available as a symlink.
128 # So that users calling them via the profile path will always use the
129 # So that users calling them via the profile path will always use the
129 # correct version.
130 # correct version.
130 ln -s ${pkgs.git}/bin/git $out/bin
131 ln -s ${pkgs.git}/bin/git $out/bin
131 ln -s ${self.mercurial}/bin/hg $out/bin
132 ln -s ${self.mercurial}/bin/hg $out/bin
132 ln -s ${pkgs.subversion}/bin/svn* $out/bin
133 ln -s ${pkgs.subversion}/bin/svn* $out/bin
133
134
134 for file in $out/bin/*; do
135 for file in $out/bin/*; do
135 wrapProgram $file \
136 wrapProgram $file \
136 --set PATH $PATH \
137 --set PATH $PATH \
137 --set PYTHONPATH $PYTHONPATH \
138 --set PYTHONPATH $PYTHONPATH \
138 --set PYTHONHASHSEED random
139 --set PYTHONHASHSEED random
139 done
140 done
140 '';
141 '';
141
142
142 });
143 });
143 };
144 };
144
145
145 # Apply all overrides and fix the final package set
146 # Apply all overrides and fix the final package set
146 myPythonPackages =
147 myPythonPackages =
147 (fix
148 (fix
148 (extends pythonExternalOverrides
149 (extends pythonExternalOverrides
149 (extends pythonLocalOverrides
150 (extends pythonLocalOverrides
150 (extends pythonOverrides
151 (extends pythonOverrides
151 pythonGeneratedPackages))));
152 pythonGeneratedPackages))));
152
153
153 in myPythonPackages.rhodecode-vcsserver
154 in myPythonPackages.rhodecode-vcsserver
@@ -1,812 +1,812 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.4.0
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 {
5 Beaker = super.buildPythonPackage {
5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.7.0";
6 name = "Beaker-1.7.0";
7 buildInputs = with self; [];
7 buildInputs = with self; [];
8 doCheck = false;
8 doCheck = false;
9 propagatedBuildInputs = with self; [];
9 propagatedBuildInputs = with self; [];
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
12 md5 = "386be3f7fe427358881eee4622b428b3";
12 md5 = "386be3f7fe427358881eee4622b428b3";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
16 };
17 };
17 };
18 Jinja2 = super.buildPythonPackage {
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.8";
19 name = "Jinja2-2.8";
20 buildInputs = with self; [];
20 buildInputs = with self; [];
21 doCheck = false;
21 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
22 propagatedBuildInputs = with self; [MarkupSafe];
23 src = fetchurl {
23 src = fetchurl {
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
26 };
26 };
27 meta = {
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
29 };
30 };
30 };
31 Mako = super.buildPythonPackage {
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.6";
32 name = "Mako-1.0.6";
33 buildInputs = with self; [];
33 buildInputs = with self; [];
34 doCheck = false;
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
39 };
39 };
40 meta = {
40 meta = {
41 license = [ pkgs.lib.licenses.mit ];
41 license = [ pkgs.lib.licenses.mit ];
42 };
42 };
43 };
43 };
44 MarkupSafe = super.buildPythonPackage {
44 MarkupSafe = super.buildPythonPackage {
45 name = "MarkupSafe-0.23";
45 name = "MarkupSafe-0.23";
46 buildInputs = with self; [];
46 buildInputs = with self; [];
47 doCheck = false;
47 doCheck = false;
48 propagatedBuildInputs = with self; [];
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
49 src = fetchurl {
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
52 };
52 };
53 meta = {
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
55 };
56 };
56 };
57 PasteDeploy = super.buildPythonPackage {
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
59 buildInputs = with self; [];
60 doCheck = false;
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
64 md5 = "352b7205c78c8de4987578d19431af3b";
65 };
65 };
66 meta = {
66 meta = {
67 license = [ pkgs.lib.licenses.mit ];
67 license = [ pkgs.lib.licenses.mit ];
68 };
68 };
69 };
69 };
70 Pyro4 = super.buildPythonPackage {
70 Pyro4 = super.buildPythonPackage {
71 name = "Pyro4-4.41";
71 name = "Pyro4-4.41";
72 buildInputs = with self; [];
72 buildInputs = with self; [];
73 doCheck = false;
73 doCheck = false;
74 propagatedBuildInputs = with self; [serpent];
74 propagatedBuildInputs = with self; [serpent];
75 src = fetchurl {
75 src = fetchurl {
76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
78 };
78 };
79 meta = {
79 meta = {
80 license = [ pkgs.lib.licenses.mit ];
80 license = [ pkgs.lib.licenses.mit ];
81 };
81 };
82 };
82 };
83 WebOb = super.buildPythonPackage {
83 WebOb = super.buildPythonPackage {
84 name = "WebOb-1.3.1";
84 name = "WebOb-1.3.1";
85 buildInputs = with self; [];
85 buildInputs = with self; [];
86 doCheck = false;
86 doCheck = false;
87 propagatedBuildInputs = with self; [];
87 propagatedBuildInputs = with self; [];
88 src = fetchurl {
88 src = fetchurl {
89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
90 md5 = "20918251c5726956ba8fef22d1556177";
90 md5 = "20918251c5726956ba8fef22d1556177";
91 };
91 };
92 meta = {
92 meta = {
93 license = [ pkgs.lib.licenses.mit ];
93 license = [ pkgs.lib.licenses.mit ];
94 };
94 };
95 };
95 };
96 WebTest = super.buildPythonPackage {
96 WebTest = super.buildPythonPackage {
97 name = "WebTest-1.4.3";
97 name = "WebTest-1.4.3";
98 buildInputs = with self; [];
98 buildInputs = with self; [];
99 doCheck = false;
99 doCheck = false;
100 propagatedBuildInputs = with self; [WebOb];
100 propagatedBuildInputs = with self; [WebOb];
101 src = fetchurl {
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
103 md5 = "631ce728bed92c681a4020a36adbc353";
103 md5 = "631ce728bed92c681a4020a36adbc353";
104 };
104 };
105 meta = {
105 meta = {
106 license = [ pkgs.lib.licenses.mit ];
106 license = [ pkgs.lib.licenses.mit ];
107 };
107 };
108 };
108 };
109 backports.shutil-get-terminal-size = super.buildPythonPackage {
109 backports.shutil-get-terminal-size = super.buildPythonPackage {
110 name = "backports.shutil-get-terminal-size-1.0.0";
110 name = "backports.shutil-get-terminal-size-1.0.0";
111 buildInputs = with self; [];
111 buildInputs = with self; [];
112 doCheck = false;
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
116 md5 = "03267762480bd86b50580dc19dff3c66";
116 md5 = "03267762480bd86b50580dc19dff3c66";
117 };
117 };
118 meta = {
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
119 license = [ pkgs.lib.licenses.mit ];
120 };
120 };
121 };
121 };
122 configobj = super.buildPythonPackage {
122 configobj = super.buildPythonPackage {
123 name = "configobj-5.0.6";
123 name = "configobj-5.0.6";
124 buildInputs = with self; [];
124 buildInputs = with self; [];
125 doCheck = false;
125 doCheck = false;
126 propagatedBuildInputs = with self; [six];
126 propagatedBuildInputs = with self; [six];
127 src = fetchurl {
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 };
130 };
131 meta = {
131 meta = {
132 license = [ pkgs.lib.licenses.bsdOriginal ];
132 license = [ pkgs.lib.licenses.bsdOriginal ];
133 };
133 };
134 };
134 };
135 cov-core = super.buildPythonPackage {
135 cov-core = super.buildPythonPackage {
136 name = "cov-core-1.15.0";
136 name = "cov-core-1.15.0";
137 buildInputs = with self; [];
137 buildInputs = with self; [];
138 doCheck = false;
138 doCheck = false;
139 propagatedBuildInputs = with self; [coverage];
139 propagatedBuildInputs = with self; [coverage];
140 src = fetchurl {
140 src = fetchurl {
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
143 };
143 };
144 meta = {
144 meta = {
145 license = [ pkgs.lib.licenses.mit ];
145 license = [ pkgs.lib.licenses.mit ];
146 };
146 };
147 };
147 };
148 coverage = super.buildPythonPackage {
148 coverage = super.buildPythonPackage {
149 name = "coverage-3.7.1";
149 name = "coverage-3.7.1";
150 buildInputs = with self; [];
150 buildInputs = with self; [];
151 doCheck = false;
151 doCheck = false;
152 propagatedBuildInputs = with self; [];
152 propagatedBuildInputs = with self; [];
153 src = fetchurl {
153 src = fetchurl {
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
156 };
156 };
157 meta = {
157 meta = {
158 license = [ pkgs.lib.licenses.bsdOriginal ];
158 license = [ pkgs.lib.licenses.bsdOriginal ];
159 };
159 };
160 };
160 };
161 decorator = super.buildPythonPackage {
161 decorator = super.buildPythonPackage {
162 name = "decorator-4.0.10";
162 name = "decorator-4.0.11";
163 buildInputs = with self; [];
163 buildInputs = with self; [];
164 doCheck = false;
164 doCheck = false;
165 propagatedBuildInputs = with self; [];
165 propagatedBuildInputs = with self; [];
166 src = fetchurl {
166 src = fetchurl {
167 url = "https://pypi.python.org/packages/13/8a/4eed41e338e8dcc13ca41c94b142d4d20c0de684ee5065523fee406ce76f/decorator-4.0.10.tar.gz";
167 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
168 md5 = "434b57fdc3230c500716c5aff8896100";
168 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
169 };
169 };
170 meta = {
170 meta = {
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 };
172 };
173 };
173 };
174 dulwich = super.buildPythonPackage {
174 dulwich = super.buildPythonPackage {
175 name = "dulwich-0.13.0";
175 name = "dulwich-0.13.0";
176 buildInputs = with self; [];
176 buildInputs = with self; [];
177 doCheck = false;
177 doCheck = false;
178 propagatedBuildInputs = with self; [];
178 propagatedBuildInputs = with self; [];
179 src = fetchurl {
179 src = fetchurl {
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
182 };
182 };
183 meta = {
183 meta = {
184 license = [ pkgs.lib.licenses.gpl2Plus ];
184 license = [ pkgs.lib.licenses.gpl2Plus ];
185 };
185 };
186 };
186 };
187 enum34 = super.buildPythonPackage {
187 enum34 = super.buildPythonPackage {
188 name = "enum34-1.1.6";
188 name = "enum34-1.1.6";
189 buildInputs = with self; [];
189 buildInputs = with self; [];
190 doCheck = false;
190 doCheck = false;
191 propagatedBuildInputs = with self; [];
191 propagatedBuildInputs = with self; [];
192 src = fetchurl {
192 src = fetchurl {
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
195 };
195 };
196 meta = {
196 meta = {
197 license = [ pkgs.lib.licenses.bsdOriginal ];
197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 };
198 };
199 };
199 };
200 gevent = super.buildPythonPackage {
200 gevent = super.buildPythonPackage {
201 name = "gevent-1.1.2";
201 name = "gevent-1.1.2";
202 buildInputs = with self; [];
202 buildInputs = with self; [];
203 doCheck = false;
203 doCheck = false;
204 propagatedBuildInputs = with self; [greenlet];
204 propagatedBuildInputs = with self; [greenlet];
205 src = fetchurl {
205 src = fetchurl {
206 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
206 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
207 md5 = "bb32a2f852a4997138014d5007215c6e";
207 md5 = "bb32a2f852a4997138014d5007215c6e";
208 };
208 };
209 meta = {
209 meta = {
210 license = [ pkgs.lib.licenses.mit ];
210 license = [ pkgs.lib.licenses.mit ];
211 };
211 };
212 };
212 };
213 gprof2dot = super.buildPythonPackage {
213 gprof2dot = super.buildPythonPackage {
214 name = "gprof2dot-2016.10.13";
214 name = "gprof2dot-2016.10.13";
215 buildInputs = with self; [];
215 buildInputs = with self; [];
216 doCheck = false;
216 doCheck = false;
217 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = with self; [];
218 src = fetchurl {
218 src = fetchurl {
219 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
219 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
220 md5 = "0125401f15fd2afe1df686a76c64a4fd";
220 md5 = "0125401f15fd2afe1df686a76c64a4fd";
221 };
221 };
222 meta = {
222 meta = {
223 license = [ { fullName = "LGPL"; } ];
223 license = [ { fullName = "LGPL"; } ];
224 };
224 };
225 };
225 };
226 greenlet = super.buildPythonPackage {
226 greenlet = super.buildPythonPackage {
227 name = "greenlet-0.4.10";
227 name = "greenlet-0.4.10";
228 buildInputs = with self; [];
228 buildInputs = with self; [];
229 doCheck = false;
229 doCheck = false;
230 propagatedBuildInputs = with self; [];
230 propagatedBuildInputs = with self; [];
231 src = fetchurl {
231 src = fetchurl {
232 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
232 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
233 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
233 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
234 };
234 };
235 meta = {
235 meta = {
236 license = [ pkgs.lib.licenses.mit ];
236 license = [ pkgs.lib.licenses.mit ];
237 };
237 };
238 };
238 };
239 gunicorn = super.buildPythonPackage {
239 gunicorn = super.buildPythonPackage {
240 name = "gunicorn-19.6.0";
240 name = "gunicorn-19.6.0";
241 buildInputs = with self; [];
241 buildInputs = with self; [];
242 doCheck = false;
242 doCheck = false;
243 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = with self; [];
244 src = fetchurl {
244 src = fetchurl {
245 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
245 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
246 md5 = "338e5e8a83ea0f0625f768dba4597530";
246 md5 = "338e5e8a83ea0f0625f768dba4597530";
247 };
247 };
248 meta = {
248 meta = {
249 license = [ pkgs.lib.licenses.mit ];
249 license = [ pkgs.lib.licenses.mit ];
250 };
250 };
251 };
251 };
252 hgsubversion = super.buildPythonPackage {
252 hgsubversion = super.buildPythonPackage {
253 name = "hgsubversion-1.8.6";
253 name = "hgsubversion-1.8.6";
254 buildInputs = with self; [];
254 buildInputs = with self; [];
255 doCheck = false;
255 doCheck = false;
256 propagatedBuildInputs = with self; [mercurial subvertpy];
256 propagatedBuildInputs = with self; [mercurial subvertpy];
257 src = fetchurl {
257 src = fetchurl {
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
260 };
260 };
261 meta = {
261 meta = {
262 license = [ pkgs.lib.licenses.gpl1 ];
262 license = [ pkgs.lib.licenses.gpl1 ];
263 };
263 };
264 };
264 };
265 infrae.cache = super.buildPythonPackage {
265 infrae.cache = super.buildPythonPackage {
266 name = "infrae.cache-1.0.1";
266 name = "infrae.cache-1.0.1";
267 buildInputs = with self; [];
267 buildInputs = with self; [];
268 doCheck = false;
268 doCheck = false;
269 propagatedBuildInputs = with self; [Beaker repoze.lru];
269 propagatedBuildInputs = with self; [Beaker repoze.lru];
270 src = fetchurl {
270 src = fetchurl {
271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
272 md5 = "b09076a766747e6ed2a755cc62088e32";
272 md5 = "b09076a766747e6ed2a755cc62088e32";
273 };
273 };
274 meta = {
274 meta = {
275 license = [ pkgs.lib.licenses.zpt21 ];
275 license = [ pkgs.lib.licenses.zpt21 ];
276 };
276 };
277 };
277 };
278 ipdb = super.buildPythonPackage {
278 ipdb = super.buildPythonPackage {
279 name = "ipdb-0.10.1";
279 name = "ipdb-0.10.1";
280 buildInputs = with self; [];
280 buildInputs = with self; [];
281 doCheck = false;
281 doCheck = false;
282 propagatedBuildInputs = with self; [ipython setuptools];
282 propagatedBuildInputs = with self; [ipython setuptools];
283 src = fetchurl {
283 src = fetchurl {
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
286 };
286 };
287 meta = {
287 meta = {
288 license = [ pkgs.lib.licenses.bsdOriginal ];
288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
289 };
290 };
290 };
291 ipython = super.buildPythonPackage {
291 ipython = super.buildPythonPackage {
292 name = "ipython-5.1.0";
292 name = "ipython-5.1.0";
293 buildInputs = with self; [];
293 buildInputs = with self; [];
294 doCheck = false;
294 doCheck = false;
295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
296 src = fetchurl {
296 src = fetchurl {
297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
299 };
299 };
300 meta = {
300 meta = {
301 license = [ pkgs.lib.licenses.bsdOriginal ];
301 license = [ pkgs.lib.licenses.bsdOriginal ];
302 };
302 };
303 };
303 };
304 ipython-genutils = super.buildPythonPackage {
304 ipython-genutils = super.buildPythonPackage {
305 name = "ipython-genutils-0.1.0";
305 name = "ipython-genutils-0.1.0";
306 buildInputs = with self; [];
306 buildInputs = with self; [];
307 doCheck = false;
307 doCheck = false;
308 propagatedBuildInputs = with self; [];
308 propagatedBuildInputs = with self; [];
309 src = fetchurl {
309 src = fetchurl {
310 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
310 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
311 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
311 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
312 };
312 };
313 meta = {
313 meta = {
314 license = [ pkgs.lib.licenses.bsdOriginal ];
314 license = [ pkgs.lib.licenses.bsdOriginal ];
315 };
315 };
316 };
316 };
317 mercurial = super.buildPythonPackage {
317 mercurial = super.buildPythonPackage {
318 name = "mercurial-4.0.2";
318 name = "mercurial-4.1.2";
319 buildInputs = with self; [];
319 buildInputs = with self; [];
320 doCheck = false;
320 doCheck = false;
321 propagatedBuildInputs = with self; [];
321 propagatedBuildInputs = with self; [];
322 src = fetchurl {
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/85/1b/0296aacd697228974a473d2508f013532f987ed6b1bacfe5abd6d5be6332/mercurial-4.0.2.tar.gz";
323 url = "https://pypi.python.org/packages/88/c1/f0501fd67f5e69346da41ee0bd7b2619ce4bbc9854bb645074c418b9941f/mercurial-4.1.2.tar.gz";
324 md5 = "fa72a08e2723e4fa2a21c4e66437f3fa";
324 md5 = "934c99808bdc8385e074b902d59b0d93";
325 };
325 };
326 meta = {
326 meta = {
327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
328 };
328 };
329 };
329 };
330 mock = super.buildPythonPackage {
330 mock = super.buildPythonPackage {
331 name = "mock-1.0.1";
331 name = "mock-1.0.1";
332 buildInputs = with self; [];
332 buildInputs = with self; [];
333 doCheck = false;
333 doCheck = false;
334 propagatedBuildInputs = with self; [];
334 propagatedBuildInputs = with self; [];
335 src = fetchurl {
335 src = fetchurl {
336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
337 md5 = "869f08d003c289a97c1a6610faf5e913";
337 md5 = "869f08d003c289a97c1a6610faf5e913";
338 };
338 };
339 meta = {
339 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal ];
340 license = [ pkgs.lib.licenses.bsdOriginal ];
341 };
341 };
342 };
342 };
343 msgpack-python = super.buildPythonPackage {
343 msgpack-python = super.buildPythonPackage {
344 name = "msgpack-python-0.4.8";
344 name = "msgpack-python-0.4.8";
345 buildInputs = with self; [];
345 buildInputs = with self; [];
346 doCheck = false;
346 doCheck = false;
347 propagatedBuildInputs = with self; [];
347 propagatedBuildInputs = with self; [];
348 src = fetchurl {
348 src = fetchurl {
349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
351 };
351 };
352 meta = {
352 meta = {
353 license = [ pkgs.lib.licenses.asl20 ];
353 license = [ pkgs.lib.licenses.asl20 ];
354 };
354 };
355 };
355 };
356 pathlib2 = super.buildPythonPackage {
356 pathlib2 = super.buildPythonPackage {
357 name = "pathlib2-2.1.0";
357 name = "pathlib2-2.1.0";
358 buildInputs = with self; [];
358 buildInputs = with self; [];
359 doCheck = false;
359 doCheck = false;
360 propagatedBuildInputs = with self; [six];
360 propagatedBuildInputs = with self; [six];
361 src = fetchurl {
361 src = fetchurl {
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
364 };
364 };
365 meta = {
365 meta = {
366 license = [ pkgs.lib.licenses.mit ];
366 license = [ pkgs.lib.licenses.mit ];
367 };
367 };
368 };
368 };
369 pexpect = super.buildPythonPackage {
369 pexpect = super.buildPythonPackage {
370 name = "pexpect-4.2.1";
370 name = "pexpect-4.2.1";
371 buildInputs = with self; [];
371 buildInputs = with self; [];
372 doCheck = false;
372 doCheck = false;
373 propagatedBuildInputs = with self; [ptyprocess];
373 propagatedBuildInputs = with self; [ptyprocess];
374 src = fetchurl {
374 src = fetchurl {
375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
376 md5 = "3694410001a99dff83f0b500a1ca1c95";
376 md5 = "3694410001a99dff83f0b500a1ca1c95";
377 };
377 };
378 meta = {
378 meta = {
379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
380 };
380 };
381 };
381 };
382 pickleshare = super.buildPythonPackage {
382 pickleshare = super.buildPythonPackage {
383 name = "pickleshare-0.7.4";
383 name = "pickleshare-0.7.4";
384 buildInputs = with self; [];
384 buildInputs = with self; [];
385 doCheck = false;
385 doCheck = false;
386 propagatedBuildInputs = with self; [pathlib2];
386 propagatedBuildInputs = with self; [pathlib2];
387 src = fetchurl {
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
390 };
390 };
391 meta = {
391 meta = {
392 license = [ pkgs.lib.licenses.mit ];
392 license = [ pkgs.lib.licenses.mit ];
393 };
393 };
394 };
394 };
395 prompt-toolkit = super.buildPythonPackage {
395 prompt-toolkit = super.buildPythonPackage {
396 name = "prompt-toolkit-1.0.9";
396 name = "prompt-toolkit-1.0.9";
397 buildInputs = with self; [];
397 buildInputs = with self; [];
398 doCheck = false;
398 doCheck = false;
399 propagatedBuildInputs = with self; [six wcwidth];
399 propagatedBuildInputs = with self; [six wcwidth];
400 src = fetchurl {
400 src = fetchurl {
401 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
401 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
402 md5 = "a39f91a54308fb7446b1a421c11f227c";
402 md5 = "a39f91a54308fb7446b1a421c11f227c";
403 };
403 };
404 meta = {
404 meta = {
405 license = [ pkgs.lib.licenses.bsdOriginal ];
405 license = [ pkgs.lib.licenses.bsdOriginal ];
406 };
406 };
407 };
407 };
408 ptyprocess = super.buildPythonPackage {
408 ptyprocess = super.buildPythonPackage {
409 name = "ptyprocess-0.5.1";
409 name = "ptyprocess-0.5.1";
410 buildInputs = with self; [];
410 buildInputs = with self; [];
411 doCheck = false;
411 doCheck = false;
412 propagatedBuildInputs = with self; [];
412 propagatedBuildInputs = with self; [];
413 src = fetchurl {
413 src = fetchurl {
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
416 };
416 };
417 meta = {
417 meta = {
418 license = [ ];
418 license = [ ];
419 };
419 };
420 };
420 };
421 py = super.buildPythonPackage {
421 py = super.buildPythonPackage {
422 name = "py-1.4.31";
422 name = "py-1.4.31";
423 buildInputs = with self; [];
423 buildInputs = with self; [];
424 doCheck = false;
424 doCheck = false;
425 propagatedBuildInputs = with self; [];
425 propagatedBuildInputs = with self; [];
426 src = fetchurl {
426 src = fetchurl {
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
429 };
429 };
430 meta = {
430 meta = {
431 license = [ pkgs.lib.licenses.mit ];
431 license = [ pkgs.lib.licenses.mit ];
432 };
432 };
433 };
433 };
434 pygments = super.buildPythonPackage {
434 pygments = super.buildPythonPackage {
435 name = "pygments-2.2.0";
435 name = "pygments-2.2.0";
436 buildInputs = with self; [];
436 buildInputs = with self; [];
437 doCheck = false;
437 doCheck = false;
438 propagatedBuildInputs = with self; [];
438 propagatedBuildInputs = with self; [];
439 src = fetchurl {
439 src = fetchurl {
440 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
440 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
441 md5 = "13037baca42f16917cbd5ad2fab50844";
441 md5 = "13037baca42f16917cbd5ad2fab50844";
442 };
442 };
443 meta = {
443 meta = {
444 license = [ pkgs.lib.licenses.bsdOriginal ];
444 license = [ pkgs.lib.licenses.bsdOriginal ];
445 };
445 };
446 };
446 };
447 pyramid = super.buildPythonPackage {
447 pyramid = super.buildPythonPackage {
448 name = "pyramid-1.6.1";
448 name = "pyramid-1.7.4";
449 buildInputs = with self; [];
449 buildInputs = with self; [];
450 doCheck = false;
450 doCheck = false;
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
452 src = fetchurl {
452 src = fetchurl {
453 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
453 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
454 md5 = "b18688ff3cc33efdbb098a35b45dd122";
454 md5 = "6ef1dfdcff9136d04490410757c4c446";
455 };
455 };
456 meta = {
456 meta = {
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
458 };
458 };
459 };
459 };
460 pyramid-jinja2 = super.buildPythonPackage {
460 pyramid-jinja2 = super.buildPythonPackage {
461 name = "pyramid-jinja2-2.5";
461 name = "pyramid-jinja2-2.5";
462 buildInputs = with self; [];
462 buildInputs = with self; [];
463 doCheck = false;
463 doCheck = false;
464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
465 src = fetchurl {
465 src = fetchurl {
466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
468 };
468 };
469 meta = {
469 meta = {
470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
471 };
471 };
472 };
472 };
473 pyramid-mako = super.buildPythonPackage {
473 pyramid-mako = super.buildPythonPackage {
474 name = "pyramid-mako-1.0.2";
474 name = "pyramid-mako-1.0.2";
475 buildInputs = with self; [];
475 buildInputs = with self; [];
476 doCheck = false;
476 doCheck = false;
477 propagatedBuildInputs = with self; [pyramid Mako];
477 propagatedBuildInputs = with self; [pyramid Mako];
478 src = fetchurl {
478 src = fetchurl {
479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
481 };
481 };
482 meta = {
482 meta = {
483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
484 };
484 };
485 };
485 };
486 pytest = super.buildPythonPackage {
486 pytest = super.buildPythonPackage {
487 name = "pytest-3.0.5";
487 name = "pytest-3.0.5";
488 buildInputs = with self; [];
488 buildInputs = with self; [];
489 doCheck = false;
489 doCheck = false;
490 propagatedBuildInputs = with self; [py];
490 propagatedBuildInputs = with self; [py];
491 src = fetchurl {
491 src = fetchurl {
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
494 };
494 };
495 meta = {
495 meta = {
496 license = [ pkgs.lib.licenses.mit ];
496 license = [ pkgs.lib.licenses.mit ];
497 };
497 };
498 };
498 };
499 pytest-catchlog = super.buildPythonPackage {
499 pytest-catchlog = super.buildPythonPackage {
500 name = "pytest-catchlog-1.2.2";
500 name = "pytest-catchlog-1.2.2";
501 buildInputs = with self; [];
501 buildInputs = with self; [];
502 doCheck = false;
502 doCheck = false;
503 propagatedBuildInputs = with self; [py pytest];
503 propagatedBuildInputs = with self; [py pytest];
504 src = fetchurl {
504 src = fetchurl {
505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
506 md5 = "09d890c54c7456c818102b7ff8c182c8";
506 md5 = "09d890c54c7456c818102b7ff8c182c8";
507 };
507 };
508 meta = {
508 meta = {
509 license = [ pkgs.lib.licenses.mit ];
509 license = [ pkgs.lib.licenses.mit ];
510 };
510 };
511 };
511 };
512 pytest-cov = super.buildPythonPackage {
512 pytest-cov = super.buildPythonPackage {
513 name = "pytest-cov-2.4.0";
513 name = "pytest-cov-2.4.0";
514 buildInputs = with self; [];
514 buildInputs = with self; [];
515 doCheck = false;
515 doCheck = false;
516 propagatedBuildInputs = with self; [pytest coverage];
516 propagatedBuildInputs = with self; [pytest coverage];
517 src = fetchurl {
517 src = fetchurl {
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
520 };
520 };
521 meta = {
521 meta = {
522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
523 };
523 };
524 };
524 };
525 pytest-profiling = super.buildPythonPackage {
525 pytest-profiling = super.buildPythonPackage {
526 name = "pytest-profiling-1.2.2";
526 name = "pytest-profiling-1.2.2";
527 buildInputs = with self; [];
527 buildInputs = with self; [];
528 doCheck = false;
528 doCheck = false;
529 propagatedBuildInputs = with self; [six pytest gprof2dot];
529 propagatedBuildInputs = with self; [six pytest gprof2dot];
530 src = fetchurl {
530 src = fetchurl {
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
533 };
533 };
534 meta = {
534 meta = {
535 license = [ pkgs.lib.licenses.mit ];
535 license = [ pkgs.lib.licenses.mit ];
536 };
536 };
537 };
537 };
538 pytest-runner = super.buildPythonPackage {
538 pytest-runner = super.buildPythonPackage {
539 name = "pytest-runner-2.9";
539 name = "pytest-runner-2.9";
540 buildInputs = with self; [];
540 buildInputs = with self; [];
541 doCheck = false;
541 doCheck = false;
542 propagatedBuildInputs = with self; [];
542 propagatedBuildInputs = with self; [];
543 src = fetchurl {
543 src = fetchurl {
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
546 };
546 };
547 meta = {
547 meta = {
548 license = [ pkgs.lib.licenses.mit ];
548 license = [ pkgs.lib.licenses.mit ];
549 };
549 };
550 };
550 };
551 pytest-sugar = super.buildPythonPackage {
551 pytest-sugar = super.buildPythonPackage {
552 name = "pytest-sugar-0.7.1";
552 name = "pytest-sugar-0.7.1";
553 buildInputs = with self; [];
553 buildInputs = with self; [];
554 doCheck = false;
554 doCheck = false;
555 propagatedBuildInputs = with self; [pytest termcolor];
555 propagatedBuildInputs = with self; [pytest termcolor];
556 src = fetchurl {
556 src = fetchurl {
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
559 };
559 };
560 meta = {
560 meta = {
561 license = [ pkgs.lib.licenses.bsdOriginal ];
561 license = [ pkgs.lib.licenses.bsdOriginal ];
562 };
562 };
563 };
563 };
564 pytest-timeout = super.buildPythonPackage {
564 pytest-timeout = super.buildPythonPackage {
565 name = "pytest-timeout-1.2.0";
565 name = "pytest-timeout-1.2.0";
566 buildInputs = with self; [];
566 buildInputs = with self; [];
567 doCheck = false;
567 doCheck = false;
568 propagatedBuildInputs = with self; [pytest];
568 propagatedBuildInputs = with self; [pytest];
569 src = fetchurl {
569 src = fetchurl {
570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
571 md5 = "83607d91aa163562c7ee835da57d061d";
571 md5 = "83607d91aa163562c7ee835da57d061d";
572 };
572 };
573 meta = {
573 meta = {
574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
575 };
575 };
576 };
576 };
577 repoze.lru = super.buildPythonPackage {
577 repoze.lru = super.buildPythonPackage {
578 name = "repoze.lru-0.6";
578 name = "repoze.lru-0.6";
579 buildInputs = with self; [];
579 buildInputs = with self; [];
580 doCheck = false;
580 doCheck = false;
581 propagatedBuildInputs = with self; [];
581 propagatedBuildInputs = with self; [];
582 src = fetchurl {
582 src = fetchurl {
583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
585 };
585 };
586 meta = {
586 meta = {
587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
588 };
588 };
589 };
589 };
590 rhodecode-vcsserver = super.buildPythonPackage {
590 rhodecode-vcsserver = super.buildPythonPackage {
591 name = "rhodecode-vcsserver-4.6.1";
591 name = "rhodecode-vcsserver-4.7.0";
592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
593 doCheck = true;
593 doCheck = true;
594 propagatedBuildInputs = with self; [Beaker configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
594 propagatedBuildInputs = with self; [Beaker configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
595 src = ./.;
595 src = ./.;
596 meta = {
596 meta = {
597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
598 };
598 };
599 };
599 };
600 serpent = super.buildPythonPackage {
600 serpent = super.buildPythonPackage {
601 name = "serpent-1.15";
601 name = "serpent-1.15";
602 buildInputs = with self; [];
602 buildInputs = with self; [];
603 doCheck = false;
603 doCheck = false;
604 propagatedBuildInputs = with self; [];
604 propagatedBuildInputs = with self; [];
605 src = fetchurl {
605 src = fetchurl {
606 url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz";
606 url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz";
607 md5 = "e27b1aad5c218e16442f52abb7c7053a";
607 md5 = "e27b1aad5c218e16442f52abb7c7053a";
608 };
608 };
609 meta = {
609 meta = {
610 license = [ pkgs.lib.licenses.mit ];
610 license = [ pkgs.lib.licenses.mit ];
611 };
611 };
612 };
612 };
613 setuptools = super.buildPythonPackage {
613 setuptools = super.buildPythonPackage {
614 name = "setuptools-30.1.0";
614 name = "setuptools-30.1.0";
615 buildInputs = with self; [];
615 buildInputs = with self; [];
616 doCheck = false;
616 doCheck = false;
617 propagatedBuildInputs = with self; [];
617 propagatedBuildInputs = with self; [];
618 src = fetchurl {
618 src = fetchurl {
619 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
619 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
620 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
620 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
621 };
621 };
622 meta = {
622 meta = {
623 license = [ pkgs.lib.licenses.mit ];
623 license = [ pkgs.lib.licenses.mit ];
624 };
624 };
625 };
625 };
626 simplegeneric = super.buildPythonPackage {
626 simplegeneric = super.buildPythonPackage {
627 name = "simplegeneric-0.8.1";
627 name = "simplegeneric-0.8.1";
628 buildInputs = with self; [];
628 buildInputs = with self; [];
629 doCheck = false;
629 doCheck = false;
630 propagatedBuildInputs = with self; [];
630 propagatedBuildInputs = with self; [];
631 src = fetchurl {
631 src = fetchurl {
632 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
632 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
633 md5 = "f9c1fab00fd981be588fc32759f474e3";
633 md5 = "f9c1fab00fd981be588fc32759f474e3";
634 };
634 };
635 meta = {
635 meta = {
636 license = [ pkgs.lib.licenses.zpt21 ];
636 license = [ pkgs.lib.licenses.zpt21 ];
637 };
637 };
638 };
638 };
639 simplejson = super.buildPythonPackage {
639 simplejson = super.buildPythonPackage {
640 name = "simplejson-3.7.2";
640 name = "simplejson-3.7.2";
641 buildInputs = with self; [];
641 buildInputs = with self; [];
642 doCheck = false;
642 doCheck = false;
643 propagatedBuildInputs = with self; [];
643 propagatedBuildInputs = with self; [];
644 src = fetchurl {
644 src = fetchurl {
645 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
645 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
646 md5 = "a5fc7d05d4cb38492285553def5d4b46";
646 md5 = "a5fc7d05d4cb38492285553def5d4b46";
647 };
647 };
648 meta = {
648 meta = {
649 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
649 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
650 };
650 };
651 };
651 };
652 six = super.buildPythonPackage {
652 six = super.buildPythonPackage {
653 name = "six-1.9.0";
653 name = "six-1.9.0";
654 buildInputs = with self; [];
654 buildInputs = with self; [];
655 doCheck = false;
655 doCheck = false;
656 propagatedBuildInputs = with self; [];
656 propagatedBuildInputs = with self; [];
657 src = fetchurl {
657 src = fetchurl {
658 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
658 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
659 md5 = "476881ef4012262dfc8adc645ee786c4";
659 md5 = "476881ef4012262dfc8adc645ee786c4";
660 };
660 };
661 meta = {
661 meta = {
662 license = [ pkgs.lib.licenses.mit ];
662 license = [ pkgs.lib.licenses.mit ];
663 };
663 };
664 };
664 };
665 subprocess32 = super.buildPythonPackage {
665 subprocess32 = super.buildPythonPackage {
666 name = "subprocess32-3.2.6";
666 name = "subprocess32-3.2.6";
667 buildInputs = with self; [];
667 buildInputs = with self; [];
668 doCheck = false;
668 doCheck = false;
669 propagatedBuildInputs = with self; [];
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
670 src = fetchurl {
671 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
671 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
672 md5 = "754c5ab9f533e764f931136974b618f1";
672 md5 = "754c5ab9f533e764f931136974b618f1";
673 };
673 };
674 meta = {
674 meta = {
675 license = [ pkgs.lib.licenses.psfl ];
675 license = [ pkgs.lib.licenses.psfl ];
676 };
676 };
677 };
677 };
678 subvertpy = super.buildPythonPackage {
678 subvertpy = super.buildPythonPackage {
679 name = "subvertpy-0.9.3";
679 name = "subvertpy-0.9.3";
680 buildInputs = with self; [];
680 buildInputs = with self; [];
681 doCheck = false;
681 doCheck = false;
682 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
683 src = fetchurl {
683 src = fetchurl {
684 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
684 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
685 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
685 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
686 };
686 };
687 meta = {
687 meta = {
688 license = [ pkgs.lib.licenses.lgpl21Plus ];
688 license = [ pkgs.lib.licenses.lgpl21Plus ];
689 };
689 };
690 };
690 };
691 termcolor = super.buildPythonPackage {
691 termcolor = super.buildPythonPackage {
692 name = "termcolor-1.1.0";
692 name = "termcolor-1.1.0";
693 buildInputs = with self; [];
693 buildInputs = with self; [];
694 doCheck = false;
694 doCheck = false;
695 propagatedBuildInputs = with self; [];
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
696 src = fetchurl {
697 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
697 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
698 md5 = "043e89644f8909d462fbbfa511c768df";
698 md5 = "043e89644f8909d462fbbfa511c768df";
699 };
699 };
700 meta = {
700 meta = {
701 license = [ pkgs.lib.licenses.mit ];
701 license = [ pkgs.lib.licenses.mit ];
702 };
702 };
703 };
703 };
704 traitlets = super.buildPythonPackage {
704 traitlets = super.buildPythonPackage {
705 name = "traitlets-4.3.1";
705 name = "traitlets-4.3.1";
706 buildInputs = with self; [];
706 buildInputs = with self; [];
707 doCheck = false;
707 doCheck = false;
708 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
708 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
709 src = fetchurl {
709 src = fetchurl {
710 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
710 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
711 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
711 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
712 };
712 };
713 meta = {
713 meta = {
714 license = [ pkgs.lib.licenses.bsdOriginal ];
714 license = [ pkgs.lib.licenses.bsdOriginal ];
715 };
715 };
716 };
716 };
717 translationstring = super.buildPythonPackage {
717 translationstring = super.buildPythonPackage {
718 name = "translationstring-1.3";
718 name = "translationstring-1.3";
719 buildInputs = with self; [];
719 buildInputs = with self; [];
720 doCheck = false;
720 doCheck = false;
721 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
722 src = fetchurl {
722 src = fetchurl {
723 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
723 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
724 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
724 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
725 };
725 };
726 meta = {
726 meta = {
727 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
727 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
728 };
728 };
729 };
729 };
730 venusian = super.buildPythonPackage {
730 venusian = super.buildPythonPackage {
731 name = "venusian-1.0";
731 name = "venusian-1.0";
732 buildInputs = with self; [];
732 buildInputs = with self; [];
733 doCheck = false;
733 doCheck = false;
734 propagatedBuildInputs = with self; [];
734 propagatedBuildInputs = with self; [];
735 src = fetchurl {
735 src = fetchurl {
736 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
736 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
737 md5 = "dccf2eafb7113759d60c86faf5538756";
737 md5 = "dccf2eafb7113759d60c86faf5538756";
738 };
738 };
739 meta = {
739 meta = {
740 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
740 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
741 };
741 };
742 };
742 };
743 waitress = super.buildPythonPackage {
743 waitress = super.buildPythonPackage {
744 name = "waitress-1.0.1";
744 name = "waitress-1.0.1";
745 buildInputs = with self; [];
745 buildInputs = with self; [];
746 doCheck = false;
746 doCheck = false;
747 propagatedBuildInputs = with self; [];
747 propagatedBuildInputs = with self; [];
748 src = fetchurl {
748 src = fetchurl {
749 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
749 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
750 md5 = "dda92358a7569669086155923a46e57c";
750 md5 = "dda92358a7569669086155923a46e57c";
751 };
751 };
752 meta = {
752 meta = {
753 license = [ pkgs.lib.licenses.zpt21 ];
753 license = [ pkgs.lib.licenses.zpt21 ];
754 };
754 };
755 };
755 };
756 wcwidth = super.buildPythonPackage {
756 wcwidth = super.buildPythonPackage {
757 name = "wcwidth-0.1.7";
757 name = "wcwidth-0.1.7";
758 buildInputs = with self; [];
758 buildInputs = with self; [];
759 doCheck = false;
759 doCheck = false;
760 propagatedBuildInputs = with self; [];
760 propagatedBuildInputs = with self; [];
761 src = fetchurl {
761 src = fetchurl {
762 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
762 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
763 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
763 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
764 };
764 };
765 meta = {
765 meta = {
766 license = [ pkgs.lib.licenses.mit ];
766 license = [ pkgs.lib.licenses.mit ];
767 };
767 };
768 };
768 };
769 wheel = super.buildPythonPackage {
769 wheel = super.buildPythonPackage {
770 name = "wheel-0.29.0";
770 name = "wheel-0.29.0";
771 buildInputs = with self; [];
771 buildInputs = with self; [];
772 doCheck = false;
772 doCheck = false;
773 propagatedBuildInputs = with self; [];
773 propagatedBuildInputs = with self; [];
774 src = fetchurl {
774 src = fetchurl {
775 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
775 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
776 md5 = "555a67e4507cedee23a0deb9651e452f";
776 md5 = "555a67e4507cedee23a0deb9651e452f";
777 };
777 };
778 meta = {
778 meta = {
779 license = [ pkgs.lib.licenses.mit ];
779 license = [ pkgs.lib.licenses.mit ];
780 };
780 };
781 };
781 };
782 zope.deprecation = super.buildPythonPackage {
782 zope.deprecation = super.buildPythonPackage {
783 name = "zope.deprecation-4.1.2";
783 name = "zope.deprecation-4.1.2";
784 buildInputs = with self; [];
784 buildInputs = with self; [];
785 doCheck = false;
785 doCheck = false;
786 propagatedBuildInputs = with self; [setuptools];
786 propagatedBuildInputs = with self; [setuptools];
787 src = fetchurl {
787 src = fetchurl {
788 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
788 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
789 md5 = "e9a663ded58f4f9f7881beb56cae2782";
789 md5 = "e9a663ded58f4f9f7881beb56cae2782";
790 };
790 };
791 meta = {
791 meta = {
792 license = [ pkgs.lib.licenses.zpt21 ];
792 license = [ pkgs.lib.licenses.zpt21 ];
793 };
793 };
794 };
794 };
795 zope.interface = super.buildPythonPackage {
795 zope.interface = super.buildPythonPackage {
796 name = "zope.interface-4.1.3";
796 name = "zope.interface-4.1.3";
797 buildInputs = with self; [];
797 buildInputs = with self; [];
798 doCheck = false;
798 doCheck = false;
799 propagatedBuildInputs = with self; [setuptools];
799 propagatedBuildInputs = with self; [setuptools];
800 src = fetchurl {
800 src = fetchurl {
801 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
801 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
802 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
802 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
803 };
803 };
804 meta = {
804 meta = {
805 license = [ pkgs.lib.licenses.zpt21 ];
805 license = [ pkgs.lib.licenses.zpt21 ];
806 };
806 };
807 };
807 };
808
808
809 ### Test requirements
809 ### Test requirements
810
810
811
811
812 }
812 }
@@ -1,43 +1,43 b''
1 # core
1 ## core
2 setuptools==30.1.0
2 setuptools==30.1.0
3
3
4 Beaker==1.7.0
4 Beaker==1.7.0
5 configobj==5.0.6
5 configobj==5.0.6
6 decorator==4.0.11
6 dulwich==0.13.0
7 dulwich==0.13.0
7 hgsubversion==1.8.6
8 hgsubversion==1.8.6
8 infrae.cache==1.0.1
9 infrae.cache==1.0.1
9 mercurial==4.0.2
10 mercurial==4.1.2
10 msgpack-python==0.4.8
11 msgpack-python==0.4.8
11 pyramid==1.6.1
12 pyramid-jinja2==2.5
12 pyramid-jinja2==2.5
13 pyramid==1.7.4
13 pyramid-mako==1.0.2
14 pyramid-mako==1.0.2
14 repoze.lru==0.6
15 repoze.lru==0.6
15 simplejson==3.7.2
16 simplejson==3.7.2
16 subprocess32==3.2.6
17 subprocess32==3.2.6
17
18
18 # Custom subvertpy that is not available on pypi.
19 # Custom subvertpy that is not available on pypi.
19 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
20 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
20
21
21 six==1.9.0
22 six==1.9.0
22 translationstring==1.3
23 translationstring==1.3
23 WebOb==1.3.1
24 WebOb==1.3.1
24 wheel==0.29.0
25 wheel==0.29.0
25 zope.deprecation==4.1.2
26 zope.deprecation==4.1.2
26 zope.interface==4.1.3
27 zope.interface==4.1.3
27
28
28 ## debug
29 ## debug
29 ipdb==0.10.1
30 ipdb==0.10.1
30 ipython==5.1.0
31 ipython==5.1.0
31
32 # http servers
32 # http servers
33 gevent==1.1.2
33 gevent==1.1.2
34 greenlet==0.4.10
34 greenlet==0.4.10
35 gunicorn==19.6.0
35 gunicorn==19.6.0
36 waitress==1.0.1
36 waitress==1.0.1
37
37
38 # Pyro/Deprecated TODO(Marcink): remove in 4.7 release.
38 # Pyro/Deprecated TODO(Marcink): remove in 4.7 release.
39 Pyro4==4.41
39 Pyro4==4.41
40 serpent==1.15
40 serpent==1.15
41
41
42 ## test related requirements
42 ## test related requirements
43 -r requirements_test.txt
43 -r requirements_test.txt
@@ -1,1 +1,1 b''
1 4.6.1 No newline at end of file
1 4.7.0 No newline at end of file
@@ -1,82 +1,98 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
19 import traceback
18 import logging
20 import logging
19 import urlparse
21 import urlparse
20
22
21 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
22
24
23
25
24 class RepoFactory(object):
26 class RepoFactory(object):
25 """
27 """
26 Utility to create instances of repository
28 Utility to create instances of repository
27
29
28 It provides internal caching of the `repo` object based on
30 It provides internal caching of the `repo` object based on
29 the :term:`call context`.
31 the :term:`call context`.
30 """
32 """
31
33
32 def __init__(self, repo_cache):
34 def __init__(self, repo_cache):
33 self._cache = repo_cache
35 self._cache = repo_cache
34
36
35 def _create_config(self, path, config):
37 def _create_config(self, path, config):
36 config = {}
38 config = {}
37 return config
39 return config
38
40
39 def _create_repo(self, wire, create):
41 def _create_repo(self, wire, create):
40 raise NotImplementedError()
42 raise NotImplementedError()
41
43
42 def repo(self, wire, create=False):
44 def repo(self, wire, create=False):
43 """
45 """
44 Get a repository instance for the given path.
46 Get a repository instance for the given path.
45
47
46 Uses internally the low level beaker API since the decorators introduce
48 Uses internally the low level beaker API since the decorators introduce
47 significant overhead.
49 significant overhead.
48 """
50 """
49 def create_new_repo():
51 def create_new_repo():
50 return self._create_repo(wire, create)
52 return self._create_repo(wire, create)
51
53
52 return self._repo(wire, create_new_repo)
54 return self._repo(wire, create_new_repo)
53
55
54 def _repo(self, wire, createfunc):
56 def _repo(self, wire, createfunc):
55 context = wire.get('context', None)
57 context = wire.get('context', None)
56 cache = wire.get('cache', True)
58 cache = wire.get('cache', True)
57
59
58 if context and cache:
60 if context and cache:
59 cache_key = (context, wire['path'])
61 cache_key = (context, wire['path'])
60 log.debug(
62 log.debug(
61 'FETCH %s@%s repo object from cache. Context: %s',
63 'FETCH %s@%s repo object from cache. Context: %s',
62 self.__class__.__name__, wire['path'], context)
64 self.__class__.__name__, wire['path'], context)
63 return self._cache.get(key=cache_key, createfunc=createfunc)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
64 else:
66 else:
65 log.debug(
67 log.debug(
66 'INIT %s@%s repo object based on wire %s. Context: %s',
68 'INIT %s@%s repo object based on wire %s. Context: %s',
67 self.__class__.__name__, wire['path'], wire, context)
69 self.__class__.__name__, wire['path'], wire, context)
68 return createfunc()
70 return createfunc()
69
71
70
72
71 def obfuscate_qs(query_string):
73 def obfuscate_qs(query_string):
72 if query_string is None:
74 if query_string is None:
73 return None
75 return None
74
76
75 parsed = []
77 parsed = []
76 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
77 if k in ['auth_token', 'api_key']:
79 if k in ['auth_token', 'api_key']:
78 v = "*****"
80 v = "*****"
79 parsed.append((k, v))
81 parsed.append((k, v))
80
82
81 return '&'.join('{}{}'.format(
83 return '&'.join('{}{}'.format(
82 k, '={}'.format(v) if v else '') for k, v in parsed)
84 k, '={}'.format(v) if v else '') for k, v in parsed)
85
86
87 def raise_from_original(new_type):
88 """
89 Raise a new exception type with original args and traceback.
90 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
92
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
95 try:
96 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
98 del exc_traceback
@@ -1,581 +1,644 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41
41 from vcsserver.git_lfs.lib import LFSOidStore
42
42
43 DIR_STAT = stat.S_IFDIR
43 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
44 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
45 GIT_LINK = objects.S_IFGITLINK
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def reraise_safe_exceptions(func):
50 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
51 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
52 @wraps(func)
53 def wrapper(*args, **kwargs):
53 def wrapper(*args, **kwargs):
54 try:
54 try:
55 return func(*args, **kwargs)
55 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
57 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
58 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
59 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
60 raise exceptions.VcsException(e.message)
61 except Exception as e:
62 # NOTE(marcink): becuase of how dulwich handles some exceptions
63 # (KeyError on empty repos), we cannot track this and catch all
64 # exceptions, it's an exceptions from other handlers
65 #if not hasattr(e, '_vcs_kind'):
66 #log.exception("Unhandled exception in git remote call")
67 #raise_from_original(exceptions.UnhandledException)
68 raise
61 return wrapper
69 return wrapper
62
70
63
71
64 class Repo(DulwichRepo):
72 class Repo(DulwichRepo):
65 """
73 """
66 A wrapper for dulwich Repo class.
74 A wrapper for dulwich Repo class.
67
75
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
76 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 "Too many open files" error. We need to close all opened file descriptors
77 "Too many open files" error. We need to close all opened file descriptors
70 once the repo object is destroyed.
78 once the repo object is destroyed.
71
79
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
80 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 to 0.12.0 +
81 to 0.12.0 +
74 """
82 """
75 def __del__(self):
83 def __del__(self):
76 if hasattr(self, 'object_store'):
84 if hasattr(self, 'object_store'):
77 self.close()
85 self.close()
78
86
79
87
80 class GitFactory(RepoFactory):
88 class GitFactory(RepoFactory):
81
89
82 def _create_repo(self, wire, create):
90 def _create_repo(self, wire, create):
83 repo_path = str_to_dulwich(wire['path'])
91 repo_path = str_to_dulwich(wire['path'])
84 return Repo(repo_path)
92 return Repo(repo_path)
85
93
86
94
87 class GitRemote(object):
95 class GitRemote(object):
88
96
89 def __init__(self, factory):
97 def __init__(self, factory):
90 self._factory = factory
98 self._factory = factory
91
99
92 self._bulk_methods = {
100 self._bulk_methods = {
93 "author": self.commit_attribute,
101 "author": self.commit_attribute,
94 "date": self.get_object_attrs,
102 "date": self.get_object_attrs,
95 "message": self.commit_attribute,
103 "message": self.commit_attribute,
96 "parents": self.commit_attribute,
104 "parents": self.commit_attribute,
97 "_commit": self.revision,
105 "_commit": self.revision,
98 }
106 }
99
107
108 def _wire_to_config(self, wire):
109 if 'config' in wire:
110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return {}
112
100 def _assign_ref(self, wire, ref, commit_id):
113 def _assign_ref(self, wire, ref, commit_id):
101 repo = self._factory.repo(wire)
114 repo = self._factory.repo(wire)
102 repo[ref] = commit_id
115 repo[ref] = commit_id
103
116
104 @reraise_safe_exceptions
117 @reraise_safe_exceptions
105 def add_object(self, wire, content):
118 def add_object(self, wire, content):
106 repo = self._factory.repo(wire)
119 repo = self._factory.repo(wire)
107 blob = objects.Blob()
120 blob = objects.Blob()
108 blob.set_raw_string(content)
121 blob.set_raw_string(content)
109 repo.object_store.add_object(blob)
122 repo.object_store.add_object(blob)
110 return blob.id
123 return blob.id
111
124
112 @reraise_safe_exceptions
125 @reraise_safe_exceptions
113 def assert_correct_path(self, wire):
126 def assert_correct_path(self, wire):
114 try:
127 try:
115 self._factory.repo(wire)
128 self._factory.repo(wire)
116 except NotGitRepository as e:
129 except NotGitRepository as e:
117 # Exception can contain unicode which we convert
130 # Exception can contain unicode which we convert
118 raise exceptions.AbortException(repr(e))
131 raise exceptions.AbortException(repr(e))
119
132
120 @reraise_safe_exceptions
133 @reraise_safe_exceptions
121 def bare(self, wire):
134 def bare(self, wire):
122 repo = self._factory.repo(wire)
135 repo = self._factory.repo(wire)
123 return repo.bare
136 return repo.bare
124
137
125 @reraise_safe_exceptions
138 @reraise_safe_exceptions
126 def blob_as_pretty_string(self, wire, sha):
139 def blob_as_pretty_string(self, wire, sha):
127 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
128 return repo[sha].as_pretty_string()
141 return repo[sha].as_pretty_string()
129
142
130 @reraise_safe_exceptions
143 @reraise_safe_exceptions
131 def blob_raw_length(self, wire, sha):
144 def blob_raw_length(self, wire, sha):
132 repo = self._factory.repo(wire)
145 repo = self._factory.repo(wire)
133 blob = repo[sha]
146 blob = repo[sha]
134 return blob.raw_length()
147 return blob.raw_length()
135
148
149 def _parse_lfs_pointer(self, raw_content):
150
151 spec_string = 'version https://git-lfs.github.com/spec'
152 if raw_content and raw_content.startswith(spec_string):
153 pattern = re.compile(r"""
154 (?:\n)?
155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 ^size[ ](?P<oid_size>[0-9]+)\n
158 (?:\n)?
159 """, re.VERBOSE | re.MULTILINE)
160 match = pattern.match(raw_content)
161 if match:
162 return match.groupdict()
163
164 return {}
165
166 @reraise_safe_exceptions
167 def is_large_file(self, wire, sha):
168 repo = self._factory.repo(wire)
169 blob = repo[sha]
170 return self._parse_lfs_pointer(blob.as_raw_string())
171
172 @reraise_safe_exceptions
173 def in_largefiles_store(self, wire, oid):
174 repo = self._factory.repo(wire)
175 conf = self._wire_to_config(wire)
176
177 store_location = conf.get('vcs_git_lfs_store_location')
178 if store_location:
179 repo_name = repo.path
180 store = LFSOidStore(
181 oid=oid, repo=repo_name, store_location=store_location)
182 return store.has_oid()
183
184 return False
185
186 @reraise_safe_exceptions
187 def store_path(self, wire, oid):
188 repo = self._factory.repo(wire)
189 conf = self._wire_to_config(wire)
190
191 store_location = conf.get('vcs_git_lfs_store_location')
192 if store_location:
193 repo_name = repo.path
194 store = LFSOidStore(
195 oid=oid, repo=repo_name, store_location=store_location)
196 return store.oid_path
197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198
136 @reraise_safe_exceptions
199 @reraise_safe_exceptions
137 def bulk_request(self, wire, rev, pre_load):
200 def bulk_request(self, wire, rev, pre_load):
138 result = {}
201 result = {}
139 for attr in pre_load:
202 for attr in pre_load:
140 try:
203 try:
141 method = self._bulk_methods[attr]
204 method = self._bulk_methods[attr]
142 args = [wire, rev]
205 args = [wire, rev]
143 if attr == "date":
206 if attr == "date":
144 args.extend(["commit_time", "commit_timezone"])
207 args.extend(["commit_time", "commit_timezone"])
145 elif attr in ["author", "message", "parents"]:
208 elif attr in ["author", "message", "parents"]:
146 args.append(attr)
209 args.append(attr)
147 result[attr] = method(*args)
210 result[attr] = method(*args)
148 except KeyError:
211 except KeyError:
149 raise exceptions.VcsException(
212 raise exceptions.VcsException(
150 "Unknown bulk attribute: %s" % attr)
213 "Unknown bulk attribute: %s" % attr)
151 return result
214 return result
152
215
153 def _build_opener(self, url):
216 def _build_opener(self, url):
154 handlers = []
217 handlers = []
155 url_obj = url_parser(url)
218 url_obj = url_parser(url)
156 _, authinfo = url_obj.authinfo()
219 _, authinfo = url_obj.authinfo()
157
220
158 if authinfo:
221 if authinfo:
159 # create a password manager
222 # create a password manager
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 passmgr.add_password(*authinfo)
224 passmgr.add_password(*authinfo)
162
225
163 handlers.extend((httpbasicauthhandler(passmgr),
226 handlers.extend((httpbasicauthhandler(passmgr),
164 httpdigestauthhandler(passmgr)))
227 httpdigestauthhandler(passmgr)))
165
228
166 return urllib2.build_opener(*handlers)
229 return urllib2.build_opener(*handlers)
167
230
168 @reraise_safe_exceptions
231 @reraise_safe_exceptions
169 def check_url(self, url, config):
232 def check_url(self, url, config):
170 url_obj = url_parser(url)
233 url_obj = url_parser(url)
171 test_uri, _ = url_obj.authinfo()
234 test_uri, _ = url_obj.authinfo()
172 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
235 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
173 url_obj.query = obfuscate_qs(url_obj.query)
236 url_obj.query = obfuscate_qs(url_obj.query)
174 cleaned_uri = str(url_obj)
237 cleaned_uri = str(url_obj)
175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
238 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
176
239
177 if not test_uri.endswith('info/refs'):
240 if not test_uri.endswith('info/refs'):
178 test_uri = test_uri.rstrip('/') + '/info/refs'
241 test_uri = test_uri.rstrip('/') + '/info/refs'
179
242
180 o = self._build_opener(url)
243 o = self._build_opener(url)
181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
244 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
182
245
183 q = {"service": 'git-upload-pack'}
246 q = {"service": 'git-upload-pack'}
184 qs = '?%s' % urllib.urlencode(q)
247 qs = '?%s' % urllib.urlencode(q)
185 cu = "%s%s" % (test_uri, qs)
248 cu = "%s%s" % (test_uri, qs)
186 req = urllib2.Request(cu, None, {})
249 req = urllib2.Request(cu, None, {})
187
250
188 try:
251 try:
189 log.debug("Trying to open URL %s", cleaned_uri)
252 log.debug("Trying to open URL %s", cleaned_uri)
190 resp = o.open(req)
253 resp = o.open(req)
191 if resp.code != 200:
254 if resp.code != 200:
192 raise exceptions.URLError('Return Code is not 200')
255 raise exceptions.URLError('Return Code is not 200')
193 except Exception as e:
256 except Exception as e:
194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
257 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
195 # means it cannot be cloned
258 # means it cannot be cloned
196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
259 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
197
260
198 # now detect if it's proper git repo
261 # now detect if it's proper git repo
199 gitdata = resp.read()
262 gitdata = resp.read()
200 if 'service=git-upload-pack' in gitdata:
263 if 'service=git-upload-pack' in gitdata:
201 pass
264 pass
202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
265 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
203 # old style git can return some other format !
266 # old style git can return some other format !
204 pass
267 pass
205 else:
268 else:
206 raise exceptions.URLError(
269 raise exceptions.URLError(
207 "url [%s] does not look like an git" % (cleaned_uri,))
270 "url [%s] does not look like an git" % (cleaned_uri,))
208
271
209 return True
272 return True
210
273
211 @reraise_safe_exceptions
274 @reraise_safe_exceptions
212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
213 remote_refs = self.fetch(wire, url, apply_refs=False)
276 remote_refs = self.fetch(wire, url, apply_refs=False)
214 repo = self._factory.repo(wire)
277 repo = self._factory.repo(wire)
215 if isinstance(valid_refs, list):
278 if isinstance(valid_refs, list):
216 valid_refs = tuple(valid_refs)
279 valid_refs = tuple(valid_refs)
217
280
218 for k in remote_refs:
281 for k in remote_refs:
219 # only parse heads/tags and skip so called deferred tags
282 # only parse heads/tags and skip so called deferred tags
220 if k.startswith(valid_refs) and not k.endswith(deferred):
283 if k.startswith(valid_refs) and not k.endswith(deferred):
221 repo[k] = remote_refs[k]
284 repo[k] = remote_refs[k]
222
285
223 if update_after_clone:
286 if update_after_clone:
224 # we want to checkout HEAD
287 # we want to checkout HEAD
225 repo["HEAD"] = remote_refs["HEAD"]
288 repo["HEAD"] = remote_refs["HEAD"]
226 index.build_index_from_tree(repo.path, repo.index_path(),
289 index.build_index_from_tree(repo.path, repo.index_path(),
227 repo.object_store, repo["HEAD"].tree)
290 repo.object_store, repo["HEAD"].tree)
228
291
229 # TODO: this is quite complex, check if that can be simplified
292 # TODO: this is quite complex, check if that can be simplified
230 @reraise_safe_exceptions
293 @reraise_safe_exceptions
231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
294 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
232 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
233 object_store = repo.object_store
296 object_store = repo.object_store
234
297
235 # Create tree and populates it with blobs
298 # Create tree and populates it with blobs
236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
299 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
237
300
238 for node in updated:
301 for node in updated:
239 # Compute subdirs if needed
302 # Compute subdirs if needed
240 dirpath, nodename = vcspath.split(node['path'])
303 dirpath, nodename = vcspath.split(node['path'])
241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
304 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
242 parent = commit_tree
305 parent = commit_tree
243 ancestors = [('', parent)]
306 ancestors = [('', parent)]
244
307
245 # Tries to dig for the deepest existing tree
308 # Tries to dig for the deepest existing tree
246 while dirnames:
309 while dirnames:
247 curdir = dirnames.pop(0)
310 curdir = dirnames.pop(0)
248 try:
311 try:
249 dir_id = parent[curdir][1]
312 dir_id = parent[curdir][1]
250 except KeyError:
313 except KeyError:
251 # put curdir back into dirnames and stops
314 # put curdir back into dirnames and stops
252 dirnames.insert(0, curdir)
315 dirnames.insert(0, curdir)
253 break
316 break
254 else:
317 else:
255 # If found, updates parent
318 # If found, updates parent
256 parent = repo[dir_id]
319 parent = repo[dir_id]
257 ancestors.append((curdir, parent))
320 ancestors.append((curdir, parent))
258 # Now parent is deepest existing tree and we need to create
321 # Now parent is deepest existing tree and we need to create
259 # subtrees for dirnames (in reverse order)
322 # subtrees for dirnames (in reverse order)
260 # [this only applies for nodes from added]
323 # [this only applies for nodes from added]
261 new_trees = []
324 new_trees = []
262
325
263 blob = objects.Blob.from_string(node['content'])
326 blob = objects.Blob.from_string(node['content'])
264
327
265 if dirnames:
328 if dirnames:
266 # If there are trees which should be created we need to build
329 # If there are trees which should be created we need to build
267 # them now (in reverse order)
330 # them now (in reverse order)
268 reversed_dirnames = list(reversed(dirnames))
331 reversed_dirnames = list(reversed(dirnames))
269 curtree = objects.Tree()
332 curtree = objects.Tree()
270 curtree[node['node_path']] = node['mode'], blob.id
333 curtree[node['node_path']] = node['mode'], blob.id
271 new_trees.append(curtree)
334 new_trees.append(curtree)
272 for dirname in reversed_dirnames[:-1]:
335 for dirname in reversed_dirnames[:-1]:
273 newtree = objects.Tree()
336 newtree = objects.Tree()
274 newtree[dirname] = (DIR_STAT, curtree.id)
337 newtree[dirname] = (DIR_STAT, curtree.id)
275 new_trees.append(newtree)
338 new_trees.append(newtree)
276 curtree = newtree
339 curtree = newtree
277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
340 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
278 else:
341 else:
279 parent.add(
342 parent.add(
280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
343 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
281
344
282 new_trees.append(parent)
345 new_trees.append(parent)
283 # Update ancestors
346 # Update ancestors
284 reversed_ancestors = reversed(
347 reversed_ancestors = reversed(
285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
348 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
286 for parent, tree, path in reversed_ancestors:
349 for parent, tree, path in reversed_ancestors:
287 parent[path] = (DIR_STAT, tree.id)
350 parent[path] = (DIR_STAT, tree.id)
288 object_store.add_object(tree)
351 object_store.add_object(tree)
289
352
290 object_store.add_object(blob)
353 object_store.add_object(blob)
291 for tree in new_trees:
354 for tree in new_trees:
292 object_store.add_object(tree)
355 object_store.add_object(tree)
293
356
294 for node_path in removed:
357 for node_path in removed:
295 paths = node_path.split('/')
358 paths = node_path.split('/')
296 tree = commit_tree
359 tree = commit_tree
297 trees = [tree]
360 trees = [tree]
298 # Traverse deep into the forest...
361 # Traverse deep into the forest...
299 for path in paths:
362 for path in paths:
300 try:
363 try:
301 obj = repo[tree[path][1]]
364 obj = repo[tree[path][1]]
302 if isinstance(obj, objects.Tree):
365 if isinstance(obj, objects.Tree):
303 trees.append(obj)
366 trees.append(obj)
304 tree = obj
367 tree = obj
305 except KeyError:
368 except KeyError:
306 break
369 break
307 # Cut down the blob and all rotten trees on the way back...
370 # Cut down the blob and all rotten trees on the way back...
308 for path, tree in reversed(zip(paths, trees)):
371 for path, tree in reversed(zip(paths, trees)):
309 del tree[path]
372 del tree[path]
310 if tree:
373 if tree:
311 # This tree still has elements - don't remove it or any
374 # This tree still has elements - don't remove it or any
312 # of it's parents
375 # of it's parents
313 break
376 break
314
377
315 object_store.add_object(commit_tree)
378 object_store.add_object(commit_tree)
316
379
317 # Create commit
380 # Create commit
318 commit = objects.Commit()
381 commit = objects.Commit()
319 commit.tree = commit_tree.id
382 commit.tree = commit_tree.id
320 for k, v in commit_data.iteritems():
383 for k, v in commit_data.iteritems():
321 setattr(commit, k, v)
384 setattr(commit, k, v)
322 object_store.add_object(commit)
385 object_store.add_object(commit)
323
386
324 ref = 'refs/heads/%s' % branch
387 ref = 'refs/heads/%s' % branch
325 repo.refs[ref] = commit.id
388 repo.refs[ref] = commit.id
326
389
327 return commit.id
390 return commit.id
328
391
329 @reraise_safe_exceptions
392 @reraise_safe_exceptions
330 def fetch(self, wire, url, apply_refs=True, refs=None):
393 def fetch(self, wire, url, apply_refs=True, refs=None):
331 if url != 'default' and '://' not in url:
394 if url != 'default' and '://' not in url:
332 client = LocalGitClient(url)
395 client = LocalGitClient(url)
333 else:
396 else:
334 url_obj = url_parser(url)
397 url_obj = url_parser(url)
335 o = self._build_opener(url)
398 o = self._build_opener(url)
336 url, _ = url_obj.authinfo()
399 url, _ = url_obj.authinfo()
337 client = HttpGitClient(base_url=url, opener=o)
400 client = HttpGitClient(base_url=url, opener=o)
338 repo = self._factory.repo(wire)
401 repo = self._factory.repo(wire)
339
402
340 determine_wants = repo.object_store.determine_wants_all
403 determine_wants = repo.object_store.determine_wants_all
341 if refs:
404 if refs:
342 def determine_wants_requested(references):
405 def determine_wants_requested(references):
343 return [references[r] for r in references if r in refs]
406 return [references[r] for r in references if r in refs]
344 determine_wants = determine_wants_requested
407 determine_wants = determine_wants_requested
345
408
346 try:
409 try:
347 remote_refs = client.fetch(
410 remote_refs = client.fetch(
348 path=url, target=repo, determine_wants=determine_wants)
411 path=url, target=repo, determine_wants=determine_wants)
349 except NotGitRepository as e:
412 except NotGitRepository as e:
350 log.warning(
413 log.warning(
351 'Trying to fetch from "%s" failed, not a Git repository.', url)
414 'Trying to fetch from "%s" failed, not a Git repository.', url)
352 # Exception can contain unicode which we convert
415 # Exception can contain unicode which we convert
353 raise exceptions.AbortException(repr(e))
416 raise exceptions.AbortException(repr(e))
354
417
355 # mikhail: client.fetch() returns all the remote refs, but fetches only
418 # mikhail: client.fetch() returns all the remote refs, but fetches only
356 # refs filtered by `determine_wants` function. We need to filter result
419 # refs filtered by `determine_wants` function. We need to filter result
357 # as well
420 # as well
358 if refs:
421 if refs:
359 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
422 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
360
423
361 if apply_refs:
424 if apply_refs:
362 # TODO: johbo: Needs proper test coverage with a git repository
425 # TODO: johbo: Needs proper test coverage with a git repository
363 # that contains a tag object, so that we would end up with
426 # that contains a tag object, so that we would end up with
364 # a peeled ref at this point.
427 # a peeled ref at this point.
365 PEELED_REF_MARKER = '^{}'
428 PEELED_REF_MARKER = '^{}'
366 for k in remote_refs:
429 for k in remote_refs:
367 if k.endswith(PEELED_REF_MARKER):
430 if k.endswith(PEELED_REF_MARKER):
368 log.info("Skipping peeled reference %s", k)
431 log.info("Skipping peeled reference %s", k)
369 continue
432 continue
370 repo[k] = remote_refs[k]
433 repo[k] = remote_refs[k]
371
434
372 if refs:
435 if refs:
373 # mikhail: explicitly set the head to the last ref.
436 # mikhail: explicitly set the head to the last ref.
374 repo['HEAD'] = remote_refs[refs[-1]]
437 repo['HEAD'] = remote_refs[refs[-1]]
375
438
376 # TODO: mikhail: should we return remote_refs here to be
439 # TODO: mikhail: should we return remote_refs here to be
377 # consistent?
440 # consistent?
378 else:
441 else:
379 return remote_refs
442 return remote_refs
380
443
381 @reraise_safe_exceptions
444 @reraise_safe_exceptions
382 def get_remote_refs(self, wire, url):
445 def get_remote_refs(self, wire, url):
383 repo = Repo(url)
446 repo = Repo(url)
384 return repo.get_refs()
447 return repo.get_refs()
385
448
386 @reraise_safe_exceptions
449 @reraise_safe_exceptions
387 def get_description(self, wire):
450 def get_description(self, wire):
388 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
389 return repo.get_description()
452 return repo.get_description()
390
453
391 @reraise_safe_exceptions
454 @reraise_safe_exceptions
392 def get_file_history(self, wire, file_path, commit_id, limit):
455 def get_file_history(self, wire, file_path, commit_id, limit):
393 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
394 include = [commit_id]
457 include = [commit_id]
395 paths = [file_path]
458 paths = [file_path]
396
459
397 walker = repo.get_walker(include, paths=paths, max_entries=limit)
460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
398 return [x.commit.id for x in walker]
461 return [x.commit.id for x in walker]
399
462
400 @reraise_safe_exceptions
463 @reraise_safe_exceptions
401 def get_missing_revs(self, wire, rev1, rev2, path2):
464 def get_missing_revs(self, wire, rev1, rev2, path2):
402 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
403 LocalGitClient(thin_packs=False).fetch(path2, repo)
466 LocalGitClient(thin_packs=False).fetch(path2, repo)
404
467
405 wire_remote = wire.copy()
468 wire_remote = wire.copy()
406 wire_remote['path'] = path2
469 wire_remote['path'] = path2
407 repo_remote = self._factory.repo(wire_remote)
470 repo_remote = self._factory.repo(wire_remote)
408 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
409
472
410 revs = [
473 revs = [
411 x.commit.id
474 x.commit.id
412 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
413 return revs
476 return revs
414
477
415 @reraise_safe_exceptions
478 @reraise_safe_exceptions
416 def get_object(self, wire, sha):
479 def get_object(self, wire, sha):
417 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
418 obj = repo.get_object(sha)
481 obj = repo.get_object(sha)
419 commit_id = obj.id
482 commit_id = obj.id
420
483
421 if isinstance(obj, Tag):
484 if isinstance(obj, Tag):
422 commit_id = obj.object[1]
485 commit_id = obj.object[1]
423
486
424 return {
487 return {
425 'id': obj.id,
488 'id': obj.id,
426 'type': obj.type_name,
489 'type': obj.type_name,
427 'commit_id': commit_id
490 'commit_id': commit_id
428 }
491 }
429
492
430 @reraise_safe_exceptions
493 @reraise_safe_exceptions
431 def get_object_attrs(self, wire, sha, *attrs):
494 def get_object_attrs(self, wire, sha, *attrs):
432 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
433 obj = repo.get_object(sha)
496 obj = repo.get_object(sha)
434 return list(getattr(obj, a) for a in attrs)
497 return list(getattr(obj, a) for a in attrs)
435
498
436 @reraise_safe_exceptions
499 @reraise_safe_exceptions
437 def get_refs(self, wire):
500 def get_refs(self, wire):
438 repo = self._factory.repo(wire)
501 repo = self._factory.repo(wire)
439 result = {}
502 result = {}
440 for ref, sha in repo.refs.as_dict().items():
503 for ref, sha in repo.refs.as_dict().items():
441 peeled_sha = repo.get_peeled(ref)
504 peeled_sha = repo.get_peeled(ref)
442 result[ref] = peeled_sha
505 result[ref] = peeled_sha
443 return result
506 return result
444
507
445 @reraise_safe_exceptions
508 @reraise_safe_exceptions
446 def get_refs_path(self, wire):
509 def get_refs_path(self, wire):
447 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
448 return repo.refs.path
511 return repo.refs.path
449
512
450 @reraise_safe_exceptions
513 @reraise_safe_exceptions
451 def head(self, wire):
514 def head(self, wire):
452 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
453 return repo.head()
516 return repo.head()
454
517
455 @reraise_safe_exceptions
518 @reraise_safe_exceptions
456 def init(self, wire):
519 def init(self, wire):
457 repo_path = str_to_dulwich(wire['path'])
520 repo_path = str_to_dulwich(wire['path'])
458 self.repo = Repo.init(repo_path)
521 self.repo = Repo.init(repo_path)
459
522
460 @reraise_safe_exceptions
523 @reraise_safe_exceptions
461 def init_bare(self, wire):
524 def init_bare(self, wire):
462 repo_path = str_to_dulwich(wire['path'])
525 repo_path = str_to_dulwich(wire['path'])
463 self.repo = Repo.init_bare(repo_path)
526 self.repo = Repo.init_bare(repo_path)
464
527
465 @reraise_safe_exceptions
528 @reraise_safe_exceptions
466 def revision(self, wire, rev):
529 def revision(self, wire, rev):
467 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
468 obj = repo[rev]
531 obj = repo[rev]
469 obj_data = {
532 obj_data = {
470 'id': obj.id,
533 'id': obj.id,
471 }
534 }
472 try:
535 try:
473 obj_data['tree'] = obj.tree
536 obj_data['tree'] = obj.tree
474 except AttributeError:
537 except AttributeError:
475 pass
538 pass
476 return obj_data
539 return obj_data
477
540
478 @reraise_safe_exceptions
541 @reraise_safe_exceptions
479 def commit_attribute(self, wire, rev, attr):
542 def commit_attribute(self, wire, rev, attr):
480 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
481 obj = repo[rev]
544 obj = repo[rev]
482 return getattr(obj, attr)
545 return getattr(obj, attr)
483
546
484 @reraise_safe_exceptions
547 @reraise_safe_exceptions
485 def set_refs(self, wire, key, value):
548 def set_refs(self, wire, key, value):
486 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
487 repo.refs[key] = value
550 repo.refs[key] = value
488
551
489 @reraise_safe_exceptions
552 @reraise_safe_exceptions
490 def remove_ref(self, wire, key):
553 def remove_ref(self, wire, key):
491 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
492 del repo.refs[key]
555 del repo.refs[key]
493
556
494 @reraise_safe_exceptions
557 @reraise_safe_exceptions
495 def tree_changes(self, wire, source_id, target_id):
558 def tree_changes(self, wire, source_id, target_id):
496 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
497 source = repo[source_id].tree if source_id else None
560 source = repo[source_id].tree if source_id else None
498 target = repo[target_id].tree
561 target = repo[target_id].tree
499 result = repo.object_store.tree_changes(source, target)
562 result = repo.object_store.tree_changes(source, target)
500 return list(result)
563 return list(result)
501
564
502 @reraise_safe_exceptions
565 @reraise_safe_exceptions
503 def tree_items(self, wire, tree_id):
566 def tree_items(self, wire, tree_id):
504 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
505 tree = repo[tree_id]
568 tree = repo[tree_id]
506
569
507 result = []
570 result = []
508 for item in tree.iteritems():
571 for item in tree.iteritems():
509 item_sha = item.sha
572 item_sha = item.sha
510 item_mode = item.mode
573 item_mode = item.mode
511
574
512 if FILE_MODE(item_mode) == GIT_LINK:
575 if FILE_MODE(item_mode) == GIT_LINK:
513 item_type = "link"
576 item_type = "link"
514 else:
577 else:
515 item_type = repo[item_sha].type_name
578 item_type = repo[item_sha].type_name
516
579
517 result.append((item.path, item_mode, item_sha, item_type))
580 result.append((item.path, item_mode, item_sha, item_type))
518 return result
581 return result
519
582
520 @reraise_safe_exceptions
583 @reraise_safe_exceptions
521 def update_server_info(self, wire):
584 def update_server_info(self, wire):
522 repo = self._factory.repo(wire)
585 repo = self._factory.repo(wire)
523 update_server_info(repo)
586 update_server_info(repo)
524
587
525 @reraise_safe_exceptions
588 @reraise_safe_exceptions
526 def discover_git_version(self):
589 def discover_git_version(self):
527 stdout, _ = self.run_git_command(
590 stdout, _ = self.run_git_command(
528 {}, ['--version'], _bare=True, _safe=True)
591 {}, ['--version'], _bare=True, _safe=True)
529 prefix = 'git version'
592 prefix = 'git version'
530 if stdout.startswith(prefix):
593 if stdout.startswith(prefix):
531 stdout = stdout[len(prefix):]
594 stdout = stdout[len(prefix):]
532 return stdout.strip()
595 return stdout.strip()
533
596
534 @reraise_safe_exceptions
597 @reraise_safe_exceptions
535 def run_git_command(self, wire, cmd, **opts):
598 def run_git_command(self, wire, cmd, **opts):
536 path = wire.get('path', None)
599 path = wire.get('path', None)
537
600
538 if path and os.path.isdir(path):
601 if path and os.path.isdir(path):
539 opts['cwd'] = path
602 opts['cwd'] = path
540
603
541 if '_bare' in opts:
604 if '_bare' in opts:
542 _copts = []
605 _copts = []
543 del opts['_bare']
606 del opts['_bare']
544 else:
607 else:
545 _copts = ['-c', 'core.quotepath=false', ]
608 _copts = ['-c', 'core.quotepath=false', ]
546 safe_call = False
609 safe_call = False
547 if '_safe' in opts:
610 if '_safe' in opts:
548 # no exc on failure
611 # no exc on failure
549 del opts['_safe']
612 del opts['_safe']
550 safe_call = True
613 safe_call = True
551
614
552 gitenv = os.environ.copy()
615 gitenv = os.environ.copy()
553 gitenv.update(opts.pop('extra_env', {}))
616 gitenv.update(opts.pop('extra_env', {}))
554 # need to clean fix GIT_DIR !
617 # need to clean fix GIT_DIR !
555 if 'GIT_DIR' in gitenv:
618 if 'GIT_DIR' in gitenv:
556 del gitenv['GIT_DIR']
619 del gitenv['GIT_DIR']
557 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
620 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
558
621
559 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
622 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
560
623
561 try:
624 try:
562 _opts = {'env': gitenv, 'shell': False}
625 _opts = {'env': gitenv, 'shell': False}
563 _opts.update(opts)
626 _opts.update(opts)
564 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
627 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
565
628
566 return ''.join(p), ''.join(p.error)
629 return ''.join(p), ''.join(p.error)
567 except (EnvironmentError, OSError) as err:
630 except (EnvironmentError, OSError) as err:
568 tb_err = ("Couldn't run git command (%s).\n"
631 tb_err = ("Couldn't run git command (%s).\n"
569 "Original error was:%s\n" % (cmd, err))
632 "Original error was:%s\n" % (cmd, err))
570 log.exception(tb_err)
633 log.exception(tb_err)
571 if safe_call:
634 if safe_call:
572 return '', err
635 return '', err
573 else:
636 else:
574 raise exceptions.VcsException(tb_err)
637 raise exceptions.VcsException(tb_err)
575
638
576
639
577 def str_to_dulwich(value):
640 def str_to_dulwich(value):
578 """
641 """
579 Dulwich 0.10.1a requires `unicode` objects to be passed in.
642 Dulwich 0.10.1a requires `unicode` objects to be passed in.
580 """
643 """
581 return value.decode(settings.WIRE_ENCODING)
644 return value.decode(settings.WIRE_ENCODING)
@@ -1,725 +1,727 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import sys
22 import urllib
21 import urllib
23 import urllib2
22 import urllib2
24
23
25 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
26 from mercurial import commands
28 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
37 InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 # force mercurial to only use 1 thread, otherwise it may try to set a
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
57 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
58 baseui.setconfig('worker', 'numcpus', 1)
59
59
60 # If there is no config for the largefiles extension, we explicitly disable
60 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from repositories hgrc file. Recent
61 # it here. This overrides settings from repositories hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
63 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
65 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
66 baseui.setconfig('extensions', 'largefiles', '!')
67
67
68 return baseui
68 return baseui
69
69
70
70
71 def reraise_safe_exceptions(func):
71 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
72 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
73 def wrapper(*args, **kwargs):
74 try:
74 try:
75 return func(*args, **kwargs)
75 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
76 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
77 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
78 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
79 raise_from_original(exceptions.LookupException)
80 except RequirementError:
80 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
81 raise_from_original(exceptions.RequirementException)
82 except RepoError:
82 except RepoError:
83 raise_from_original(exceptions.VcsException)
83 raise_from_original(exceptions.VcsException)
84 except LookupError:
84 except LookupError:
85 raise_from_original(exceptions.LookupException)
85 raise_from_original(exceptions.LookupException)
86 except Exception as e:
86 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
87 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
88 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
89 raise_from_original(exceptions.UnhandledException)
90 raise
90 raise
91 return wrapper
91 return wrapper
92
92
93
93
94 def raise_from_original(new_type):
95 """
96 Raise a new exception type with original args and traceback.
97 """
98 _, original, traceback = sys.exc_info()
99 try:
100 raise new_type(*original.args), None, traceback
101 finally:
102 del traceback
103
104
105 class MercurialFactory(RepoFactory):
94 class MercurialFactory(RepoFactory):
106
95
107 def _create_config(self, config, hooks=True):
96 def _create_config(self, config, hooks=True):
108 if not hooks:
97 if not hooks:
109 hooks_to_clean = frozenset((
98 hooks_to_clean = frozenset((
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
99 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 new_config = []
101 new_config = []
113 for section, option, value in config:
102 for section, option, value in config:
114 if section == 'hooks' and option in hooks_to_clean:
103 if section == 'hooks' and option in hooks_to_clean:
115 continue
104 continue
116 new_config.append((section, option, value))
105 new_config.append((section, option, value))
117 config = new_config
106 config = new_config
118
107
119 baseui = make_ui_from_config(config)
108 baseui = make_ui_from_config(config)
120 return baseui
109 return baseui
121
110
122 def _create_repo(self, wire, create):
111 def _create_repo(self, wire, create):
123 baseui = self._create_config(wire["config"])
112 baseui = self._create_config(wire["config"])
124 return localrepository(baseui, wire["path"], create)
113 return localrepository(baseui, wire["path"], create)
125
114
126
115
127 class HgRemote(object):
116 class HgRemote(object):
128
117
129 def __init__(self, factory):
118 def __init__(self, factory):
130 self._factory = factory
119 self._factory = factory
131
120
132 self._bulk_methods = {
121 self._bulk_methods = {
133 "affected_files": self.ctx_files,
122 "affected_files": self.ctx_files,
134 "author": self.ctx_user,
123 "author": self.ctx_user,
135 "branch": self.ctx_branch,
124 "branch": self.ctx_branch,
136 "children": self.ctx_children,
125 "children": self.ctx_children,
137 "date": self.ctx_date,
126 "date": self.ctx_date,
138 "message": self.ctx_description,
127 "message": self.ctx_description,
139 "parents": self.ctx_parents,
128 "parents": self.ctx_parents,
140 "status": self.ctx_status,
129 "status": self.ctx_status,
141 "_file_paths": self.ctx_list,
130 "_file_paths": self.ctx_list,
142 }
131 }
143
132
144 @reraise_safe_exceptions
133 @reraise_safe_exceptions
145 def discover_hg_version(self):
134 def discover_hg_version(self):
146 from mercurial import util
135 from mercurial import util
147 return util.version()
136 return util.version()
148
137
149 @reraise_safe_exceptions
138 @reraise_safe_exceptions
150 def archive_repo(self, archive_path, mtime, file_info, kind):
139 def archive_repo(self, archive_path, mtime, file_info, kind):
151 if kind == "tgz":
140 if kind == "tgz":
152 archiver = archival.tarit(archive_path, mtime, "gz")
141 archiver = archival.tarit(archive_path, mtime, "gz")
153 elif kind == "tbz2":
142 elif kind == "tbz2":
154 archiver = archival.tarit(archive_path, mtime, "bz2")
143 archiver = archival.tarit(archive_path, mtime, "bz2")
155 elif kind == 'zip':
144 elif kind == 'zip':
156 archiver = archival.zipit(archive_path, mtime)
145 archiver = archival.zipit(archive_path, mtime)
157 else:
146 else:
158 raise exceptions.ArchiveException(
147 raise exceptions.ArchiveException(
159 'Remote does not support: "%s".' % kind)
148 'Remote does not support: "%s".' % kind)
160
149
161 for f_path, f_mode, f_is_link, f_content in file_info:
150 for f_path, f_mode, f_is_link, f_content in file_info:
162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
151 archiver.addfile(f_path, f_mode, f_is_link, f_content)
163 archiver.done()
152 archiver.done()
164
153
165 @reraise_safe_exceptions
154 @reraise_safe_exceptions
166 def bookmarks(self, wire):
155 def bookmarks(self, wire):
167 repo = self._factory.repo(wire)
156 repo = self._factory.repo(wire)
168 return dict(repo._bookmarks)
157 return dict(repo._bookmarks)
169
158
170 @reraise_safe_exceptions
159 @reraise_safe_exceptions
171 def branches(self, wire, normal, closed):
160 def branches(self, wire, normal, closed):
172 repo = self._factory.repo(wire)
161 repo = self._factory.repo(wire)
173 iter_branches = repo.branchmap().iterbranches()
162 iter_branches = repo.branchmap().iterbranches()
174 bt = {}
163 bt = {}
175 for branch_name, _heads, tip, is_closed in iter_branches:
164 for branch_name, _heads, tip, is_closed in iter_branches:
176 if normal and not is_closed:
165 if normal and not is_closed:
177 bt[branch_name] = tip
166 bt[branch_name] = tip
178 if closed and is_closed:
167 if closed and is_closed:
179 bt[branch_name] = tip
168 bt[branch_name] = tip
180
169
181 return bt
170 return bt
182
171
183 @reraise_safe_exceptions
172 @reraise_safe_exceptions
184 def bulk_request(self, wire, rev, pre_load):
173 def bulk_request(self, wire, rev, pre_load):
185 result = {}
174 result = {}
186 for attr in pre_load:
175 for attr in pre_load:
187 try:
176 try:
188 method = self._bulk_methods[attr]
177 method = self._bulk_methods[attr]
189 result[attr] = method(wire, rev)
178 result[attr] = method(wire, rev)
190 except KeyError:
179 except KeyError:
191 raise exceptions.VcsException(
180 raise exceptions.VcsException(
192 'Unknown bulk attribute: "%s"' % attr)
181 'Unknown bulk attribute: "%s"' % attr)
193 return result
182 return result
194
183
195 @reraise_safe_exceptions
184 @reraise_safe_exceptions
196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
185 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
186 baseui = self._factory._create_config(wire["config"], hooks=hooks)
198 clone(baseui, source, dest, noupdate=not update_after_clone)
187 clone(baseui, source, dest, noupdate=not update_after_clone)
199
188
200 @reraise_safe_exceptions
189 @reraise_safe_exceptions
201 def commitctx(
190 def commitctx(
202 self, wire, message, parents, commit_time, commit_timezone,
191 self, wire, message, parents, commit_time, commit_timezone,
203 user, files, extra, removed, updated):
192 user, files, extra, removed, updated):
204
193
205 def _filectxfn(_repo, memctx, path):
194 def _filectxfn(_repo, memctx, path):
206 """
195 """
207 Marks given path as added/changed/removed in a given _repo. This is
196 Marks given path as added/changed/removed in a given _repo. This is
208 for internal mercurial commit function.
197 for internal mercurial commit function.
209 """
198 """
210
199
211 # check if this path is removed
200 # check if this path is removed
212 if path in removed:
201 if path in removed:
213 # returning None is a way to mark node for removal
202 # returning None is a way to mark node for removal
214 return None
203 return None
215
204
216 # check if this path is added
205 # check if this path is added
217 for node in updated:
206 for node in updated:
218 if node['path'] == path:
207 if node['path'] == path:
219 return memfilectx(
208 return memfilectx(
220 _repo,
209 _repo,
221 path=node['path'],
210 path=node['path'],
222 data=node['content'],
211 data=node['content'],
223 islink=False,
212 islink=False,
224 isexec=bool(node['mode'] & stat.S_IXUSR),
213 isexec=bool(node['mode'] & stat.S_IXUSR),
225 copied=False,
214 copied=False,
226 memctx=memctx)
215 memctx=memctx)
227
216
228 raise exceptions.AbortException(
217 raise exceptions.AbortException(
229 "Given path haven't been marked as added, "
218 "Given path haven't been marked as added, "
230 "changed or removed (%s)" % path)
219 "changed or removed (%s)" % path)
231
220
232 repo = self._factory.repo(wire)
221 repo = self._factory.repo(wire)
233
222
234 commit_ctx = memctx(
223 commit_ctx = memctx(
235 repo=repo,
224 repo=repo,
236 parents=parents,
225 parents=parents,
237 text=message,
226 text=message,
238 files=files,
227 files=files,
239 filectxfn=_filectxfn,
228 filectxfn=_filectxfn,
240 user=user,
229 user=user,
241 date=(commit_time, commit_timezone),
230 date=(commit_time, commit_timezone),
242 extra=extra)
231 extra=extra)
243
232
244 n = repo.commitctx(commit_ctx)
233 n = repo.commitctx(commit_ctx)
245 new_id = hex(n)
234 new_id = hex(n)
246
235
247 return new_id
236 return new_id
248
237
249 @reraise_safe_exceptions
238 @reraise_safe_exceptions
250 def ctx_branch(self, wire, revision):
239 def ctx_branch(self, wire, revision):
251 repo = self._factory.repo(wire)
240 repo = self._factory.repo(wire)
252 ctx = repo[revision]
241 ctx = repo[revision]
253 return ctx.branch()
242 return ctx.branch()
254
243
255 @reraise_safe_exceptions
244 @reraise_safe_exceptions
256 def ctx_children(self, wire, revision):
245 def ctx_children(self, wire, revision):
257 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
258 ctx = repo[revision]
247 ctx = repo[revision]
259 return [child.rev() for child in ctx.children()]
248 return [child.rev() for child in ctx.children()]
260
249
261 @reraise_safe_exceptions
250 @reraise_safe_exceptions
262 def ctx_date(self, wire, revision):
251 def ctx_date(self, wire, revision):
263 repo = self._factory.repo(wire)
252 repo = self._factory.repo(wire)
264 ctx = repo[revision]
253 ctx = repo[revision]
265 return ctx.date()
254 return ctx.date()
266
255
267 @reraise_safe_exceptions
256 @reraise_safe_exceptions
268 def ctx_description(self, wire, revision):
257 def ctx_description(self, wire, revision):
269 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
270 ctx = repo[revision]
259 ctx = repo[revision]
271 return ctx.description()
260 return ctx.description()
272
261
273 @reraise_safe_exceptions
262 @reraise_safe_exceptions
274 def ctx_diff(
263 def ctx_diff(
275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
264 self, wire, revision, git=True, ignore_whitespace=True, context=3):
276 repo = self._factory.repo(wire)
265 repo = self._factory.repo(wire)
277 ctx = repo[revision]
266 ctx = repo[revision]
278 result = ctx.diff(
267 result = ctx.diff(
279 git=git, ignore_whitespace=ignore_whitespace, context=context)
268 git=git, ignore_whitespace=ignore_whitespace, context=context)
280 return list(result)
269 return list(result)
281
270
282 @reraise_safe_exceptions
271 @reraise_safe_exceptions
283 def ctx_files(self, wire, revision):
272 def ctx_files(self, wire, revision):
284 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
285 ctx = repo[revision]
274 ctx = repo[revision]
286 return ctx.files()
275 return ctx.files()
287
276
288 @reraise_safe_exceptions
277 @reraise_safe_exceptions
289 def ctx_list(self, path, revision):
278 def ctx_list(self, path, revision):
290 repo = self._factory.repo(path)
279 repo = self._factory.repo(path)
291 ctx = repo[revision]
280 ctx = repo[revision]
292 return list(ctx)
281 return list(ctx)
293
282
294 @reraise_safe_exceptions
283 @reraise_safe_exceptions
295 def ctx_parents(self, wire, revision):
284 def ctx_parents(self, wire, revision):
296 repo = self._factory.repo(wire)
285 repo = self._factory.repo(wire)
297 ctx = repo[revision]
286 ctx = repo[revision]
298 return [parent.rev() for parent in ctx.parents()]
287 return [parent.rev() for parent in ctx.parents()]
299
288
300 @reraise_safe_exceptions
289 @reraise_safe_exceptions
301 def ctx_substate(self, wire, revision):
290 def ctx_substate(self, wire, revision):
302 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
303 ctx = repo[revision]
292 ctx = repo[revision]
304 return ctx.substate
293 return ctx.substate
305
294
306 @reraise_safe_exceptions
295 @reraise_safe_exceptions
307 def ctx_status(self, wire, revision):
296 def ctx_status(self, wire, revision):
308 repo = self._factory.repo(wire)
297 repo = self._factory.repo(wire)
309 ctx = repo[revision]
298 ctx = repo[revision]
310 status = repo[ctx.p1().node()].status(other=ctx.node())
299 status = repo[ctx.p1().node()].status(other=ctx.node())
311 # object of status (odd, custom named tuple in mercurial) is not
300 # object of status (odd, custom named tuple in mercurial) is not
312 # correctly serializable via Pyro, we make it a list, as the underling
301 # correctly serializable via Pyro, we make it a list, as the underling
313 # API expects this to be a list
302 # API expects this to be a list
314 return list(status)
303 return list(status)
315
304
316 @reraise_safe_exceptions
305 @reraise_safe_exceptions
317 def ctx_user(self, wire, revision):
306 def ctx_user(self, wire, revision):
318 repo = self._factory.repo(wire)
307 repo = self._factory.repo(wire)
319 ctx = repo[revision]
308 ctx = repo[revision]
320 return ctx.user()
309 return ctx.user()
321
310
322 @reraise_safe_exceptions
311 @reraise_safe_exceptions
323 def check_url(self, url, config):
312 def check_url(self, url, config):
324 _proto = None
313 _proto = None
325 if '+' in url[:url.find('://')]:
314 if '+' in url[:url.find('://')]:
326 _proto = url[0:url.find('+')]
315 _proto = url[0:url.find('+')]
327 url = url[url.find('+') + 1:]
316 url = url[url.find('+') + 1:]
328 handlers = []
317 handlers = []
329 url_obj = url_parser(url)
318 url_obj = url_parser(url)
330 test_uri, authinfo = url_obj.authinfo()
319 test_uri, authinfo = url_obj.authinfo()
331 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
320 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
332 url_obj.query = obfuscate_qs(url_obj.query)
321 url_obj.query = obfuscate_qs(url_obj.query)
333
322
334 cleaned_uri = str(url_obj)
323 cleaned_uri = str(url_obj)
335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
324 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
336
325
337 if authinfo:
326 if authinfo:
338 # create a password manager
327 # create a password manager
339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
328 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
340 passmgr.add_password(*authinfo)
329 passmgr.add_password(*authinfo)
341
330
342 handlers.extend((httpbasicauthhandler(passmgr),
331 handlers.extend((httpbasicauthhandler(passmgr),
343 httpdigestauthhandler(passmgr)))
332 httpdigestauthhandler(passmgr)))
344
333
345 o = urllib2.build_opener(*handlers)
334 o = urllib2.build_opener(*handlers)
346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
335 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
347 ('Accept', 'application/mercurial-0.1')]
336 ('Accept', 'application/mercurial-0.1')]
348
337
349 q = {"cmd": 'between'}
338 q = {"cmd": 'between'}
350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
339 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
351 qs = '?%s' % urllib.urlencode(q)
340 qs = '?%s' % urllib.urlencode(q)
352 cu = "%s%s" % (test_uri, qs)
341 cu = "%s%s" % (test_uri, qs)
353 req = urllib2.Request(cu, None, {})
342 req = urllib2.Request(cu, None, {})
354
343
355 try:
344 try:
356 log.debug("Trying to open URL %s", cleaned_uri)
345 log.debug("Trying to open URL %s", cleaned_uri)
357 resp = o.open(req)
346 resp = o.open(req)
358 if resp.code != 200:
347 if resp.code != 200:
359 raise exceptions.URLError('Return Code is not 200')
348 raise exceptions.URLError('Return Code is not 200')
360 except Exception as e:
349 except Exception as e:
361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
350 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
362 # means it cannot be cloned
351 # means it cannot be cloned
363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
352 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
364
353
365 # now check if it's a proper hg repo, but don't do it for svn
354 # now check if it's a proper hg repo, but don't do it for svn
366 try:
355 try:
367 if _proto == 'svn':
356 if _proto == 'svn':
368 pass
357 pass
369 else:
358 else:
370 # check for pure hg repos
359 # check for pure hg repos
371 log.debug(
360 log.debug(
372 "Verifying if URL is a Mercurial repository: %s",
361 "Verifying if URL is a Mercurial repository: %s",
373 cleaned_uri)
362 cleaned_uri)
374 httppeer(make_ui_from_config(config), url).lookup('tip')
363 httppeer(make_ui_from_config(config), url).lookup('tip')
375 except Exception as e:
364 except Exception as e:
376 log.warning("URL is not a valid Mercurial repository: %s",
365 log.warning("URL is not a valid Mercurial repository: %s",
377 cleaned_uri)
366 cleaned_uri)
378 raise exceptions.URLError(
367 raise exceptions.URLError(
379 "url [%s] does not look like an hg repo org_exc: %s"
368 "url [%s] does not look like an hg repo org_exc: %s"
380 % (cleaned_uri, e))
369 % (cleaned_uri, e))
381
370
382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
371 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
383 return True
372 return True
384
373
385 @reraise_safe_exceptions
374 @reraise_safe_exceptions
386 def diff(
375 def diff(
387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
376 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
388 context):
377 context):
389 repo = self._factory.repo(wire)
378 repo = self._factory.repo(wire)
390
379
391 if file_filter:
380 if file_filter:
392 match_filter = match(file_filter[0], '', [file_filter[1]])
381 match_filter = match(file_filter[0], '', [file_filter[1]])
393 else:
382 else:
394 match_filter = file_filter
383 match_filter = file_filter
395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
384 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
396
385
397 try:
386 try:
398 return "".join(patch.diff(
387 return "".join(patch.diff(
399 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
388 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
400 except RepoLookupError:
389 except RepoLookupError:
401 raise exceptions.LookupException()
390 raise exceptions.LookupException()
402
391
403 @reraise_safe_exceptions
392 @reraise_safe_exceptions
404 def file_history(self, wire, revision, path, limit):
393 def file_history(self, wire, revision, path, limit):
405 repo = self._factory.repo(wire)
394 repo = self._factory.repo(wire)
406
395
407 ctx = repo[revision]
396 ctx = repo[revision]
408 fctx = ctx.filectx(path)
397 fctx = ctx.filectx(path)
409
398
410 def history_iter():
399 def history_iter():
411 limit_rev = fctx.rev()
400 limit_rev = fctx.rev()
412 for obj in reversed(list(fctx.filelog())):
401 for obj in reversed(list(fctx.filelog())):
413 obj = fctx.filectx(obj)
402 obj = fctx.filectx(obj)
414 if limit_rev >= obj.rev():
403 if limit_rev >= obj.rev():
415 yield obj
404 yield obj
416
405
417 history = []
406 history = []
418 for cnt, obj in enumerate(history_iter()):
407 for cnt, obj in enumerate(history_iter()):
419 if limit and cnt >= limit:
408 if limit and cnt >= limit:
420 break
409 break
421 history.append(hex(obj.node()))
410 history.append(hex(obj.node()))
422
411
423 return [x for x in history]
412 return [x for x in history]
424
413
425 @reraise_safe_exceptions
414 @reraise_safe_exceptions
426 def file_history_untill(self, wire, revision, path, limit):
415 def file_history_untill(self, wire, revision, path, limit):
427 repo = self._factory.repo(wire)
416 repo = self._factory.repo(wire)
428 ctx = repo[revision]
417 ctx = repo[revision]
429 fctx = ctx.filectx(path)
418 fctx = ctx.filectx(path)
430
419
431 file_log = list(fctx.filelog())
420 file_log = list(fctx.filelog())
432 if limit:
421 if limit:
433 # Limit to the last n items
422 # Limit to the last n items
434 file_log = file_log[-limit:]
423 file_log = file_log[-limit:]
435
424
436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
425 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
437
426
438 @reraise_safe_exceptions
427 @reraise_safe_exceptions
439 def fctx_annotate(self, wire, revision, path):
428 def fctx_annotate(self, wire, revision, path):
440 repo = self._factory.repo(wire)
429 repo = self._factory.repo(wire)
441 ctx = repo[revision]
430 ctx = repo[revision]
442 fctx = ctx.filectx(path)
431 fctx = ctx.filectx(path)
443
432
444 result = []
433 result = []
445 for i, annotate_data in enumerate(fctx.annotate()):
434 for i, annotate_data in enumerate(fctx.annotate()):
446 ln_no = i + 1
435 ln_no = i + 1
447 node_info, content = annotate_data
436 node_info, content = annotate_data
448 sha = hex(node_info[0].node())
437 sha = hex(node_info[0].node())
449 result.append((ln_no, sha, content))
438 result.append((ln_no, sha, content))
450 return result
439 return result
451
440
452 @reraise_safe_exceptions
441 @reraise_safe_exceptions
453 def fctx_data(self, wire, revision, path):
442 def fctx_data(self, wire, revision, path):
454 repo = self._factory.repo(wire)
443 repo = self._factory.repo(wire)
455 ctx = repo[revision]
444 ctx = repo[revision]
456 fctx = ctx.filectx(path)
445 fctx = ctx.filectx(path)
457 return fctx.data()
446 return fctx.data()
458
447
459 @reraise_safe_exceptions
448 @reraise_safe_exceptions
460 def fctx_flags(self, wire, revision, path):
449 def fctx_flags(self, wire, revision, path):
461 repo = self._factory.repo(wire)
450 repo = self._factory.repo(wire)
462 ctx = repo[revision]
451 ctx = repo[revision]
463 fctx = ctx.filectx(path)
452 fctx = ctx.filectx(path)
464 return fctx.flags()
453 return fctx.flags()
465
454
466 @reraise_safe_exceptions
455 @reraise_safe_exceptions
467 def fctx_size(self, wire, revision, path):
456 def fctx_size(self, wire, revision, path):
468 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
469 ctx = repo[revision]
458 ctx = repo[revision]
470 fctx = ctx.filectx(path)
459 fctx = ctx.filectx(path)
471 return fctx.size()
460 return fctx.size()
472
461
473 @reraise_safe_exceptions
462 @reraise_safe_exceptions
474 def get_all_commit_ids(self, wire, name):
463 def get_all_commit_ids(self, wire, name):
475 repo = self._factory.repo(wire)
464 repo = self._factory.repo(wire)
476 revs = repo.filtered(name).changelog.index
465 revs = repo.filtered(name).changelog.index
477 return map(lambda x: hex(x[7]), revs)[:-1]
466 return map(lambda x: hex(x[7]), revs)[:-1]
478
467
479 @reraise_safe_exceptions
468 @reraise_safe_exceptions
480 def get_config_value(self, wire, section, name, untrusted=False):
469 def get_config_value(self, wire, section, name, untrusted=False):
481 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
482 return repo.ui.config(section, name, untrusted=untrusted)
471 return repo.ui.config(section, name, untrusted=untrusted)
483
472
484 @reraise_safe_exceptions
473 @reraise_safe_exceptions
485 def get_config_bool(self, wire, section, name, untrusted=False):
474 def get_config_bool(self, wire, section, name, untrusted=False):
486 repo = self._factory.repo(wire)
475 repo = self._factory.repo(wire)
487 return repo.ui.configbool(section, name, untrusted=untrusted)
476 return repo.ui.configbool(section, name, untrusted=untrusted)
488
477
489 @reraise_safe_exceptions
478 @reraise_safe_exceptions
490 def get_config_list(self, wire, section, name, untrusted=False):
479 def get_config_list(self, wire, section, name, untrusted=False):
491 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
492 return repo.ui.configlist(section, name, untrusted=untrusted)
481 return repo.ui.configlist(section, name, untrusted=untrusted)
493
482
494 @reraise_safe_exceptions
483 @reraise_safe_exceptions
495 def is_large_file(self, wire, path):
484 def is_large_file(self, wire, path):
496 return largefiles.lfutil.isstandin(path)
485 return largefiles.lfutil.isstandin(path)
497
486
498 @reraise_safe_exceptions
487 @reraise_safe_exceptions
499 def in_store(self, wire, sha):
488 def in_largefiles_store(self, wire, sha):
500 repo = self._factory.repo(wire)
489 repo = self._factory.repo(wire)
501 return largefiles.lfutil.instore(repo, sha)
490 return largefiles.lfutil.instore(repo, sha)
502
491
503 @reraise_safe_exceptions
492 @reraise_safe_exceptions
504 def in_user_cache(self, wire, sha):
493 def in_user_cache(self, wire, sha):
505 repo = self._factory.repo(wire)
494 repo = self._factory.repo(wire)
506 return largefiles.lfutil.inusercache(repo.ui, sha)
495 return largefiles.lfutil.inusercache(repo.ui, sha)
507
496
508 @reraise_safe_exceptions
497 @reraise_safe_exceptions
509 def store_path(self, wire, sha):
498 def store_path(self, wire, sha):
510 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
511 return largefiles.lfutil.storepath(repo, sha)
500 return largefiles.lfutil.storepath(repo, sha)
512
501
513 @reraise_safe_exceptions
502 @reraise_safe_exceptions
514 def link(self, wire, sha, path):
503 def link(self, wire, sha, path):
515 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
516 largefiles.lfutil.link(
505 largefiles.lfutil.link(
517 largefiles.lfutil.usercachepath(repo.ui, sha), path)
506 largefiles.lfutil.usercachepath(repo.ui, sha), path)
518
507
519 @reraise_safe_exceptions
508 @reraise_safe_exceptions
520 def localrepository(self, wire, create=False):
509 def localrepository(self, wire, create=False):
521 self._factory.repo(wire, create=create)
510 self._factory.repo(wire, create=create)
522
511
523 @reraise_safe_exceptions
512 @reraise_safe_exceptions
524 def lookup(self, wire, revision, both):
513 def lookup(self, wire, revision, both):
525 # TODO Paris: Ugly hack to "deserialize" long for msgpack
514 # TODO Paris: Ugly hack to "deserialize" long for msgpack
526 if isinstance(revision, float):
515 if isinstance(revision, float):
527 revision = long(revision)
516 revision = long(revision)
528 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
529 try:
518 try:
530 ctx = repo[revision]
519 ctx = repo[revision]
531 except RepoLookupError:
520 except RepoLookupError:
532 raise exceptions.LookupException(revision)
521 raise exceptions.LookupException(revision)
533 except LookupError as e:
522 except LookupError as e:
534 raise exceptions.LookupException(e.name)
523 raise exceptions.LookupException(e.name)
535
524
536 if not both:
525 if not both:
537 return ctx.hex()
526 return ctx.hex()
538
527
539 ctx = repo[ctx.hex()]
528 ctx = repo[ctx.hex()]
540 return ctx.hex(), ctx.rev()
529 return ctx.hex(), ctx.rev()
541
530
542 @reraise_safe_exceptions
531 @reraise_safe_exceptions
543 def pull(self, wire, url, commit_ids=None):
532 def pull(self, wire, url, commit_ids=None):
544 repo = self._factory.repo(wire)
533 repo = self._factory.repo(wire)
545 remote = peer(repo, {}, url)
534 remote = peer(repo, {}, url)
546 if commit_ids:
535 if commit_ids:
547 commit_ids = [bin(commit_id) for commit_id in commit_ids]
536 commit_ids = [bin(commit_id) for commit_id in commit_ids]
548
537
549 return exchange.pull(
538 return exchange.pull(
550 repo, remote, heads=commit_ids, force=None).cgresult
539 repo, remote, heads=commit_ids, force=None).cgresult
551
540
552 @reraise_safe_exceptions
541 @reraise_safe_exceptions
553 def revision(self, wire, rev):
542 def revision(self, wire, rev):
554 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
555 ctx = repo[rev]
544 ctx = repo[rev]
556 return ctx.rev()
545 return ctx.rev()
557
546
558 @reraise_safe_exceptions
547 @reraise_safe_exceptions
559 def rev_range(self, wire, filter):
548 def rev_range(self, wire, filter):
560 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
561 revisions = [rev for rev in revrange(repo, filter)]
550 revisions = [rev for rev in revrange(repo, filter)]
562 return revisions
551 return revisions
563
552
564 @reraise_safe_exceptions
553 @reraise_safe_exceptions
565 def rev_range_hash(self, wire, node):
554 def rev_range_hash(self, wire, node):
566 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
567
556
568 def get_revs(repo, rev_opt):
557 def get_revs(repo, rev_opt):
569 if rev_opt:
558 if rev_opt:
570 revs = revrange(repo, rev_opt)
559 revs = revrange(repo, rev_opt)
571 if len(revs) == 0:
560 if len(revs) == 0:
572 return (nullrev, nullrev)
561 return (nullrev, nullrev)
573 return max(revs), min(revs)
562 return max(revs), min(revs)
574 else:
563 else:
575 return len(repo) - 1, 0
564 return len(repo) - 1, 0
576
565
577 stop, start = get_revs(repo, [node + ':'])
566 stop, start = get_revs(repo, [node + ':'])
578 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
567 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
579 return revs
568 return revs
580
569
581 @reraise_safe_exceptions
570 @reraise_safe_exceptions
582 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
571 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
583 other_path = kwargs.pop('other_path', None)
572 other_path = kwargs.pop('other_path', None)
584
573
585 # case when we want to compare two independent repositories
574 # case when we want to compare two independent repositories
586 if other_path and other_path != wire["path"]:
575 if other_path and other_path != wire["path"]:
587 baseui = self._factory._create_config(wire["config"])
576 baseui = self._factory._create_config(wire["config"])
588 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
577 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
589 else:
578 else:
590 repo = self._factory.repo(wire)
579 repo = self._factory.repo(wire)
591 return list(repo.revs(rev_spec, *args))
580 return list(repo.revs(rev_spec, *args))
592
581
593 @reraise_safe_exceptions
582 @reraise_safe_exceptions
594 def strip(self, wire, revision, update, backup):
583 def strip(self, wire, revision, update, backup):
595 repo = self._factory.repo(wire)
584 repo = self._factory.repo(wire)
596 ctx = repo[revision]
585 ctx = repo[revision]
597 hgext_strip(
586 hgext_strip(
598 repo.baseui, repo, ctx.node(), update=update, backup=backup)
587 repo.baseui, repo, ctx.node(), update=update, backup=backup)
599
588
600 @reraise_safe_exceptions
589 @reraise_safe_exceptions
590 def verify(self, wire,):
591 repo = self._factory.repo(wire)
592 baseui = self._factory._create_config(wire['config'])
593 baseui.setconfig('ui', 'quiet', 'false')
594 output = io.BytesIO()
595
596 def write(data, **unused_kwargs):
597 output.write(data)
598 baseui.write = write
599
600 repo.ui = baseui
601 verify.verify(repo)
602 return output.getvalue()
603
604 @reraise_safe_exceptions
601 def tag(self, wire, name, revision, message, local, user,
605 def tag(self, wire, name, revision, message, local, user,
602 tag_time, tag_timezone):
606 tag_time, tag_timezone):
603 repo = self._factory.repo(wire)
607 repo = self._factory.repo(wire)
604 ctx = repo[revision]
608 ctx = repo[revision]
605 node = ctx.node()
609 node = ctx.node()
606
610
607 date = (tag_time, tag_timezone)
611 date = (tag_time, tag_timezone)
608 try:
612 try:
609 repo.tag(name, node, message, local, user, date)
613 repo.tag(name, node, message, local, user, date)
610 except Abort as e:
614 except Abort as e:
611 log.exception("Tag operation aborted")
615 log.exception("Tag operation aborted")
612 # Exception can contain unicode which we convert
616 # Exception can contain unicode which we convert
613 raise exceptions.AbortException(repr(e))
617 raise exceptions.AbortException(repr(e))
614
618
615 @reraise_safe_exceptions
619 @reraise_safe_exceptions
616 def tags(self, wire):
620 def tags(self, wire):
617 repo = self._factory.repo(wire)
621 repo = self._factory.repo(wire)
618 return repo.tags()
622 return repo.tags()
619
623
620 @reraise_safe_exceptions
624 @reraise_safe_exceptions
621 def update(self, wire, node=None, clean=False):
625 def update(self, wire, node=None, clean=False):
622 repo = self._factory.repo(wire)
626 repo = self._factory.repo(wire)
623 baseui = self._factory._create_config(wire['config'])
627 baseui = self._factory._create_config(wire['config'])
624 commands.update(baseui, repo, node=node, clean=clean)
628 commands.update(baseui, repo, node=node, clean=clean)
625
629
626 @reraise_safe_exceptions
630 @reraise_safe_exceptions
627 def identify(self, wire):
631 def identify(self, wire):
628 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
629 baseui = self._factory._create_config(wire['config'])
633 baseui = self._factory._create_config(wire['config'])
630 output = io.BytesIO()
634 output = io.BytesIO()
631 baseui.write = output.write
635 baseui.write = output.write
632 # This is required to get a full node id
636 # This is required to get a full node id
633 baseui.debugflag = True
637 baseui.debugflag = True
634 commands.identify(baseui, repo, id=True)
638 commands.identify(baseui, repo, id=True)
635
639
636 return output.getvalue()
640 return output.getvalue()
637
641
638 @reraise_safe_exceptions
642 @reraise_safe_exceptions
639 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
643 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
640 hooks=True):
644 hooks=True):
641 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
642 baseui = self._factory._create_config(wire['config'], hooks=hooks)
646 baseui = self._factory._create_config(wire['config'], hooks=hooks)
643
647
644 # Mercurial internally has a lot of logic that checks ONLY if
648 # Mercurial internally has a lot of logic that checks ONLY if
645 # option is defined, we just pass those if they are defined then
649 # option is defined, we just pass those if they are defined then
646 opts = {}
650 opts = {}
647 if bookmark:
651 if bookmark:
648 opts['bookmark'] = bookmark
652 opts['bookmark'] = bookmark
649 if branch:
653 if branch:
650 opts['branch'] = branch
654 opts['branch'] = branch
651 if revision:
655 if revision:
652 opts['rev'] = revision
656 opts['rev'] = revision
653
657
654 commands.pull(baseui, repo, source, **opts)
658 commands.pull(baseui, repo, source, **opts)
655
659
656 @reraise_safe_exceptions
660 @reraise_safe_exceptions
657 def heads(self, wire, branch=None):
661 def heads(self, wire, branch=None):
658 repo = self._factory.repo(wire)
662 repo = self._factory.repo(wire)
659 baseui = self._factory._create_config(wire['config'])
663 baseui = self._factory._create_config(wire['config'])
660 output = io.BytesIO()
664 output = io.BytesIO()
661
665
662 def write(data, **unused_kwargs):
666 def write(data, **unused_kwargs):
663 output.write(data)
667 output.write(data)
664
668
665 baseui.write = write
669 baseui.write = write
666 if branch:
670 if branch:
667 args = [branch]
671 args = [branch]
668 else:
672 else:
669 args = []
673 args = []
670 commands.heads(baseui, repo, template='{node} ', *args)
674 commands.heads(baseui, repo, template='{node} ', *args)
671
675
672 return output.getvalue()
676 return output.getvalue()
673
677
674 @reraise_safe_exceptions
678 @reraise_safe_exceptions
675 def ancestor(self, wire, revision1, revision2):
679 def ancestor(self, wire, revision1, revision2):
676 repo = self._factory.repo(wire)
680 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'])
681 changelog = repo.changelog
678 output = io.BytesIO()
682 lookup = repo.lookup
679 baseui.write = output.write
683 a = changelog.ancestor(lookup(revision1), lookup(revision2))
680 commands.debugancestor(baseui, repo, revision1, revision2)
684 return hex(a)
681
682 return output.getvalue()
683
685
684 @reraise_safe_exceptions
686 @reraise_safe_exceptions
685 def push(self, wire, revisions, dest_path, hooks=True,
687 def push(self, wire, revisions, dest_path, hooks=True,
686 push_branches=False):
688 push_branches=False):
687 repo = self._factory.repo(wire)
689 repo = self._factory.repo(wire)
688 baseui = self._factory._create_config(wire['config'], hooks=hooks)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
689 commands.push(baseui, repo, dest=dest_path, rev=revisions,
691 commands.push(baseui, repo, dest=dest_path, rev=revisions,
690 new_branch=push_branches)
692 new_branch=push_branches)
691
693
692 @reraise_safe_exceptions
694 @reraise_safe_exceptions
693 def merge(self, wire, revision):
695 def merge(self, wire, revision):
694 repo = self._factory.repo(wire)
696 repo = self._factory.repo(wire)
695 baseui = self._factory._create_config(wire['config'])
697 baseui = self._factory._create_config(wire['config'])
696 repo.ui.setconfig('ui', 'merge', 'internal:dump')
698 repo.ui.setconfig('ui', 'merge', 'internal:dump')
697
699
698 # In case of sub repositories are used mercurial prompts the user in
700 # In case of sub repositories are used mercurial prompts the user in
699 # case of merge conflicts or different sub repository sources. By
701 # case of merge conflicts or different sub repository sources. By
700 # setting the interactive flag to `False` mercurial doesn't prompt the
702 # setting the interactive flag to `False` mercurial doesn't prompt the
701 # used but instead uses a default value.
703 # used but instead uses a default value.
702 repo.ui.setconfig('ui', 'interactive', False)
704 repo.ui.setconfig('ui', 'interactive', False)
703
705
704 commands.merge(baseui, repo, rev=revision)
706 commands.merge(baseui, repo, rev=revision)
705
707
706 @reraise_safe_exceptions
708 @reraise_safe_exceptions
707 def commit(self, wire, message, username):
709 def commit(self, wire, message, username):
708 repo = self._factory.repo(wire)
710 repo = self._factory.repo(wire)
709 baseui = self._factory._create_config(wire['config'])
711 baseui = self._factory._create_config(wire['config'])
710 repo.ui.setconfig('ui', 'username', username)
712 repo.ui.setconfig('ui', 'username', username)
711 commands.commit(baseui, repo, message=message)
713 commands.commit(baseui, repo, message=message)
712
714
713 @reraise_safe_exceptions
715 @reraise_safe_exceptions
714 def rebase(self, wire, source=None, dest=None, abort=False):
716 def rebase(self, wire, source=None, dest=None, abort=False):
715 repo = self._factory.repo(wire)
717 repo = self._factory.repo(wire)
716 baseui = self._factory._create_config(wire['config'])
718 baseui = self._factory._create_config(wire['config'])
717 repo.ui.setconfig('ui', 'merge', 'internal:dump')
719 repo.ui.setconfig('ui', 'merge', 'internal:dump')
718 rebase.rebase(
720 rebase.rebase(
719 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
721 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
720
722
721 @reraise_safe_exceptions
723 @reraise_safe_exceptions
722 def bookmark(self, wire, bookmark, revision=None):
724 def bookmark(self, wire, bookmark, revision=None):
723 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
724 baseui = self._factory._create_config(wire['config'])
726 baseui = self._factory._create_config(wire['config'])
725 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
727 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,372 +1,404 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2017 RodeCode GmbH
4 # Copyright (C) 2014-2017 RodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
21 import sys
22 import json
23 import logging
20 import collections
24 import collections
21 import importlib
25 import importlib
22 import io
23 import json
24 import subprocess
26 import subprocess
25 import sys
27
26 from httplib import HTTPConnection
28 from httplib import HTTPConnection
27
29
28
30
29 import mercurial.scmutil
31 import mercurial.scmutil
30 import mercurial.node
32 import mercurial.node
31 import Pyro4
33 import Pyro4
32 import simplejson as json
34 import simplejson as json
33
35
34 from vcsserver import exceptions
36 from vcsserver import exceptions
35
37
38 log = logging.getLogger(__name__)
39
36
40
37 class HooksHttpClient(object):
41 class HooksHttpClient(object):
38 connection = None
42 connection = None
39
43
40 def __init__(self, hooks_uri):
44 def __init__(self, hooks_uri):
41 self.hooks_uri = hooks_uri
45 self.hooks_uri = hooks_uri
42
46
43 def __call__(self, method, extras):
47 def __call__(self, method, extras):
44 connection = HTTPConnection(self.hooks_uri)
48 connection = HTTPConnection(self.hooks_uri)
45 body = self._serialize(method, extras)
49 body = self._serialize(method, extras)
46 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
47 response = connection.getresponse()
51 response = connection.getresponse()
48 return json.loads(response.read())
52 return json.loads(response.read())
49
53
50 def _serialize(self, hook_name, extras):
54 def _serialize(self, hook_name, extras):
51 data = {
55 data = {
52 'method': hook_name,
56 'method': hook_name,
53 'extras': extras
57 'extras': extras
54 }
58 }
55 return json.dumps(data)
59 return json.dumps(data)
56
60
57
61
58 class HooksDummyClient(object):
62 class HooksDummyClient(object):
59 def __init__(self, hooks_module):
63 def __init__(self, hooks_module):
60 self._hooks_module = importlib.import_module(hooks_module)
64 self._hooks_module = importlib.import_module(hooks_module)
61
65
62 def __call__(self, hook_name, extras):
66 def __call__(self, hook_name, extras):
63 with self._hooks_module.Hooks() as hooks:
67 with self._hooks_module.Hooks() as hooks:
64 return getattr(hooks, hook_name)(extras)
68 return getattr(hooks, hook_name)(extras)
65
69
66
70
67 class HooksPyro4Client(object):
71 class HooksPyro4Client(object):
68 def __init__(self, hooks_uri):
72 def __init__(self, hooks_uri):
69 self.hooks_uri = hooks_uri
73 self.hooks_uri = hooks_uri
70
74
71 def __call__(self, hook_name, extras):
75 def __call__(self, hook_name, extras):
72 with Pyro4.Proxy(self.hooks_uri) as hooks:
76 with Pyro4.Proxy(self.hooks_uri) as hooks:
73 return getattr(hooks, hook_name)(extras)
77 return getattr(hooks, hook_name)(extras)
74
78
75
79
76 class RemoteMessageWriter(object):
80 class RemoteMessageWriter(object):
77 """Writer base class."""
81 """Writer base class."""
78 def write(message):
82 def write(message):
79 raise NotImplementedError()
83 raise NotImplementedError()
80
84
81
85
82 class HgMessageWriter(RemoteMessageWriter):
86 class HgMessageWriter(RemoteMessageWriter):
83 """Writer that knows how to send messages to mercurial clients."""
87 """Writer that knows how to send messages to mercurial clients."""
84
88
85 def __init__(self, ui):
89 def __init__(self, ui):
86 self.ui = ui
90 self.ui = ui
87
91
88 def write(self, message):
92 def write(self, message):
89 # TODO: Check why the quiet flag is set by default.
93 # TODO: Check why the quiet flag is set by default.
90 old = self.ui.quiet
94 old = self.ui.quiet
91 self.ui.quiet = False
95 self.ui.quiet = False
92 self.ui.status(message.encode('utf-8'))
96 self.ui.status(message.encode('utf-8'))
93 self.ui.quiet = old
97 self.ui.quiet = old
94
98
95
99
96 class GitMessageWriter(RemoteMessageWriter):
100 class GitMessageWriter(RemoteMessageWriter):
97 """Writer that knows how to send messages to git clients."""
101 """Writer that knows how to send messages to git clients."""
98
102
99 def __init__(self, stdout=None):
103 def __init__(self, stdout=None):
100 self.stdout = stdout or sys.stdout
104 self.stdout = stdout or sys.stdout
101
105
102 def write(self, message):
106 def write(self, message):
103 self.stdout.write(message.encode('utf-8'))
107 self.stdout.write(message.encode('utf-8'))
104
108
105
109
106 def _handle_exception(result):
110 def _handle_exception(result):
107 exception_class = result.get('exception')
111 exception_class = result.get('exception')
112 exception_traceback = result.get('exception_traceback')
113
114 if exception_traceback:
115 log.error('Got traceback from remote call:%s', exception_traceback)
116
108 if exception_class == 'HTTPLockedRC':
117 if exception_class == 'HTTPLockedRC':
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
118 raise exceptions.RepositoryLockedException(*result['exception_args'])
110 elif exception_class == 'RepositoryError':
119 elif exception_class == 'RepositoryError':
111 raise exceptions.VcsException(*result['exception_args'])
120 raise exceptions.VcsException(*result['exception_args'])
112 elif exception_class:
121 elif exception_class:
113 raise Exception('Got remote exception "%s" with args "%s"' %
122 raise Exception('Got remote exception "%s" with args "%s"' %
114 (exception_class, result['exception_args']))
123 (exception_class, result['exception_args']))
115
124
116
125
117 def _get_hooks_client(extras):
126 def _get_hooks_client(extras):
118 if 'hooks_uri' in extras:
127 if 'hooks_uri' in extras:
119 protocol = extras.get('hooks_protocol')
128 protocol = extras.get('hooks_protocol')
120 return (
129 return (
121 HooksHttpClient(extras['hooks_uri'])
130 HooksHttpClient(extras['hooks_uri'])
122 if protocol == 'http'
131 if protocol == 'http'
123 else HooksPyro4Client(extras['hooks_uri'])
132 else HooksPyro4Client(extras['hooks_uri'])
124 )
133 )
125 else:
134 else:
126 return HooksDummyClient(extras['hooks_module'])
135 return HooksDummyClient(extras['hooks_module'])
127
136
128
137
129 def _call_hook(hook_name, extras, writer):
138 def _call_hook(hook_name, extras, writer):
130 hooks = _get_hooks_client(extras)
139 hooks = _get_hooks_client(extras)
131 result = hooks(hook_name, extras)
140 result = hooks(hook_name, extras)
132 writer.write(result['output'])
141 writer.write(result['output'])
133 _handle_exception(result)
142 _handle_exception(result)
134
143
135 return result['status']
144 return result['status']
136
145
137
146
138 def _extras_from_ui(ui):
147 def _extras_from_ui(ui):
139 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
148 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
140 return extras
149 return extras
141
150
142
151
143 def repo_size(ui, repo, **kwargs):
152 def repo_size(ui, repo, **kwargs):
144 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
153 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
145
154
146
155
147 def pre_pull(ui, repo, **kwargs):
156 def pre_pull(ui, repo, **kwargs):
148 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
157 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
149
158
150
159
151 def post_pull(ui, repo, **kwargs):
160 def post_pull(ui, repo, **kwargs):
152 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
161 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
153
162
154
163
155 def pre_push(ui, repo, **kwargs):
164 def pre_push(ui, repo, node=None, **kwargs):
156 return _call_hook('pre_push', _extras_from_ui(ui), HgMessageWriter(ui))
165 extras = _extras_from_ui(ui)
166
167 rev_data = []
168 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
169 branches = collections.defaultdict(list)
170 for commit_id, branch in _rev_range_hash(repo, node, with_branch=True):
171 branches[branch].append(commit_id)
172
173 for branch, commits in branches.iteritems():
174 old_rev = kwargs.get('node_last') or commits[0]
175 rev_data.append({
176 'old_rev': old_rev,
177 'new_rev': commits[-1],
178 'ref': '',
179 'type': 'branch',
180 'name': branch,
181 })
182
183 extras['commit_ids'] = rev_data
184 return _call_hook('pre_push', extras, HgMessageWriter(ui))
157
185
158
186
159 # N.B.(skreft): the two functions below were taken and adapted from
187 def _rev_range_hash(repo, node, with_branch=False):
160 # rhodecode.lib.vcs.remote.handle_git_pre_receive
161 # They are required to compute the commit_ids
162 def _get_revs(repo, rev_opt):
163 revs = [rev for rev in mercurial.scmutil.revrange(repo, rev_opt)]
164 if len(revs) == 0:
165 return (mercurial.node.nullrev, mercurial.node.nullrev)
166
188
167 return max(revs), min(revs)
189 commits = []
168
190 for rev in xrange(repo[node], len(repo)):
191 ctx = repo[rev]
192 commit_id = mercurial.node.hex(ctx.node())
193 branch = ctx.branch()
194 if with_branch:
195 commits.append((commit_id, branch))
196 else:
197 commits.append(commit_id)
169
198
170 def _rev_range_hash(repo, node):
199 return commits
171 stop, start = _get_revs(repo, [node + ':'])
172 revs = [mercurial.node.hex(repo[r].node()) for r in xrange(start, stop + 1)]
173
174 return revs
175
200
176
201
177 def post_push(ui, repo, node, **kwargs):
202 def post_push(ui, repo, node, **kwargs):
178 commit_ids = _rev_range_hash(repo, node)
203 commit_ids = _rev_range_hash(repo, node)
179
204
180 extras = _extras_from_ui(ui)
205 extras = _extras_from_ui(ui)
181 extras['commit_ids'] = commit_ids
206 extras['commit_ids'] = commit_ids
182
207
183 return _call_hook('post_push', extras, HgMessageWriter(ui))
208 return _call_hook('post_push', extras, HgMessageWriter(ui))
184
209
185
210
186 # backward compat
211 # backward compat
187 log_pull_action = post_pull
212 log_pull_action = post_pull
188
213
189 # backward compat
214 # backward compat
190 log_push_action = post_push
215 log_push_action = post_push
191
216
192
217
193 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
218 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
194 """
219 """
195 Old hook name: keep here for backward compatibility.
220 Old hook name: keep here for backward compatibility.
196
221
197 This is only required when the installed git hooks are not upgraded.
222 This is only required when the installed git hooks are not upgraded.
198 """
223 """
199 pass
224 pass
200
225
201
226
202 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
227 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
203 """
228 """
204 Old hook name: keep here for backward compatibility.
229 Old hook name: keep here for backward compatibility.
205
230
206 This is only required when the installed git hooks are not upgraded.
231 This is only required when the installed git hooks are not upgraded.
207 """
232 """
208 pass
233 pass
209
234
210
235
211 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
236 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
212
237
213
238
214 def git_pre_pull(extras):
239 def git_pre_pull(extras):
215 """
240 """
216 Pre pull hook.
241 Pre pull hook.
217
242
218 :param extras: dictionary containing the keys defined in simplevcs
243 :param extras: dictionary containing the keys defined in simplevcs
219 :type extras: dict
244 :type extras: dict
220
245
221 :return: status code of the hook. 0 for success.
246 :return: status code of the hook. 0 for success.
222 :rtype: int
247 :rtype: int
223 """
248 """
224 if 'pull' not in extras['hooks']:
249 if 'pull' not in extras['hooks']:
225 return HookResponse(0, '')
250 return HookResponse(0, '')
226
251
227 stdout = io.BytesIO()
252 stdout = io.BytesIO()
228 try:
253 try:
229 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
254 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
230 except Exception as error:
255 except Exception as error:
231 status = 128
256 status = 128
232 stdout.write('ERROR: %s\n' % str(error))
257 stdout.write('ERROR: %s\n' % str(error))
233
258
234 return HookResponse(status, stdout.getvalue())
259 return HookResponse(status, stdout.getvalue())
235
260
236
261
237 def git_post_pull(extras):
262 def git_post_pull(extras):
238 """
263 """
239 Post pull hook.
264 Post pull hook.
240
265
241 :param extras: dictionary containing the keys defined in simplevcs
266 :param extras: dictionary containing the keys defined in simplevcs
242 :type extras: dict
267 :type extras: dict
243
268
244 :return: status code of the hook. 0 for success.
269 :return: status code of the hook. 0 for success.
245 :rtype: int
270 :rtype: int
246 """
271 """
247 if 'pull' not in extras['hooks']:
272 if 'pull' not in extras['hooks']:
248 return HookResponse(0, '')
273 return HookResponse(0, '')
249
274
250 stdout = io.BytesIO()
275 stdout = io.BytesIO()
251 try:
276 try:
252 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
277 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
253 except Exception as error:
278 except Exception as error:
254 status = 128
279 status = 128
255 stdout.write('ERROR: %s\n' % error)
280 stdout.write('ERROR: %s\n' % error)
256
281
257 return HookResponse(status, stdout.getvalue())
282 return HookResponse(status, stdout.getvalue())
258
283
259
284
260 def git_pre_receive(unused_repo_path, unused_revs, env):
285 def _parse_git_ref_lines(revision_lines):
286 rev_data = []
287 for revision_line in revision_lines or []:
288 old_rev, new_rev, ref = revision_line.strip().split(' ')
289 ref_data = ref.split('/', 2)
290 if ref_data[1] in ('tags', 'heads'):
291 rev_data.append({
292 'old_rev': old_rev,
293 'new_rev': new_rev,
294 'ref': ref,
295 'type': ref_data[1],
296 'name': ref_data[2],
297 })
298 return rev_data
299
300
301 def git_pre_receive(unused_repo_path, revision_lines, env):
261 """
302 """
262 Pre push hook.
303 Pre push hook.
263
304
264 :param extras: dictionary containing the keys defined in simplevcs
305 :param extras: dictionary containing the keys defined in simplevcs
265 :type extras: dict
306 :type extras: dict
266
307
267 :return: status code of the hook. 0 for success.
308 :return: status code of the hook. 0 for success.
268 :rtype: int
309 :rtype: int
269 """
310 """
270 extras = json.loads(env['RC_SCM_DATA'])
311 extras = json.loads(env['RC_SCM_DATA'])
312 rev_data = _parse_git_ref_lines(revision_lines)
271 if 'push' not in extras['hooks']:
313 if 'push' not in extras['hooks']:
272 return 0
314 return 0
315 extras['commit_ids'] = rev_data
273 return _call_hook('pre_push', extras, GitMessageWriter())
316 return _call_hook('pre_push', extras, GitMessageWriter())
274
317
275
318
276 def _run_command(arguments):
319 def _run_command(arguments):
277 """
320 """
278 Run the specified command and return the stdout.
321 Run the specified command and return the stdout.
279
322
280 :param arguments: sequence of program arugments (including the program name)
323 :param arguments: sequence of program arguments (including the program name)
281 :type arguments: list[str]
324 :type arguments: list[str]
282 """
325 """
283 # TODO(skreft): refactor this method and all the other similar ones.
326 # TODO(skreft): refactor this method and all the other similar ones.
284 # Probably this should be using subprocessio.
327 # Probably this should be using subprocessio.
285 process = subprocess.Popen(
328 process = subprocess.Popen(
286 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
329 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
287 stdout, _ = process.communicate()
330 stdout, _ = process.communicate()
288
331
289 if process.returncode != 0:
332 if process.returncode != 0:
290 raise Exception(
333 raise Exception(
291 'Command %s exited with exit code %s' % (arguments,
334 'Command %s exited with exit code %s' % (arguments,
292 process.returncode))
335 process.returncode))
293
336
294 return stdout
337 return stdout
295
338
296
339
297 def git_post_receive(unused_repo_path, revision_lines, env):
340 def git_post_receive(unused_repo_path, revision_lines, env):
298 """
341 """
299 Post push hook.
342 Post push hook.
300
343
301 :param extras: dictionary containing the keys defined in simplevcs
344 :param extras: dictionary containing the keys defined in simplevcs
302 :type extras: dict
345 :type extras: dict
303
346
304 :return: status code of the hook. 0 for success.
347 :return: status code of the hook. 0 for success.
305 :rtype: int
348 :rtype: int
306 """
349 """
307 extras = json.loads(env['RC_SCM_DATA'])
350 extras = json.loads(env['RC_SCM_DATA'])
308 if 'push' not in extras['hooks']:
351 if 'push' not in extras['hooks']:
309 return 0
352 return 0
310
353
311 rev_data = []
354 rev_data = _parse_git_ref_lines(revision_lines)
312 for revision_line in revision_lines:
313 old_rev, new_rev, ref = revision_line.strip().split(' ')
314 ref_data = ref.split('/', 2)
315 if ref_data[1] in ('tags', 'heads'):
316 rev_data.append({
317 'old_rev': old_rev,
318 'new_rev': new_rev,
319 'ref': ref,
320 'type': ref_data[1],
321 'name': ref_data[2],
322 })
323
355
324 git_revs = []
356 git_revs = []
325
357
326 # N.B.(skreft): it is ok to just call git, as git before calling a
358 # N.B.(skreft): it is ok to just call git, as git before calling a
327 # subcommand sets the PATH environment variable so that it point to the
359 # subcommand sets the PATH environment variable so that it point to the
328 # correct version of the git executable.
360 # correct version of the git executable.
329 empty_commit_id = '0' * 40
361 empty_commit_id = '0' * 40
330 for push_ref in rev_data:
362 for push_ref in rev_data:
331 type_ = push_ref['type']
363 type_ = push_ref['type']
332 if type_ == 'heads':
364 if type_ == 'heads':
333 if push_ref['old_rev'] == empty_commit_id:
365 if push_ref['old_rev'] == empty_commit_id:
334
366
335 # Fix up head revision if needed
367 # Fix up head revision if needed
336 cmd = ['git', 'show', 'HEAD']
368 cmd = ['git', 'show', 'HEAD']
337 try:
369 try:
338 _run_command(cmd)
370 _run_command(cmd)
339 except Exception:
371 except Exception:
340 cmd = ['git', 'symbolic-ref', 'HEAD',
372 cmd = ['git', 'symbolic-ref', 'HEAD',
341 'refs/heads/%s' % push_ref['name']]
373 'refs/heads/%s' % push_ref['name']]
342 print "Setting default branch to %s" % push_ref['name']
374 print("Setting default branch to %s" % push_ref['name'])
343 _run_command(cmd)
375 _run_command(cmd)
344
376
345 cmd = ['git', 'for-each-ref', '--format=%(refname)',
377 cmd = ['git', 'for-each-ref', '--format=%(refname)',
346 'refs/heads/*']
378 'refs/heads/*']
347 heads = _run_command(cmd)
379 heads = _run_command(cmd)
348 heads = heads.replace(push_ref['ref'], '')
380 heads = heads.replace(push_ref['ref'], '')
349 heads = ' '.join(head for head in heads.splitlines() if head)
381 heads = ' '.join(head for head in heads.splitlines() if head)
350 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
382 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
351 '--', push_ref['new_rev'], '--not', heads]
383 '--', push_ref['new_rev'], '--not', heads]
352 git_revs.extend(_run_command(cmd).splitlines())
384 git_revs.extend(_run_command(cmd).splitlines())
353 elif push_ref['new_rev'] == empty_commit_id:
385 elif push_ref['new_rev'] == empty_commit_id:
354 # delete branch case
386 # delete branch case
355 git_revs.append('delete_branch=>%s' % push_ref['name'])
387 git_revs.append('delete_branch=>%s' % push_ref['name'])
356 else:
388 else:
357 cmd = ['git', 'log',
389 cmd = ['git', 'log',
358 '{old_rev}..{new_rev}'.format(**push_ref),
390 '{old_rev}..{new_rev}'.format(**push_ref),
359 '--reverse', '--pretty=format:%H']
391 '--reverse', '--pretty=format:%H']
360 git_revs.extend(_run_command(cmd).splitlines())
392 git_revs.extend(_run_command(cmd).splitlines())
361 elif type_ == 'tags':
393 elif type_ == 'tags':
362 git_revs.append('tag=>%s' % push_ref['name'])
394 git_revs.append('tag=>%s' % push_ref['name'])
363
395
364 extras['commit_ids'] = git_revs
396 extras['commit_ids'] = git_revs
365
397
366 if 'repo_size' in extras['hooks']:
398 if 'repo_size' in extras['hooks']:
367 try:
399 try:
368 _call_hook('repo_size', extras, GitMessageWriter())
400 _call_hook('repo_size', extras, GitMessageWriter())
369 except:
401 except:
370 pass
402 pass
371
403
372 return _call_hook('post_push', extras, GitMessageWriter())
404 return _call_hook('post_push', extras, GitMessageWriter())
@@ -1,408 +1,434 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import base64
18 import base64
19 import locale
19 import locale
20 import logging
20 import logging
21 import uuid
21 import uuid
22 import wsgiref.util
22 import wsgiref.util
23 import traceback
23 import traceback
24 from itertools import chain
24 from itertools import chain
25
25
26 import msgpack
26 import msgpack
27 from beaker.cache import CacheManager
27 from beaker.cache import CacheManager
28 from beaker.util import parse_cache_config_options
28 from beaker.util import parse_cache_config_options
29 from pyramid.config import Configurator
29 from pyramid.config import Configurator
30 from pyramid.wsgi import wsgiapp
30 from pyramid.wsgi import wsgiapp
31
31
32 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
32 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
33 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
33 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
34 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
34 from vcsserver.echo_stub.echo_app import EchoApp
35 from vcsserver.echo_stub.echo_app import EchoApp
35 from vcsserver.exceptions import HTTPRepoLocked
36 from vcsserver.exceptions import HTTPRepoLocked
36 from vcsserver.server import VcsServer
37 from vcsserver.server import VcsServer
37
38
38 try:
39 try:
39 from vcsserver.git import GitFactory, GitRemote
40 from vcsserver.git import GitFactory, GitRemote
40 except ImportError:
41 except ImportError:
41 GitFactory = None
42 GitFactory = None
42 GitRemote = None
43 GitRemote = None
44
43 try:
45 try:
44 from vcsserver.hg import MercurialFactory, HgRemote
46 from vcsserver.hg import MercurialFactory, HgRemote
45 except ImportError:
47 except ImportError:
46 MercurialFactory = None
48 MercurialFactory = None
47 HgRemote = None
49 HgRemote = None
50
48 try:
51 try:
49 from vcsserver.svn import SubversionFactory, SvnRemote
52 from vcsserver.svn import SubversionFactory, SvnRemote
50 except ImportError:
53 except ImportError:
51 SubversionFactory = None
54 SubversionFactory = None
52 SvnRemote = None
55 SvnRemote = None
53
56
54 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
55
58
56
59
57 class VCS(object):
60 class VCS(object):
58 def __init__(self, locale=None, cache_config=None):
61 def __init__(self, locale=None, cache_config=None):
59 self.locale = locale
62 self.locale = locale
60 self.cache_config = cache_config
63 self.cache_config = cache_config
61 self._configure_locale()
64 self._configure_locale()
62 self._initialize_cache()
65 self._initialize_cache()
63
66
64 if GitFactory and GitRemote:
67 if GitFactory and GitRemote:
65 git_repo_cache = self.cache.get_cache_region(
68 git_repo_cache = self.cache.get_cache_region(
66 'git', region='repo_object')
69 'git', region='repo_object')
67 git_factory = GitFactory(git_repo_cache)
70 git_factory = GitFactory(git_repo_cache)
68 self._git_remote = GitRemote(git_factory)
71 self._git_remote = GitRemote(git_factory)
69 else:
72 else:
70 log.info("Git client import failed")
73 log.info("Git client import failed")
71
74
72 if MercurialFactory and HgRemote:
75 if MercurialFactory and HgRemote:
73 hg_repo_cache = self.cache.get_cache_region(
76 hg_repo_cache = self.cache.get_cache_region(
74 'hg', region='repo_object')
77 'hg', region='repo_object')
75 hg_factory = MercurialFactory(hg_repo_cache)
78 hg_factory = MercurialFactory(hg_repo_cache)
76 self._hg_remote = HgRemote(hg_factory)
79 self._hg_remote = HgRemote(hg_factory)
77 else:
80 else:
78 log.info("Mercurial client import failed")
81 log.info("Mercurial client import failed")
79
82
80 if SubversionFactory and SvnRemote:
83 if SubversionFactory and SvnRemote:
81 svn_repo_cache = self.cache.get_cache_region(
84 svn_repo_cache = self.cache.get_cache_region(
82 'svn', region='repo_object')
85 'svn', region='repo_object')
83 svn_factory = SubversionFactory(svn_repo_cache)
86 svn_factory = SubversionFactory(svn_repo_cache)
84 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
87 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
85 else:
88 else:
86 log.info("Subversion client import failed")
89 log.info("Subversion client import failed")
87
90
88 self._vcsserver = VcsServer()
91 self._vcsserver = VcsServer()
89
92
90 def _initialize_cache(self):
93 def _initialize_cache(self):
91 cache_config = parse_cache_config_options(self.cache_config)
94 cache_config = parse_cache_config_options(self.cache_config)
92 log.info('Initializing beaker cache: %s' % cache_config)
95 log.info('Initializing beaker cache: %s' % cache_config)
93 self.cache = CacheManager(**cache_config)
96 self.cache = CacheManager(**cache_config)
94
97
95 def _configure_locale(self):
98 def _configure_locale(self):
96 if self.locale:
99 if self.locale:
97 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
100 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
98 else:
101 else:
99 log.info(
102 log.info(
100 'Configuring locale subsystem based on environment variables')
103 'Configuring locale subsystem based on environment variables')
101 try:
104 try:
102 # If self.locale is the empty string, then the locale
105 # If self.locale is the empty string, then the locale
103 # module will use the environment variables. See the
106 # module will use the environment variables. See the
104 # documentation of the package `locale`.
107 # documentation of the package `locale`.
105 locale.setlocale(locale.LC_ALL, self.locale)
108 locale.setlocale(locale.LC_ALL, self.locale)
106
109
107 language_code, encoding = locale.getlocale()
110 language_code, encoding = locale.getlocale()
108 log.info(
111 log.info(
109 'Locale set to language code "%s" with encoding "%s".',
112 'Locale set to language code "%s" with encoding "%s".',
110 language_code, encoding)
113 language_code, encoding)
111 except locale.Error:
114 except locale.Error:
112 log.exception(
115 log.exception(
113 'Cannot set locale, not configuring the locale system')
116 'Cannot set locale, not configuring the locale system')
114
117
115
118
116 class WsgiProxy(object):
119 class WsgiProxy(object):
117 def __init__(self, wsgi):
120 def __init__(self, wsgi):
118 self.wsgi = wsgi
121 self.wsgi = wsgi
119
122
120 def __call__(self, environ, start_response):
123 def __call__(self, environ, start_response):
121 input_data = environ['wsgi.input'].read()
124 input_data = environ['wsgi.input'].read()
122 input_data = msgpack.unpackb(input_data)
125 input_data = msgpack.unpackb(input_data)
123
126
124 error = None
127 error = None
125 try:
128 try:
126 data, status, headers = self.wsgi.handle(
129 data, status, headers = self.wsgi.handle(
127 input_data['environment'], input_data['input_data'],
130 input_data['environment'], input_data['input_data'],
128 *input_data['args'], **input_data['kwargs'])
131 *input_data['args'], **input_data['kwargs'])
129 except Exception as e:
132 except Exception as e:
130 data, status, headers = [], None, None
133 data, status, headers = [], None, None
131 error = {
134 error = {
132 'message': str(e),
135 'message': str(e),
133 '_vcs_kind': getattr(e, '_vcs_kind', None)
136 '_vcs_kind': getattr(e, '_vcs_kind', None)
134 }
137 }
135
138
136 start_response(200, {})
139 start_response(200, {})
137 return self._iterator(error, status, headers, data)
140 return self._iterator(error, status, headers, data)
138
141
139 def _iterator(self, error, status, headers, data):
142 def _iterator(self, error, status, headers, data):
140 initial_data = [
143 initial_data = [
141 error,
144 error,
142 status,
145 status,
143 headers,
146 headers,
144 ]
147 ]
145
148
146 for d in chain(initial_data, data):
149 for d in chain(initial_data, data):
147 yield msgpack.packb(d)
150 yield msgpack.packb(d)
148
151
149
152
150 class HTTPApplication(object):
153 class HTTPApplication(object):
151 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
154 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
152
155
153 remote_wsgi = remote_wsgi
156 remote_wsgi = remote_wsgi
154 _use_echo_app = False
157 _use_echo_app = False
155
158
156 def __init__(self, settings=None):
159 def __init__(self, settings=None, global_config=None):
157 self.config = Configurator(settings=settings)
160 self.config = Configurator(settings=settings)
161 self.global_config = global_config
162
158 locale = settings.get('locale', '') or 'en_US.UTF-8'
163 locale = settings.get('locale', '') or 'en_US.UTF-8'
159 vcs = VCS(locale=locale, cache_config=settings)
164 vcs = VCS(locale=locale, cache_config=settings)
160 self._remotes = {
165 self._remotes = {
161 'hg': vcs._hg_remote,
166 'hg': vcs._hg_remote,
162 'git': vcs._git_remote,
167 'git': vcs._git_remote,
163 'svn': vcs._svn_remote,
168 'svn': vcs._svn_remote,
164 'server': vcs._vcsserver,
169 'server': vcs._vcsserver,
165 }
170 }
166 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
171 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
167 self._use_echo_app = True
172 self._use_echo_app = True
168 log.warning("Using EchoApp for VCS operations.")
173 log.warning("Using EchoApp for VCS operations.")
169 self.remote_wsgi = remote_wsgi_stub
174 self.remote_wsgi = remote_wsgi_stub
170 self._configure_settings(settings)
175 self._configure_settings(settings)
171 self._configure()
176 self._configure()
172
177
173 def _configure_settings(self, app_settings):
178 def _configure_settings(self, app_settings):
174 """
179 """
175 Configure the settings module.
180 Configure the settings module.
176 """
181 """
177 git_path = app_settings.get('git_path', None)
182 git_path = app_settings.get('git_path', None)
178 if git_path:
183 if git_path:
179 settings.GIT_EXECUTABLE = git_path
184 settings.GIT_EXECUTABLE = git_path
180
185
181 def _configure(self):
186 def _configure(self):
182 self.config.add_renderer(
187 self.config.add_renderer(
183 name='msgpack',
188 name='msgpack',
184 factory=self._msgpack_renderer_factory)
189 factory=self._msgpack_renderer_factory)
185
190
186 self.config.add_route('service', '/_service')
191 self.config.add_route('service', '/_service')
187 self.config.add_route('status', '/status')
192 self.config.add_route('status', '/status')
188 self.config.add_route('hg_proxy', '/proxy/hg')
193 self.config.add_route('hg_proxy', '/proxy/hg')
189 self.config.add_route('git_proxy', '/proxy/git')
194 self.config.add_route('git_proxy', '/proxy/git')
190 self.config.add_route('vcs', '/{backend}')
195 self.config.add_route('vcs', '/{backend}')
191 self.config.add_route('stream_git', '/stream/git/*repo_name')
196 self.config.add_route('stream_git', '/stream/git/*repo_name')
192 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
197 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
193
198
194 self.config.add_view(
199 self.config.add_view(
195 self.status_view, route_name='status', renderer='json')
200 self.status_view, route_name='status', renderer='json')
196 self.config.add_view(
201 self.config.add_view(
197 self.service_view, route_name='service', renderer='msgpack')
202 self.service_view, route_name='service', renderer='msgpack')
198
203
199 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
204 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
200 self.config.add_view(self.git_proxy(), route_name='git_proxy')
205 self.config.add_view(self.git_proxy(), route_name='git_proxy')
201 self.config.add_view(
206 self.config.add_view(
202 self.vcs_view, route_name='vcs', renderer='msgpack',
207 self.vcs_view, route_name='vcs', renderer='msgpack',
203 custom_predicates=[self.is_vcs_view])
208 custom_predicates=[self.is_vcs_view])
204
209
205 self.config.add_view(self.hg_stream(), route_name='stream_hg')
210 self.config.add_view(self.hg_stream(), route_name='stream_hg')
206 self.config.add_view(self.git_stream(), route_name='stream_git')
211 self.config.add_view(self.git_stream(), route_name='stream_git')
207
212
208 def notfound(request):
213 def notfound(request):
209 return {'status': '404 NOT FOUND'}
214 return {'status': '404 NOT FOUND'}
210 self.config.add_notfound_view(notfound, renderer='json')
215 self.config.add_notfound_view(notfound, renderer='json')
211
216
212 self.config.add_view(
217 self.config.add_view(self.handle_vcs_exception, context=Exception)
213 self.handle_vcs_exception, context=Exception,
214 custom_predicates=[self.is_vcs_exception])
215
216 self.config.add_view(
217 self.general_error_handler, context=Exception)
218
218
219 self.config.add_tween(
219 self.config.add_tween(
220 'vcsserver.tweens.RequestWrapperTween',
220 'vcsserver.tweens.RequestWrapperTween',
221 )
221 )
222
222
223 def wsgi_app(self):
223 def wsgi_app(self):
224 return self.config.make_wsgi_app()
224 return self.config.make_wsgi_app()
225
225
226 def vcs_view(self, request):
226 def vcs_view(self, request):
227 remote = self._remotes[request.matchdict['backend']]
227 remote = self._remotes[request.matchdict['backend']]
228 payload = msgpack.unpackb(request.body, use_list=True)
228 payload = msgpack.unpackb(request.body, use_list=True)
229 method = payload.get('method')
229 method = payload.get('method')
230 params = payload.get('params')
230 params = payload.get('params')
231 wire = params.get('wire')
231 wire = params.get('wire')
232 args = params.get('args')
232 args = params.get('args')
233 kwargs = params.get('kwargs')
233 kwargs = params.get('kwargs')
234 if wire:
234 if wire:
235 try:
235 try:
236 wire['context'] = uuid.UUID(wire['context'])
236 wire['context'] = uuid.UUID(wire['context'])
237 except KeyError:
237 except KeyError:
238 pass
238 pass
239 args.insert(0, wire)
239 args.insert(0, wire)
240
240
241 log.debug('method called:%s with kwargs:%s', method, kwargs)
241 log.debug('method called:%s with kwargs:%s', method, kwargs)
242 try:
242 try:
243 resp = getattr(remote, method)(*args, **kwargs)
243 resp = getattr(remote, method)(*args, **kwargs)
244 except Exception as e:
244 except Exception as e:
245 tb_info = traceback.format_exc()
245 tb_info = traceback.format_exc()
246
246
247 type_ = e.__class__.__name__
247 type_ = e.__class__.__name__
248 if type_ not in self.ALLOWED_EXCEPTIONS:
248 if type_ not in self.ALLOWED_EXCEPTIONS:
249 type_ = None
249 type_ = None
250
250
251 resp = {
251 resp = {
252 'id': payload.get('id'),
252 'id': payload.get('id'),
253 'error': {
253 'error': {
254 'message': e.message,
254 'message': e.message,
255 'traceback': tb_info,
255 'traceback': tb_info,
256 'type': type_
256 'type': type_
257 }
257 }
258 }
258 }
259 try:
259 try:
260 resp['error']['_vcs_kind'] = e._vcs_kind
260 resp['error']['_vcs_kind'] = e._vcs_kind
261 except AttributeError:
261 except AttributeError:
262 pass
262 pass
263 else:
263 else:
264 resp = {
264 resp = {
265 'id': payload.get('id'),
265 'id': payload.get('id'),
266 'result': resp
266 'result': resp
267 }
267 }
268
268
269 return resp
269 return resp
270
270
271 def status_view(self, request):
271 def status_view(self, request):
272 return {'status': 'OK'}
272 return {'status': 'OK'}
273
273
274 def service_view(self, request):
274 def service_view(self, request):
275 import vcsserver
275 import vcsserver
276 import ConfigParser as configparser
277
276 payload = msgpack.unpackb(request.body, use_list=True)
278 payload = msgpack.unpackb(request.body, use_list=True)
279
280 try:
281 path = self.global_config['__file__']
282 config = configparser.ConfigParser()
283 config.read(path)
284 parsed_ini = config
285 if parsed_ini.has_section('server:main'):
286 parsed_ini = dict(parsed_ini.items('server:main'))
287 except Exception:
288 log.exception('Failed to read .ini file for display')
289 parsed_ini = {}
290
277 resp = {
291 resp = {
278 'id': payload.get('id'),
292 'id': payload.get('id'),
279 'result': dict(
293 'result': dict(
280 version=vcsserver.__version__,
294 version=vcsserver.__version__,
281 config={},
295 config=parsed_ini,
282 payload=payload,
296 payload=payload,
283 )
297 )
284 }
298 }
285 return resp
299 return resp
286
300
287 def _msgpack_renderer_factory(self, info):
301 def _msgpack_renderer_factory(self, info):
288 def _render(value, system):
302 def _render(value, system):
289 value = msgpack.packb(value)
303 value = msgpack.packb(value)
290 request = system.get('request')
304 request = system.get('request')
291 if request is not None:
305 if request is not None:
292 response = request.response
306 response = request.response
293 ct = response.content_type
307 ct = response.content_type
294 if ct == response.default_content_type:
308 if ct == response.default_content_type:
295 response.content_type = 'application/x-msgpack'
309 response.content_type = 'application/x-msgpack'
296 return value
310 return value
297 return _render
311 return _render
298
312
299 def hg_proxy(self):
313 def hg_proxy(self):
300 @wsgiapp
314 @wsgiapp
301 def _hg_proxy(environ, start_response):
315 def _hg_proxy(environ, start_response):
302 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
316 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
303 return app(environ, start_response)
317 return app(environ, start_response)
304 return _hg_proxy
318 return _hg_proxy
305
319
306 def git_proxy(self):
320 def git_proxy(self):
307 @wsgiapp
321 @wsgiapp
308 def _git_proxy(environ, start_response):
322 def _git_proxy(environ, start_response):
309 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
323 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
310 return app(environ, start_response)
324 return app(environ, start_response)
311 return _git_proxy
325 return _git_proxy
312
326
313 def hg_stream(self):
327 def hg_stream(self):
314 if self._use_echo_app:
328 if self._use_echo_app:
315 @wsgiapp
329 @wsgiapp
316 def _hg_stream(environ, start_response):
330 def _hg_stream(environ, start_response):
317 app = EchoApp('fake_path', 'fake_name', None)
331 app = EchoApp('fake_path', 'fake_name', None)
318 return app(environ, start_response)
332 return app(environ, start_response)
319 return _hg_stream
333 return _hg_stream
320 else:
334 else:
321 @wsgiapp
335 @wsgiapp
322 def _hg_stream(environ, start_response):
336 def _hg_stream(environ, start_response):
323 repo_path = environ['HTTP_X_RC_REPO_PATH']
337 repo_path = environ['HTTP_X_RC_REPO_PATH']
324 repo_name = environ['HTTP_X_RC_REPO_NAME']
338 repo_name = environ['HTTP_X_RC_REPO_NAME']
325 packed_config = base64.b64decode(
339 packed_config = base64.b64decode(
326 environ['HTTP_X_RC_REPO_CONFIG'])
340 environ['HTTP_X_RC_REPO_CONFIG'])
327 config = msgpack.unpackb(packed_config)
341 config = msgpack.unpackb(packed_config)
328 app = scm_app.create_hg_wsgi_app(
342 app = scm_app.create_hg_wsgi_app(
329 repo_path, repo_name, config)
343 repo_path, repo_name, config)
330
344
331 # Consitent path information for hgweb
345 # Consitent path information for hgweb
332 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
346 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
333 environ['REPO_NAME'] = repo_name
347 environ['REPO_NAME'] = repo_name
334 return app(environ, ResponseFilter(start_response))
348 return app(environ, ResponseFilter(start_response))
335 return _hg_stream
349 return _hg_stream
336
350
337 def git_stream(self):
351 def git_stream(self):
338 if self._use_echo_app:
352 if self._use_echo_app:
339 @wsgiapp
353 @wsgiapp
340 def _git_stream(environ, start_response):
354 def _git_stream(environ, start_response):
341 app = EchoApp('fake_path', 'fake_name', None)
355 app = EchoApp('fake_path', 'fake_name', None)
342 return app(environ, start_response)
356 return app(environ, start_response)
343 return _git_stream
357 return _git_stream
344 else:
358 else:
345 @wsgiapp
359 @wsgiapp
346 def _git_stream(environ, start_response):
360 def _git_stream(environ, start_response):
347 repo_path = environ['HTTP_X_RC_REPO_PATH']
361 repo_path = environ['HTTP_X_RC_REPO_PATH']
348 repo_name = environ['HTTP_X_RC_REPO_NAME']
362 repo_name = environ['HTTP_X_RC_REPO_NAME']
349 packed_config = base64.b64decode(
363 packed_config = base64.b64decode(
350 environ['HTTP_X_RC_REPO_CONFIG'])
364 environ['HTTP_X_RC_REPO_CONFIG'])
351 config = msgpack.unpackb(packed_config)
365 config = msgpack.unpackb(packed_config)
352
366
353 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
367 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
354 app = scm_app.create_git_wsgi_app(
368 content_type = environ.get('CONTENT_TYPE', '')
355 repo_path, repo_name, config)
369
370 path = environ['PATH_INFO']
371 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
372 log.debug(
373 'LFS: Detecting if request `%s` is LFS server path based '
374 'on content type:`%s`, is_lfs:%s',
375 path, content_type, is_lfs_request)
376
377 if not is_lfs_request:
378 # fallback detection by path
379 if GIT_LFS_PROTO_PAT.match(path):
380 is_lfs_request = True
381 log.debug(
382 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
383 path, is_lfs_request)
384
385 if is_lfs_request:
386 app = scm_app.create_git_lfs_wsgi_app(
387 repo_path, repo_name, config)
388 else:
389 app = scm_app.create_git_wsgi_app(
390 repo_path, repo_name, config)
356 return app(environ, start_response)
391 return app(environ, start_response)
392
357 return _git_stream
393 return _git_stream
358
394
359 def is_vcs_view(self, context, request):
395 def is_vcs_view(self, context, request):
360 """
396 """
361 View predicate that returns true if given backend is supported by
397 View predicate that returns true if given backend is supported by
362 defined remotes.
398 defined remotes.
363 """
399 """
364 backend = request.matchdict.get('backend')
400 backend = request.matchdict.get('backend')
365 return backend in self._remotes
401 return backend in self._remotes
366
402
367 def is_vcs_exception(self, context, request):
368 """
369 View predicate that returns true if the context object is a VCS
370 exception.
371 """
372 return hasattr(context, '_vcs_kind')
373
374 def handle_vcs_exception(self, exception, request):
403 def handle_vcs_exception(self, exception, request):
375 if exception._vcs_kind == 'repo_locked':
404 _vcs_kind = getattr(exception, '_vcs_kind', '')
405 if _vcs_kind == 'repo_locked':
376 # Get custom repo-locked status code if present.
406 # Get custom repo-locked status code if present.
377 status_code = request.headers.get('X-RC-Locked-Status-Code')
407 status_code = request.headers.get('X-RC-Locked-Status-Code')
378 return HTTPRepoLocked(
408 return HTTPRepoLocked(
379 title=exception.message, status_code=status_code)
409 title=exception.message, status_code=status_code)
380
410
381 # Re-raise exception if we can not handle it.
411 # Re-raise exception if we can not handle it.
382 raise exception
383
384 def general_error_handler(self, exception, request):
385 log.exception(
412 log.exception(
386 'error occurred handling this request for path: %s',
413 'error occurred handling this request for path: %s', request.path)
387 request.path)
388 raise exception
414 raise exception
389
415
390
416
391 class ResponseFilter(object):
417 class ResponseFilter(object):
392
418
393 def __init__(self, start_response):
419 def __init__(self, start_response):
394 self._start_response = start_response
420 self._start_response = start_response
395
421
396 def __call__(self, status, response_headers, exc_info=None):
422 def __call__(self, status, response_headers, exc_info=None):
397 headers = tuple(
423 headers = tuple(
398 (h, v) for h, v in response_headers
424 (h, v) for h, v in response_headers
399 if not wsgiref.util.is_hop_by_hop(h))
425 if not wsgiref.util.is_hop_by_hop(h))
400 return self._start_response(status, headers, exc_info)
426 return self._start_response(status, headers, exc_info)
401
427
402
428
403 def main(global_config, **settings):
429 def main(global_config, **settings):
404 if MercurialFactory:
430 if MercurialFactory:
405 hgpatches.patch_largefiles_capabilities()
431 hgpatches.patch_largefiles_capabilities()
406 hgpatches.patch_subrepo_type_mapping()
432 hgpatches.patch_subrepo_type_mapping()
407 app = HTTPApplication(settings=settings)
433 app = HTTPApplication(settings=settings, global_config=global_config)
408 return app.wsgi_app()
434 return app.wsgi_app()
@@ -1,174 +1,209 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import logging
19 import logging
19 import os
20
20
21 import mercurial
21 import mercurial
22 import mercurial.error
22 import mercurial.error
23 import mercurial.hgweb.common
23 import mercurial.hgweb.common
24 import mercurial.hgweb.hgweb_mod
24 import mercurial.hgweb.hgweb_mod
25 import mercurial.hgweb.protocol
25 import mercurial.hgweb.protocol
26 import webob.exc
26 import webob.exc
27
27
28 from vcsserver import pygrack, exceptions, settings
28 from vcsserver import pygrack, exceptions, settings, git_lfs
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 # propagated from mercurial documentation
34 # propagated from mercurial documentation
35 HG_UI_SECTIONS = [
35 HG_UI_SECTIONS = [
36 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
36 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
37 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
38 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 ]
39 ]
40
40
41
41
42 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
42 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 """Extension of hgweb that simplifies some functions."""
43 """Extension of hgweb that simplifies some functions."""
44
44
45 def _get_view(self, repo):
45 def _get_view(self, repo):
46 """Views are not supported."""
46 """Views are not supported."""
47 return repo
47 return repo
48
48
49 def loadsubweb(self):
49 def loadsubweb(self):
50 """The result is only used in the templater method which is not used."""
50 """The result is only used in the templater method which is not used."""
51 return None
51 return None
52
52
53 def run(self):
53 def run(self):
54 """Unused function so raise an exception if accidentally called."""
54 """Unused function so raise an exception if accidentally called."""
55 raise NotImplementedError
55 raise NotImplementedError
56
56
57 def templater(self, req):
57 def templater(self, req):
58 """Function used in an unreachable code path.
58 """Function used in an unreachable code path.
59
59
60 This code is unreachable because we guarantee that the HTTP request,
60 This code is unreachable because we guarantee that the HTTP request,
61 corresponds to a Mercurial command. See the is_hg method. So, we are
61 corresponds to a Mercurial command. See the is_hg method. So, we are
62 never going to get a user-visible url.
62 never going to get a user-visible url.
63 """
63 """
64 raise NotImplementedError
64 raise NotImplementedError
65
65
66 def archivelist(self, nodeid):
66 def archivelist(self, nodeid):
67 """Unused function so raise an exception if accidentally called."""
67 """Unused function so raise an exception if accidentally called."""
68 raise NotImplementedError
68 raise NotImplementedError
69
69
70 def run_wsgi(self, req):
70 def run_wsgi(self, req):
71 """Check the request has a valid command, failing fast otherwise."""
71 """Check the request has a valid command, failing fast otherwise."""
72 cmd = req.form.get('cmd', [''])[0]
72 cmd = req.form.get('cmd', [''])[0]
73 if not mercurial.hgweb.protocol.iscmd(cmd):
73 if not mercurial.hgweb.protocol.iscmd(cmd):
74 req.respond(
74 req.respond(
75 mercurial.hgweb.common.ErrorResponse(
75 mercurial.hgweb.common.ErrorResponse(
76 mercurial.hgweb.common.HTTP_BAD_REQUEST),
76 mercurial.hgweb.common.HTTP_BAD_REQUEST),
77 mercurial.hgweb.protocol.HGTYPE
77 mercurial.hgweb.protocol.HGTYPE
78 )
78 )
79 return ['']
79 return ['']
80
80
81 return super(HgWeb, self).run_wsgi(req)
81 return super(HgWeb, self).run_wsgi(req)
82
82
83
83
84 def make_hg_ui_from_config(repo_config):
84 def make_hg_ui_from_config(repo_config):
85 baseui = mercurial.ui.ui()
85 baseui = mercurial.ui.ui()
86
86
87 # clean the baseui object
87 # clean the baseui object
88 baseui._ocfg = mercurial.config.config()
88 baseui._ocfg = mercurial.config.config()
89 baseui._ucfg = mercurial.config.config()
89 baseui._ucfg = mercurial.config.config()
90 baseui._tcfg = mercurial.config.config()
90 baseui._tcfg = mercurial.config.config()
91
91
92 for section, option, value in repo_config:
92 for section, option, value in repo_config:
93 baseui.setconfig(section, option, value)
93 baseui.setconfig(section, option, value)
94
94
95 # make our hgweb quiet so it doesn't print output
95 # make our hgweb quiet so it doesn't print output
96 baseui.setconfig('ui', 'quiet', 'true')
96 baseui.setconfig('ui', 'quiet', 'true')
97
97
98 return baseui
98 return baseui
99
99
100
100
101 def update_hg_ui_from_hgrc(baseui, repo_path):
101 def update_hg_ui_from_hgrc(baseui, repo_path):
102 path = os.path.join(repo_path, '.hg', 'hgrc')
102 path = os.path.join(repo_path, '.hg', 'hgrc')
103
103
104 if not os.path.isfile(path):
104 if not os.path.isfile(path):
105 log.debug('hgrc file is not present at %s, skipping...', path)
105 log.debug('hgrc file is not present at %s, skipping...', path)
106 return
106 return
107 log.debug('reading hgrc from %s', path)
107 log.debug('reading hgrc from %s', path)
108 cfg = mercurial.config.config()
108 cfg = mercurial.config.config()
109 cfg.read(path)
109 cfg.read(path)
110 for section in HG_UI_SECTIONS:
110 for section in HG_UI_SECTIONS:
111 for k, v in cfg.items(section):
111 for k, v in cfg.items(section):
112 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
112 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
113 baseui.setconfig(section, k, v)
113 baseui.setconfig(section, k, v)
114
114
115
115
116 def create_hg_wsgi_app(repo_path, repo_name, config):
116 def create_hg_wsgi_app(repo_path, repo_name, config):
117 """
117 """
118 Prepares a WSGI application to handle Mercurial requests.
118 Prepares a WSGI application to handle Mercurial requests.
119
119
120 :param config: is a list of 3-item tuples representing a ConfigObject
120 :param config: is a list of 3-item tuples representing a ConfigObject
121 (it is the serialized version of the config object).
121 (it is the serialized version of the config object).
122 """
122 """
123 log.debug("Creating Mercurial WSGI application")
123 log.debug("Creating Mercurial WSGI application")
124
124
125 baseui = make_hg_ui_from_config(config)
125 baseui = make_hg_ui_from_config(config)
126 update_hg_ui_from_hgrc(baseui, repo_path)
126 update_hg_ui_from_hgrc(baseui, repo_path)
127
127
128 try:
128 try:
129 return HgWeb(repo_path, name=repo_name, baseui=baseui)
129 return HgWeb(repo_path, name=repo_name, baseui=baseui)
130 except mercurial.error.RequirementError as exc:
130 except mercurial.error.RequirementError as exc:
131 raise exceptions.RequirementException(exc)
131 raise exceptions.RequirementException(exc)
132
132
133
133
134 class GitHandler(object):
134 class GitHandler(object):
135 """
136 Handler for Git operations like push/pull etc
137 """
135 def __init__(self, repo_location, repo_name, git_path, update_server_info,
138 def __init__(self, repo_location, repo_name, git_path, update_server_info,
136 extras):
139 extras):
137 if not os.path.isdir(repo_location):
140 if not os.path.isdir(repo_location):
138 raise OSError(repo_location)
141 raise OSError(repo_location)
139 self.content_path = repo_location
142 self.content_path = repo_location
140 self.repo_name = repo_name
143 self.repo_name = repo_name
141 self.repo_location = repo_location
144 self.repo_location = repo_location
142 self.extras = extras
145 self.extras = extras
143 self.git_path = git_path
146 self.git_path = git_path
144 self.update_server_info = update_server_info
147 self.update_server_info = update_server_info
145
148
146 def __call__(self, environ, start_response):
149 def __call__(self, environ, start_response):
147 app = webob.exc.HTTPNotFound()
150 app = webob.exc.HTTPNotFound()
148 candidate_paths = (
151 candidate_paths = (
149 self.content_path, os.path.join(self.content_path, '.git'))
152 self.content_path, os.path.join(self.content_path, '.git'))
150
153
151 for content_path in candidate_paths:
154 for content_path in candidate_paths:
152 try:
155 try:
153 app = pygrack.GitRepository(
156 app = pygrack.GitRepository(
154 self.repo_name, content_path, self.git_path,
157 self.repo_name, content_path, self.git_path,
155 self.update_server_info, self.extras)
158 self.update_server_info, self.extras)
156 break
159 break
157 except OSError:
160 except OSError:
158 continue
161 continue
159
162
160 return app(environ, start_response)
163 return app(environ, start_response)
161
164
162
165
163 def create_git_wsgi_app(repo_path, repo_name, config):
166 def create_git_wsgi_app(repo_path, repo_name, config):
164 """
167 """
165 Creates a WSGI application to handle Git requests.
168 Creates a WSGI application to handle Git requests.
166
169
167 :param config: is a dictionary holding the extras.
170 :param config: is a dictionary holding the extras.
168 """
171 """
169 git_path = settings.GIT_EXECUTABLE
172 git_path = settings.GIT_EXECUTABLE
170 update_server_info = config.pop('git_update_server_info')
173 update_server_info = config.pop('git_update_server_info')
171 app = GitHandler(
174 app = GitHandler(
172 repo_path, repo_name, git_path, update_server_info, config)
175 repo_path, repo_name, git_path, update_server_info, config)
173
176
174 return app
177 return app
178
179
180 class GitLFSHandler(object):
181 """
182 Handler for Git LFS operations
183 """
184
185 def __init__(self, repo_location, repo_name, git_path, update_server_info,
186 extras):
187 if not os.path.isdir(repo_location):
188 raise OSError(repo_location)
189 self.content_path = repo_location
190 self.repo_name = repo_name
191 self.repo_location = repo_location
192 self.extras = extras
193 self.git_path = git_path
194 self.update_server_info = update_server_info
195
196 def get_app(self, git_lfs_enabled, git_lfs_store_path):
197 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
198 return app
199
200
201 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
202 git_path = settings.GIT_EXECUTABLE
203 update_server_info = config.pop('git_update_server_info')
204 git_lfs_enabled = config.pop('git_lfs_enabled')
205 git_lfs_store_path = config.pop('git_lfs_store_path')
206 app = GitLFSHandler(
207 repo_path, repo_name, git_path, update_server_info, config)
208
209 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,651 +1,644 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26
26
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.delta
29 import svn.delta
30 import svn.diff
30 import svn.diff
31 import svn.fs
31 import svn.fs
32 import svn.repos
32 import svn.repos
33
33
34 from vcsserver import svn_diff
34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 # Set of svn compatible version flags.
42 # Set of svn compatible version flags.
43 # Compare with subversion/svnadmin/svnadmin.c
43 # Compare with subversion/svnadmin/svnadmin.c
44 svn_compatible_versions = set([
44 svn_compatible_versions = set([
45 'pre-1.4-compatible',
45 'pre-1.4-compatible',
46 'pre-1.5-compatible',
46 'pre-1.5-compatible',
47 'pre-1.6-compatible',
47 'pre-1.6-compatible',
48 'pre-1.8-compatible',
48 'pre-1.8-compatible',
49 ])
49 ])
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except Exception as e:
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
58 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in hg remote call")
59 log.exception("Unhandled exception in hg remote call")
60 raise_from_original(exceptions.UnhandledException)
60 raise_from_original(exceptions.UnhandledException)
61 raise
61 raise
62 return wrapper
62 return wrapper
63
63
64
64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
76 class SubversionFactory(RepoFactory):
65 class SubversionFactory(RepoFactory):
77
66
78 def _create_repo(self, wire, create, compatible_version):
67 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
68 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
69 if create:
81 fs_config = {}
70 fs_config = {}
82 if compatible_version:
71 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
72 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
73 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
74 .format(compatible_version))
86 log.debug('Create SVN repo with compatible version "%s"',
75 log.debug('Create SVN repo with compatible version "%s"',
87 compatible_version)
76 compatible_version)
88 fs_config[compatible_version] = '1'
77 fs_config[compatible_version] = '1'
89 repo = svn.repos.create(path, "", "", None, fs_config)
78 repo = svn.repos.create(path, "", "", None, fs_config)
90 else:
79 else:
91 repo = svn.repos.open(path)
80 repo = svn.repos.open(path)
92 return repo
81 return repo
93
82
94 def repo(self, wire, create=False, compatible_version=None):
83 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
84 def create_new_repo():
96 return self._create_repo(wire, create, compatible_version)
85 return self._create_repo(wire, create, compatible_version)
97
86
98 return self._repo(wire, create_new_repo)
87 return self._repo(wire, create_new_repo)
99
88
100
89
101
90
102 NODE_TYPE_MAPPING = {
91 NODE_TYPE_MAPPING = {
103 svn.core.svn_node_file: 'file',
92 svn.core.svn_node_file: 'file',
104 svn.core.svn_node_dir: 'dir',
93 svn.core.svn_node_dir: 'dir',
105 }
94 }
106
95
107
96
108 class SvnRemote(object):
97 class SvnRemote(object):
109
98
110 def __init__(self, factory, hg_factory=None):
99 def __init__(self, factory, hg_factory=None):
111 self._factory = factory
100 self._factory = factory
112 # TODO: Remove once we do not use internal Mercurial objects anymore
101 # TODO: Remove once we do not use internal Mercurial objects anymore
113 # for subversion
102 # for subversion
114 self._hg_factory = hg_factory
103 self._hg_factory = hg_factory
115
104
116 @reraise_safe_exceptions
105 @reraise_safe_exceptions
117 def discover_svn_version(self):
106 def discover_svn_version(self):
118 try:
107 try:
119 import svn.core
108 import svn.core
120 svn_ver = svn.core.SVN_VERSION
109 svn_ver = svn.core.SVN_VERSION
121 except ImportError:
110 except ImportError:
122 svn_ver = None
111 svn_ver = None
123 return svn_ver
112 return svn_ver
124
113
125 def check_url(self, url, config_items):
114 def check_url(self, url, config_items):
126 # this can throw exception if not installed, but we detect this
115 # this can throw exception if not installed, but we detect this
127 from hgsubversion import svnrepo
116 from hgsubversion import svnrepo
128
117
129 baseui = self._hg_factory._create_config(config_items)
118 baseui = self._hg_factory._create_config(config_items)
130 # uuid function get's only valid UUID from proper repo, else
119 # uuid function get's only valid UUID from proper repo, else
131 # throws exception
120 # throws exception
132 try:
121 try:
133 svnrepo.svnremoterepo(baseui, url).svn.uuid
122 svnrepo.svnremoterepo(baseui, url).svn.uuid
134 except:
123 except:
135 log.debug("Invalid svn url: %s", url)
124 log.debug("Invalid svn url: %s", url)
136 raise URLError(
125 raise URLError(
137 '"%s" is not a valid Subversion source url.' % (url, ))
126 '"%s" is not a valid Subversion source url.' % (url, ))
138 return True
127 return True
139
128
140 def is_path_valid_repository(self, wire, path):
129 def is_path_valid_repository(self, wire, path):
141 try:
130 try:
142 svn.repos.open(path)
131 svn.repos.open(path)
143 except svn.core.SubversionException:
132 except svn.core.SubversionException:
144 log.debug("Invalid Subversion path %s", path)
133 log.debug("Invalid Subversion path %s", path)
145 return False
134 return False
146 return True
135 return True
147
136
148 def lookup(self, wire, revision):
137 def lookup(self, wire, revision):
149 if revision not in [-1, None, 'HEAD']:
138 if revision not in [-1, None, 'HEAD']:
150 raise NotImplementedError
139 raise NotImplementedError
151 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
152 fs_ptr = svn.repos.fs(repo)
141 fs_ptr = svn.repos.fs(repo)
153 head = svn.fs.youngest_rev(fs_ptr)
142 head = svn.fs.youngest_rev(fs_ptr)
154 return head
143 return head
155
144
156 def lookup_interval(self, wire, start_ts, end_ts):
145 def lookup_interval(self, wire, start_ts, end_ts):
157 repo = self._factory.repo(wire)
146 repo = self._factory.repo(wire)
158 fsobj = svn.repos.fs(repo)
147 fsobj = svn.repos.fs(repo)
159 start_rev = None
148 start_rev = None
160 end_rev = None
149 end_rev = None
161 if start_ts:
150 if start_ts:
162 start_ts_svn = apr_time_t(start_ts)
151 start_ts_svn = apr_time_t(start_ts)
163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
152 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
164 else:
153 else:
165 start_rev = 1
154 start_rev = 1
166 if end_ts:
155 if end_ts:
167 end_ts_svn = apr_time_t(end_ts)
156 end_ts_svn = apr_time_t(end_ts)
168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
157 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
169 else:
158 else:
170 end_rev = svn.fs.youngest_rev(fsobj)
159 end_rev = svn.fs.youngest_rev(fsobj)
171 return start_rev, end_rev
160 return start_rev, end_rev
172
161
173 def revision_properties(self, wire, revision):
162 def revision_properties(self, wire, revision):
174 repo = self._factory.repo(wire)
163 repo = self._factory.repo(wire)
175 fs_ptr = svn.repos.fs(repo)
164 fs_ptr = svn.repos.fs(repo)
176 return svn.fs.revision_proplist(fs_ptr, revision)
165 return svn.fs.revision_proplist(fs_ptr, revision)
177
166
178 def revision_changes(self, wire, revision):
167 def revision_changes(self, wire, revision):
179
168
180 repo = self._factory.repo(wire)
169 repo = self._factory.repo(wire)
181 fsobj = svn.repos.fs(repo)
170 fsobj = svn.repos.fs(repo)
182 rev_root = svn.fs.revision_root(fsobj, revision)
171 rev_root = svn.fs.revision_root(fsobj, revision)
183
172
184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
173 editor = svn.repos.ChangeCollector(fsobj, rev_root)
185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
174 editor_ptr, editor_baton = svn.delta.make_editor(editor)
186 base_dir = ""
175 base_dir = ""
187 send_deltas = False
176 send_deltas = False
188 svn.repos.replay2(
177 svn.repos.replay2(
189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
178 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
190 editor_ptr, editor_baton, None)
179 editor_ptr, editor_baton, None)
191
180
192 added = []
181 added = []
193 changed = []
182 changed = []
194 removed = []
183 removed = []
195
184
196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
185 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
197 for path, change in editor.changes.iteritems():
186 for path, change in editor.changes.iteritems():
198 # TODO: Decide what to do with directory nodes. Subversion can add
187 # TODO: Decide what to do with directory nodes. Subversion can add
199 # empty directories.
188 # empty directories.
200
189
201 if change.item_kind == svn.core.svn_node_dir:
190 if change.item_kind == svn.core.svn_node_dir:
202 continue
191 continue
203 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
192 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
204 added.append(path)
193 added.append(path)
205 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
194 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
206 svn.repos.CHANGE_ACTION_REPLACE]:
195 svn.repos.CHANGE_ACTION_REPLACE]:
207 changed.append(path)
196 changed.append(path)
208 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
197 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
209 removed.append(path)
198 removed.append(path)
210 else:
199 else:
211 raise NotImplementedError(
200 raise NotImplementedError(
212 "Action %s not supported on path %s" % (
201 "Action %s not supported on path %s" % (
213 change.action, path))
202 change.action, path))
214
203
215 changes = {
204 changes = {
216 'added': added,
205 'added': added,
217 'changed': changed,
206 'changed': changed,
218 'removed': removed,
207 'removed': removed,
219 }
208 }
220 return changes
209 return changes
221
210
222 def node_history(self, wire, path, revision, limit):
211 def node_history(self, wire, path, revision, limit):
223 cross_copies = False
212 cross_copies = False
224 repo = self._factory.repo(wire)
213 repo = self._factory.repo(wire)
225 fsobj = svn.repos.fs(repo)
214 fsobj = svn.repos.fs(repo)
226 rev_root = svn.fs.revision_root(fsobj, revision)
215 rev_root = svn.fs.revision_root(fsobj, revision)
227
216
228 history_revisions = []
217 history_revisions = []
229 history = svn.fs.node_history(rev_root, path)
218 history = svn.fs.node_history(rev_root, path)
230 history = svn.fs.history_prev(history, cross_copies)
219 history = svn.fs.history_prev(history, cross_copies)
231 while history:
220 while history:
232 __, node_revision = svn.fs.history_location(history)
221 __, node_revision = svn.fs.history_location(history)
233 history_revisions.append(node_revision)
222 history_revisions.append(node_revision)
234 if limit and len(history_revisions) >= limit:
223 if limit and len(history_revisions) >= limit:
235 break
224 break
236 history = svn.fs.history_prev(history, cross_copies)
225 history = svn.fs.history_prev(history, cross_copies)
237 return history_revisions
226 return history_revisions
238
227
239 def node_properties(self, wire, path, revision):
228 def node_properties(self, wire, path, revision):
240 repo = self._factory.repo(wire)
229 repo = self._factory.repo(wire)
241 fsobj = svn.repos.fs(repo)
230 fsobj = svn.repos.fs(repo)
242 rev_root = svn.fs.revision_root(fsobj, revision)
231 rev_root = svn.fs.revision_root(fsobj, revision)
243 return svn.fs.node_proplist(rev_root, path)
232 return svn.fs.node_proplist(rev_root, path)
244
233
245 def file_annotate(self, wire, path, revision):
234 def file_annotate(self, wire, path, revision):
246 abs_path = 'file://' + urllib.pathname2url(
235 abs_path = 'file://' + urllib.pathname2url(
247 vcspath.join(wire['path'], path))
236 vcspath.join(wire['path'], path))
248 file_uri = svn.core.svn_path_canonicalize(abs_path)
237 file_uri = svn.core.svn_path_canonicalize(abs_path)
249
238
250 start_rev = svn_opt_revision_value_t(0)
239 start_rev = svn_opt_revision_value_t(0)
251 peg_rev = svn_opt_revision_value_t(revision)
240 peg_rev = svn_opt_revision_value_t(revision)
252 end_rev = peg_rev
241 end_rev = peg_rev
253
242
254 annotations = []
243 annotations = []
255
244
256 def receiver(line_no, revision, author, date, line, pool):
245 def receiver(line_no, revision, author, date, line, pool):
257 annotations.append((line_no, revision, line))
246 annotations.append((line_no, revision, line))
258
247
259 # TODO: Cannot use blame5, missing typemap function in the swig code
248 # TODO: Cannot use blame5, missing typemap function in the swig code
260 try:
249 try:
261 svn.client.blame2(
250 svn.client.blame2(
262 file_uri, peg_rev, start_rev, end_rev,
251 file_uri, peg_rev, start_rev, end_rev,
263 receiver, svn.client.create_context())
252 receiver, svn.client.create_context())
264 except svn.core.SubversionException as exc:
253 except svn.core.SubversionException as exc:
265 log.exception("Error during blame operation.")
254 log.exception("Error during blame operation.")
266 raise Exception(
255 raise Exception(
267 "Blame not supported or file does not exist at path %s. "
256 "Blame not supported or file does not exist at path %s. "
268 "Error %s." % (path, exc))
257 "Error %s." % (path, exc))
269
258
270 return annotations
259 return annotations
271
260
272 def get_node_type(self, wire, path, rev=None):
261 def get_node_type(self, wire, path, rev=None):
273 repo = self._factory.repo(wire)
262 repo = self._factory.repo(wire)
274 fs_ptr = svn.repos.fs(repo)
263 fs_ptr = svn.repos.fs(repo)
275 if rev is None:
264 if rev is None:
276 rev = svn.fs.youngest_rev(fs_ptr)
265 rev = svn.fs.youngest_rev(fs_ptr)
277 root = svn.fs.revision_root(fs_ptr, rev)
266 root = svn.fs.revision_root(fs_ptr, rev)
278 node = svn.fs.check_path(root, path)
267 node = svn.fs.check_path(root, path)
279 return NODE_TYPE_MAPPING.get(node, None)
268 return NODE_TYPE_MAPPING.get(node, None)
280
269
281 def get_nodes(self, wire, path, revision=None):
270 def get_nodes(self, wire, path, revision=None):
282 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
283 fsobj = svn.repos.fs(repo)
272 fsobj = svn.repos.fs(repo)
284 if revision is None:
273 if revision is None:
285 revision = svn.fs.youngest_rev(fsobj)
274 revision = svn.fs.youngest_rev(fsobj)
286 root = svn.fs.revision_root(fsobj, revision)
275 root = svn.fs.revision_root(fsobj, revision)
287 entries = svn.fs.dir_entries(root, path)
276 entries = svn.fs.dir_entries(root, path)
288 result = []
277 result = []
289 for entry_path, entry_info in entries.iteritems():
278 for entry_path, entry_info in entries.iteritems():
290 result.append(
279 result.append(
291 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
280 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
292 return result
281 return result
293
282
294 def get_file_content(self, wire, path, rev=None):
283 def get_file_content(self, wire, path, rev=None):
295 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
296 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
297 if rev is None:
286 if rev is None:
298 rev = svn.fs.youngest_revision(fsobj)
287 rev = svn.fs.youngest_revision(fsobj)
299 root = svn.fs.revision_root(fsobj, rev)
288 root = svn.fs.revision_root(fsobj, rev)
300 content = svn.core.Stream(svn.fs.file_contents(root, path))
289 content = svn.core.Stream(svn.fs.file_contents(root, path))
301 return content.read()
290 return content.read()
302
291
303 def get_file_size(self, wire, path, revision=None):
292 def get_file_size(self, wire, path, revision=None):
304 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
305 fsobj = svn.repos.fs(repo)
294 fsobj = svn.repos.fs(repo)
306 if revision is None:
295 if revision is None:
307 revision = svn.fs.youngest_revision(fsobj)
296 revision = svn.fs.youngest_revision(fsobj)
308 root = svn.fs.revision_root(fsobj, revision)
297 root = svn.fs.revision_root(fsobj, revision)
309 size = svn.fs.file_length(root, path)
298 size = svn.fs.file_length(root, path)
310 return size
299 return size
311
300
312 def create_repository(self, wire, compatible_version=None):
301 def create_repository(self, wire, compatible_version=None):
313 log.info('Creating Subversion repository in path "%s"', wire['path'])
302 log.info('Creating Subversion repository in path "%s"', wire['path'])
314 self._factory.repo(wire, create=True,
303 self._factory.repo(wire, create=True,
315 compatible_version=compatible_version)
304 compatible_version=compatible_version)
316
305
317 def import_remote_repository(self, wire, src_url):
306 def import_remote_repository(self, wire, src_url):
318 repo_path = wire['path']
307 repo_path = wire['path']
319 if not self.is_path_valid_repository(wire, repo_path):
308 if not self.is_path_valid_repository(wire, repo_path):
320 raise Exception(
309 raise Exception(
321 "Path %s is not a valid Subversion repository." % repo_path)
310 "Path %s is not a valid Subversion repository." % repo_path)
322 # TODO: johbo: URL checks ?
311 # TODO: johbo: URL checks ?
323 rdump = subprocess.Popen(
312 rdump = subprocess.Popen(
324 ['svnrdump', 'dump', '--non-interactive', src_url],
313 ['svnrdump', 'dump', '--non-interactive', src_url],
325 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
314 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
326 load = subprocess.Popen(
315 load = subprocess.Popen(
327 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
316 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
328
317
329 # TODO: johbo: This can be a very long operation, might be better
318 # TODO: johbo: This can be a very long operation, might be better
330 # to track some kind of status and provide an api to check if the
319 # to track some kind of status and provide an api to check if the
331 # import is done.
320 # import is done.
332 rdump.wait()
321 rdump.wait()
333 load.wait()
322 load.wait()
334
323
335 if rdump.returncode != 0:
324 if rdump.returncode != 0:
336 errors = rdump.stderr.read()
325 errors = rdump.stderr.read()
337 log.error('svnrdump dump failed: statuscode %s: message: %s',
326 log.error('svnrdump dump failed: statuscode %s: message: %s',
338 rdump.returncode, errors)
327 rdump.returncode, errors)
339 reason = 'UNKNOWN'
328 reason = 'UNKNOWN'
340 if 'svnrdump: E230001:' in errors:
329 if 'svnrdump: E230001:' in errors:
341 reason = 'INVALID_CERTIFICATE'
330 reason = 'INVALID_CERTIFICATE'
342 raise Exception(
331 raise Exception(
343 'Failed to dump the remote repository from %s.' % src_url,
332 'Failed to dump the remote repository from %s.' % src_url,
344 reason)
333 reason)
345 if load.returncode != 0:
334 if load.returncode != 0:
346 raise Exception(
335 raise Exception(
347 'Failed to load the dump of remote repository from %s.' %
336 'Failed to load the dump of remote repository from %s.' %
348 (src_url, ))
337 (src_url, ))
349
338
350 def commit(self, wire, message, author, timestamp, updated, removed):
339 def commit(self, wire, message, author, timestamp, updated, removed):
351 assert isinstance(message, str)
340 assert isinstance(message, str)
352 assert isinstance(author, str)
341 assert isinstance(author, str)
353
342
354 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
355 fsobj = svn.repos.fs(repo)
344 fsobj = svn.repos.fs(repo)
356
345
357 rev = svn.fs.youngest_rev(fsobj)
346 rev = svn.fs.youngest_rev(fsobj)
358 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
347 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
359 txn_root = svn.fs.txn_root(txn)
348 txn_root = svn.fs.txn_root(txn)
360
349
361 for node in updated:
350 for node in updated:
362 TxnNodeProcessor(node, txn_root).update()
351 TxnNodeProcessor(node, txn_root).update()
363 for node in removed:
352 for node in removed:
364 TxnNodeProcessor(node, txn_root).remove()
353 TxnNodeProcessor(node, txn_root).remove()
365
354
366 commit_id = svn.repos.fs_commit_txn(repo, txn)
355 commit_id = svn.repos.fs_commit_txn(repo, txn)
367
356
368 if timestamp:
357 if timestamp:
369 apr_time = apr_time_t(timestamp)
358 apr_time = apr_time_t(timestamp)
370 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
359 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
371 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
360 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
372
361
373 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
362 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
374 return commit_id
363 return commit_id
375
364
376 def diff(self, wire, rev1, rev2, path1=None, path2=None,
365 def diff(self, wire, rev1, rev2, path1=None, path2=None,
377 ignore_whitespace=False, context=3):
366 ignore_whitespace=False, context=3):
378
367
379 wire.update(cache=False)
368 wire.update(cache=False)
380 repo = self._factory.repo(wire)
369 repo = self._factory.repo(wire)
381 diff_creator = SvnDiffer(
370 diff_creator = SvnDiffer(
382 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
371 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
383 try:
372 try:
384 return diff_creator.generate_diff()
373 return diff_creator.generate_diff()
385 except svn.core.SubversionException as e:
374 except svn.core.SubversionException as e:
386 log.exception(
375 log.exception(
387 "Error during diff operation operation. "
376 "Error during diff operation operation. "
388 "Path might not exist %s, %s" % (path1, path2))
377 "Path might not exist %s, %s" % (path1, path2))
389 return ""
378 return ""
390
379
380 @reraise_safe_exceptions
381 def is_large_file(self, wire, path):
382 return False
383
391
384
392 class SvnDiffer(object):
385 class SvnDiffer(object):
393 """
386 """
394 Utility to create diffs based on difflib and the Subversion api
387 Utility to create diffs based on difflib and the Subversion api
395 """
388 """
396
389
397 binary_content = False
390 binary_content = False
398
391
399 def __init__(
392 def __init__(
400 self, repo, src_rev, src_path, tgt_rev, tgt_path,
393 self, repo, src_rev, src_path, tgt_rev, tgt_path,
401 ignore_whitespace, context):
394 ignore_whitespace, context):
402 self.repo = repo
395 self.repo = repo
403 self.ignore_whitespace = ignore_whitespace
396 self.ignore_whitespace = ignore_whitespace
404 self.context = context
397 self.context = context
405
398
406 fsobj = svn.repos.fs(repo)
399 fsobj = svn.repos.fs(repo)
407
400
408 self.tgt_rev = tgt_rev
401 self.tgt_rev = tgt_rev
409 self.tgt_path = tgt_path or ''
402 self.tgt_path = tgt_path or ''
410 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
403 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
411 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
404 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
412
405
413 self.src_rev = src_rev
406 self.src_rev = src_rev
414 self.src_path = src_path or self.tgt_path
407 self.src_path = src_path or self.tgt_path
415 self.src_root = svn.fs.revision_root(fsobj, src_rev)
408 self.src_root = svn.fs.revision_root(fsobj, src_rev)
416 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
409 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
417
410
418 self._validate()
411 self._validate()
419
412
420 def _validate(self):
413 def _validate(self):
421 if (self.tgt_kind != svn.core.svn_node_none and
414 if (self.tgt_kind != svn.core.svn_node_none and
422 self.src_kind != svn.core.svn_node_none and
415 self.src_kind != svn.core.svn_node_none and
423 self.src_kind != self.tgt_kind):
416 self.src_kind != self.tgt_kind):
424 # TODO: johbo: proper error handling
417 # TODO: johbo: proper error handling
425 raise Exception(
418 raise Exception(
426 "Source and target are not compatible for diff generation. "
419 "Source and target are not compatible for diff generation. "
427 "Source type: %s, target type: %s" %
420 "Source type: %s, target type: %s" %
428 (self.src_kind, self.tgt_kind))
421 (self.src_kind, self.tgt_kind))
429
422
430 def generate_diff(self):
423 def generate_diff(self):
431 buf = StringIO.StringIO()
424 buf = StringIO.StringIO()
432 if self.tgt_kind == svn.core.svn_node_dir:
425 if self.tgt_kind == svn.core.svn_node_dir:
433 self._generate_dir_diff(buf)
426 self._generate_dir_diff(buf)
434 else:
427 else:
435 self._generate_file_diff(buf)
428 self._generate_file_diff(buf)
436 return buf.getvalue()
429 return buf.getvalue()
437
430
438 def _generate_dir_diff(self, buf):
431 def _generate_dir_diff(self, buf):
439 editor = DiffChangeEditor()
432 editor = DiffChangeEditor()
440 editor_ptr, editor_baton = svn.delta.make_editor(editor)
433 editor_ptr, editor_baton = svn.delta.make_editor(editor)
441 svn.repos.dir_delta2(
434 svn.repos.dir_delta2(
442 self.src_root,
435 self.src_root,
443 self.src_path,
436 self.src_path,
444 '', # src_entry
437 '', # src_entry
445 self.tgt_root,
438 self.tgt_root,
446 self.tgt_path,
439 self.tgt_path,
447 editor_ptr, editor_baton,
440 editor_ptr, editor_baton,
448 authorization_callback_allow_all,
441 authorization_callback_allow_all,
449 False, # text_deltas
442 False, # text_deltas
450 svn.core.svn_depth_infinity, # depth
443 svn.core.svn_depth_infinity, # depth
451 False, # entry_props
444 False, # entry_props
452 False, # ignore_ancestry
445 False, # ignore_ancestry
453 )
446 )
454
447
455 for path, __, change in sorted(editor.changes):
448 for path, __, change in sorted(editor.changes):
456 self._generate_node_diff(
449 self._generate_node_diff(
457 buf, change, path, self.tgt_path, path, self.src_path)
450 buf, change, path, self.tgt_path, path, self.src_path)
458
451
459 def _generate_file_diff(self, buf):
452 def _generate_file_diff(self, buf):
460 change = None
453 change = None
461 if self.src_kind == svn.core.svn_node_none:
454 if self.src_kind == svn.core.svn_node_none:
462 change = "add"
455 change = "add"
463 elif self.tgt_kind == svn.core.svn_node_none:
456 elif self.tgt_kind == svn.core.svn_node_none:
464 change = "delete"
457 change = "delete"
465 tgt_base, tgt_path = vcspath.split(self.tgt_path)
458 tgt_base, tgt_path = vcspath.split(self.tgt_path)
466 src_base, src_path = vcspath.split(self.src_path)
459 src_base, src_path = vcspath.split(self.src_path)
467 self._generate_node_diff(
460 self._generate_node_diff(
468 buf, change, tgt_path, tgt_base, src_path, src_base)
461 buf, change, tgt_path, tgt_base, src_path, src_base)
469
462
470 def _generate_node_diff(
463 def _generate_node_diff(
471 self, buf, change, tgt_path, tgt_base, src_path, src_base):
464 self, buf, change, tgt_path, tgt_base, src_path, src_base):
472
465
473 if self.src_rev == self.tgt_rev and tgt_base == src_base:
466 if self.src_rev == self.tgt_rev and tgt_base == src_base:
474 # makes consistent behaviour with git/hg to return empty diff if
467 # makes consistent behaviour with git/hg to return empty diff if
475 # we compare same revisions
468 # we compare same revisions
476 return
469 return
477
470
478 tgt_full_path = vcspath.join(tgt_base, tgt_path)
471 tgt_full_path = vcspath.join(tgt_base, tgt_path)
479 src_full_path = vcspath.join(src_base, src_path)
472 src_full_path = vcspath.join(src_base, src_path)
480
473
481 self.binary_content = False
474 self.binary_content = False
482 mime_type = self._get_mime_type(tgt_full_path)
475 mime_type = self._get_mime_type(tgt_full_path)
483
476
484 if mime_type and not mime_type.startswith('text'):
477 if mime_type and not mime_type.startswith('text'):
485 self.binary_content = True
478 self.binary_content = True
486 buf.write("=" * 67 + '\n')
479 buf.write("=" * 67 + '\n')
487 buf.write("Cannot display: file marked as a binary type.\n")
480 buf.write("Cannot display: file marked as a binary type.\n")
488 buf.write("svn:mime-type = %s\n" % mime_type)
481 buf.write("svn:mime-type = %s\n" % mime_type)
489 buf.write("Index: %s\n" % (tgt_path, ))
482 buf.write("Index: %s\n" % (tgt_path, ))
490 buf.write("=" * 67 + '\n')
483 buf.write("=" * 67 + '\n')
491 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
484 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
492 'tgt_path': tgt_path})
485 'tgt_path': tgt_path})
493
486
494 if change == 'add':
487 if change == 'add':
495 # TODO: johbo: SVN is missing a zero here compared to git
488 # TODO: johbo: SVN is missing a zero here compared to git
496 buf.write("new file mode 10644\n")
489 buf.write("new file mode 10644\n")
497
490
498 #TODO(marcink): intro to binary detection of svn patches
491 #TODO(marcink): intro to binary detection of svn patches
499 # if self.binary_content:
492 # if self.binary_content:
500 # buf.write('GIT binary patch\n')
493 # buf.write('GIT binary patch\n')
501
494
502 buf.write("--- /dev/null\t(revision 0)\n")
495 buf.write("--- /dev/null\t(revision 0)\n")
503 src_lines = []
496 src_lines = []
504 else:
497 else:
505 if change == 'delete':
498 if change == 'delete':
506 buf.write("deleted file mode 10644\n")
499 buf.write("deleted file mode 10644\n")
507
500
508 #TODO(marcink): intro to binary detection of svn patches
501 #TODO(marcink): intro to binary detection of svn patches
509 # if self.binary_content:
502 # if self.binary_content:
510 # buf.write('GIT binary patch\n')
503 # buf.write('GIT binary patch\n')
511
504
512 buf.write("--- a/%s\t(revision %s)\n" % (
505 buf.write("--- a/%s\t(revision %s)\n" % (
513 src_path, self.src_rev))
506 src_path, self.src_rev))
514 src_lines = self._svn_readlines(self.src_root, src_full_path)
507 src_lines = self._svn_readlines(self.src_root, src_full_path)
515
508
516 if change == 'delete':
509 if change == 'delete':
517 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
510 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
518 tgt_lines = []
511 tgt_lines = []
519 else:
512 else:
520 buf.write("+++ b/%s\t(revision %s)\n" % (
513 buf.write("+++ b/%s\t(revision %s)\n" % (
521 tgt_path, self.tgt_rev))
514 tgt_path, self.tgt_rev))
522 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
515 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
523
516
524 if not self.binary_content:
517 if not self.binary_content:
525 udiff = svn_diff.unified_diff(
518 udiff = svn_diff.unified_diff(
526 src_lines, tgt_lines, context=self.context,
519 src_lines, tgt_lines, context=self.context,
527 ignore_blank_lines=self.ignore_whitespace,
520 ignore_blank_lines=self.ignore_whitespace,
528 ignore_case=False,
521 ignore_case=False,
529 ignore_space_changes=self.ignore_whitespace)
522 ignore_space_changes=self.ignore_whitespace)
530 buf.writelines(udiff)
523 buf.writelines(udiff)
531
524
532 def _get_mime_type(self, path):
525 def _get_mime_type(self, path):
533 try:
526 try:
534 mime_type = svn.fs.node_prop(
527 mime_type = svn.fs.node_prop(
535 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
528 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
536 except svn.core.SubversionException:
529 except svn.core.SubversionException:
537 mime_type = svn.fs.node_prop(
530 mime_type = svn.fs.node_prop(
538 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
531 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
539 return mime_type
532 return mime_type
540
533
541 def _svn_readlines(self, fs_root, node_path):
534 def _svn_readlines(self, fs_root, node_path):
542 if self.binary_content:
535 if self.binary_content:
543 return []
536 return []
544 node_kind = svn.fs.check_path(fs_root, node_path)
537 node_kind = svn.fs.check_path(fs_root, node_path)
545 if node_kind not in (
538 if node_kind not in (
546 svn.core.svn_node_file, svn.core.svn_node_symlink):
539 svn.core.svn_node_file, svn.core.svn_node_symlink):
547 return []
540 return []
548 content = svn.core.Stream(
541 content = svn.core.Stream(
549 svn.fs.file_contents(fs_root, node_path)).read()
542 svn.fs.file_contents(fs_root, node_path)).read()
550 return content.splitlines(True)
543 return content.splitlines(True)
551
544
552
545
553 class DiffChangeEditor(svn.delta.Editor):
546 class DiffChangeEditor(svn.delta.Editor):
554 """
547 """
555 Records changes between two given revisions
548 Records changes between two given revisions
556 """
549 """
557
550
558 def __init__(self):
551 def __init__(self):
559 self.changes = []
552 self.changes = []
560
553
561 def delete_entry(self, path, revision, parent_baton, pool=None):
554 def delete_entry(self, path, revision, parent_baton, pool=None):
562 self.changes.append((path, None, 'delete'))
555 self.changes.append((path, None, 'delete'))
563
556
564 def add_file(
557 def add_file(
565 self, path, parent_baton, copyfrom_path, copyfrom_revision,
558 self, path, parent_baton, copyfrom_path, copyfrom_revision,
566 file_pool=None):
559 file_pool=None):
567 self.changes.append((path, 'file', 'add'))
560 self.changes.append((path, 'file', 'add'))
568
561
569 def open_file(self, path, parent_baton, base_revision, file_pool=None):
562 def open_file(self, path, parent_baton, base_revision, file_pool=None):
570 self.changes.append((path, 'file', 'change'))
563 self.changes.append((path, 'file', 'change'))
571
564
572
565
573 def authorization_callback_allow_all(root, path, pool):
566 def authorization_callback_allow_all(root, path, pool):
574 return True
567 return True
575
568
576
569
577 class TxnNodeProcessor(object):
570 class TxnNodeProcessor(object):
578 """
571 """
579 Utility to process the change of one node within a transaction root.
572 Utility to process the change of one node within a transaction root.
580
573
581 It encapsulates the knowledge of how to add, update or remove
574 It encapsulates the knowledge of how to add, update or remove
582 a node for a given transaction root. The purpose is to support the method
575 a node for a given transaction root. The purpose is to support the method
583 `SvnRemote.commit`.
576 `SvnRemote.commit`.
584 """
577 """
585
578
586 def __init__(self, node, txn_root):
579 def __init__(self, node, txn_root):
587 assert isinstance(node['path'], str)
580 assert isinstance(node['path'], str)
588
581
589 self.node = node
582 self.node = node
590 self.txn_root = txn_root
583 self.txn_root = txn_root
591
584
592 def update(self):
585 def update(self):
593 self._ensure_parent_dirs()
586 self._ensure_parent_dirs()
594 self._add_file_if_node_does_not_exist()
587 self._add_file_if_node_does_not_exist()
595 self._update_file_content()
588 self._update_file_content()
596 self._update_file_properties()
589 self._update_file_properties()
597
590
598 def remove(self):
591 def remove(self):
599 svn.fs.delete(self.txn_root, self.node['path'])
592 svn.fs.delete(self.txn_root, self.node['path'])
600 # TODO: Clean up directory if empty
593 # TODO: Clean up directory if empty
601
594
602 def _ensure_parent_dirs(self):
595 def _ensure_parent_dirs(self):
603 curdir = vcspath.dirname(self.node['path'])
596 curdir = vcspath.dirname(self.node['path'])
604 dirs_to_create = []
597 dirs_to_create = []
605 while not self._svn_path_exists(curdir):
598 while not self._svn_path_exists(curdir):
606 dirs_to_create.append(curdir)
599 dirs_to_create.append(curdir)
607 curdir = vcspath.dirname(curdir)
600 curdir = vcspath.dirname(curdir)
608
601
609 for curdir in reversed(dirs_to_create):
602 for curdir in reversed(dirs_to_create):
610 log.debug('Creating missing directory "%s"', curdir)
603 log.debug('Creating missing directory "%s"', curdir)
611 svn.fs.make_dir(self.txn_root, curdir)
604 svn.fs.make_dir(self.txn_root, curdir)
612
605
613 def _svn_path_exists(self, path):
606 def _svn_path_exists(self, path):
614 path_status = svn.fs.check_path(self.txn_root, path)
607 path_status = svn.fs.check_path(self.txn_root, path)
615 return path_status != svn.core.svn_node_none
608 return path_status != svn.core.svn_node_none
616
609
617 def _add_file_if_node_does_not_exist(self):
610 def _add_file_if_node_does_not_exist(self):
618 kind = svn.fs.check_path(self.txn_root, self.node['path'])
611 kind = svn.fs.check_path(self.txn_root, self.node['path'])
619 if kind == svn.core.svn_node_none:
612 if kind == svn.core.svn_node_none:
620 svn.fs.make_file(self.txn_root, self.node['path'])
613 svn.fs.make_file(self.txn_root, self.node['path'])
621
614
622 def _update_file_content(self):
615 def _update_file_content(self):
623 assert isinstance(self.node['content'], str)
616 assert isinstance(self.node['content'], str)
624 handler, baton = svn.fs.apply_textdelta(
617 handler, baton = svn.fs.apply_textdelta(
625 self.txn_root, self.node['path'], None, None)
618 self.txn_root, self.node['path'], None, None)
626 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
619 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
627
620
628 def _update_file_properties(self):
621 def _update_file_properties(self):
629 properties = self.node.get('properties', {})
622 properties = self.node.get('properties', {})
630 for key, value in properties.iteritems():
623 for key, value in properties.iteritems():
631 svn.fs.change_node_prop(
624 svn.fs.change_node_prop(
632 self.txn_root, self.node['path'], key, value)
625 self.txn_root, self.node['path'], key, value)
633
626
634
627
635 def apr_time_t(timestamp):
628 def apr_time_t(timestamp):
636 """
629 """
637 Convert a Python timestamp into APR timestamp type apr_time_t
630 Convert a Python timestamp into APR timestamp type apr_time_t
638 """
631 """
639 return timestamp * 1E6
632 return timestamp * 1E6
640
633
641
634
642 def svn_opt_revision_value_t(num):
635 def svn_opt_revision_value_t(num):
643 """
636 """
644 Put `num` into a `svn_opt_revision_value_t` structure.
637 Put `num` into a `svn_opt_revision_value_t` structure.
645 """
638 """
646 value = svn.core.svn_opt_revision_value_t()
639 value = svn.core.svn_opt_revision_value_t()
647 value.number = num
640 value.number = num
648 revision = svn.core.svn_opt_revision_t()
641 revision = svn.core.svn_opt_revision_t()
649 revision.kind = svn.core.svn_opt_revision_number
642 revision.kind = svn.core.svn_opt_revision_number
650 revision.value = value
643 revision.value = value
651 return revision
644 return revision
@@ -1,57 +1,72 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 def safe_int(val, default=None):
20 """
21 Returns int() of val if val is not convertable to int use default
22 instead
19
23
20 # TODO: johbo: That's a copy from rhodecode
24 :param val:
25 :param default:
26 """
27
28 try:
29 val = int(val)
30 except (ValueError, TypeError):
31 val = default
32
33 return val
34
35
21 def safe_str(unicode_, to_encoding=['utf8']):
36 def safe_str(unicode_, to_encoding=['utf8']):
22 """
37 """
23 safe str function. Does few trick to turn unicode_ into string
38 safe str function. Does few trick to turn unicode_ into string
24
39
25 In case of UnicodeEncodeError, we try to return it with encoding detected
40 In case of UnicodeEncodeError, we try to return it with encoding detected
26 by chardet library if it fails fallback to string with errors replaced
41 by chardet library if it fails fallback to string with errors replaced
27
42
28 :param unicode_: unicode to encode
43 :param unicode_: unicode to encode
29 :rtype: str
44 :rtype: str
30 :returns: str object
45 :returns: str object
31 """
46 """
32
47
33 # if it's not basestr cast to str
48 # if it's not basestr cast to str
34 if not isinstance(unicode_, basestring):
49 if not isinstance(unicode_, basestring):
35 return str(unicode_)
50 return str(unicode_)
36
51
37 if isinstance(unicode_, str):
52 if isinstance(unicode_, str):
38 return unicode_
53 return unicode_
39
54
40 if not isinstance(to_encoding, (list, tuple)):
55 if not isinstance(to_encoding, (list, tuple)):
41 to_encoding = [to_encoding]
56 to_encoding = [to_encoding]
42
57
43 for enc in to_encoding:
58 for enc in to_encoding:
44 try:
59 try:
45 return unicode_.encode(enc)
60 return unicode_.encode(enc)
46 except UnicodeEncodeError:
61 except UnicodeEncodeError:
47 pass
62 pass
48
63
49 try:
64 try:
50 import chardet
65 import chardet
51 encoding = chardet.detect(unicode_)['encoding']
66 encoding = chardet.detect(unicode_)['encoding']
52 if encoding is None:
67 if encoding is None:
53 raise UnicodeEncodeError()
68 raise UnicodeEncodeError()
54
69
55 return unicode_.encode(encoding)
70 return unicode_.encode(encoding)
56 except (ImportError, UnicodeEncodeError):
71 except (ImportError, UnicodeEncodeError):
57 return unicode_.encode(to_encoding[0], 'replace')
72 return unicode_.encode(to_encoding[0], 'replace')
General Comments 0
You need to be logged in to leave comments. Login now