##// END OF EJS Templates
code: update copyrights to 2020
marcink -
r850:cbc05af2 default
parent child Browse files
Show More
@@ -1,28 +1,28 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import pkgutil
19 19
20 20
21 21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
22 22
23 23 # link to config for pyramid
24 24 CONFIG = {}
25 25
26 26 # Populated with the settings dictionary from application init in
27 27 #
28 28 PYRAMID_SETTINGS = {}
@@ -1,76 +1,76 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 23 from vcsserver.lib.rc_cache import region_meta
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 27 class RepoFactory(object):
28 28 """
29 29 Utility to create instances of repository
30 30
31 31 It provides internal caching of the `repo` object based on
32 32 the :term:`call context`.
33 33 """
34 34 repo_type = None
35 35
36 36 def __init__(self):
37 37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38 38
39 39 def _create_config(self, path, config):
40 40 config = {}
41 41 return config
42 42
43 43 def _create_repo(self, wire, create):
44 44 raise NotImplementedError()
45 45
46 46 def repo(self, wire, create=False):
47 47 raise NotImplementedError()
48 48
49 49
50 50 def obfuscate_qs(query_string):
51 51 if query_string is None:
52 52 return None
53 53
54 54 parsed = []
55 55 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
56 56 if k in ['auth_token', 'api_key']:
57 57 v = "*****"
58 58 parsed.append((k, v))
59 59
60 60 return '&'.join('{}{}'.format(
61 61 k, '={}'.format(v) if v else '') for k, v in parsed)
62 62
63 63
64 64 def raise_from_original(new_type):
65 65 """
66 66 Raise a new exception type with original args and traceback.
67 67 """
68 68 exc_type, exc_value, exc_traceback = sys.exc_info()
69 69 new_exc = new_type(*exc_value.args)
70 70 # store the original traceback into the new exc
71 71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
72 72
73 73 try:
74 74 raise new_exc, None, exc_traceback
75 75 finally:
76 76 del exc_traceback
@@ -1,121 +1,121 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28 28
29 29
30 30 def _make_exception(kind, org_exc, *args):
31 31 """
32 32 Prepares a base `Exception` instance to be sent over the wire.
33 33
34 34 To give our caller a hint what this is about, it will attach an attribute
35 35 `_vcs_kind` to the exception.
36 36 """
37 37 exc = Exception(*args)
38 38 exc._vcs_kind = kind
39 39 exc._org_exc = org_exc
40 40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 41 return exc
42 42
43 43
44 44 def AbortException(org_exc=None):
45 45 def _make_exception_wrapper(*args):
46 46 return _make_exception('abort', org_exc, *args)
47 47 return _make_exception_wrapper
48 48
49 49
50 50 def ArchiveException(org_exc=None):
51 51 def _make_exception_wrapper(*args):
52 52 return _make_exception('archive', org_exc, *args)
53 53 return _make_exception_wrapper
54 54
55 55
56 56 def LookupException(org_exc=None):
57 57 def _make_exception_wrapper(*args):
58 58 return _make_exception('lookup', org_exc, *args)
59 59 return _make_exception_wrapper
60 60
61 61
62 62 def VcsException(org_exc=None):
63 63 def _make_exception_wrapper(*args):
64 64 return _make_exception('error', org_exc, *args)
65 65 return _make_exception_wrapper
66 66
67 67
68 68 def RepositoryLockedException(org_exc=None):
69 69 def _make_exception_wrapper(*args):
70 70 return _make_exception('repo_locked', org_exc, *args)
71 71 return _make_exception_wrapper
72 72
73 73
74 74 def RepositoryBranchProtectedException(org_exc=None):
75 75 def _make_exception_wrapper(*args):
76 76 return _make_exception('repo_branch_protected', org_exc, *args)
77 77 return _make_exception_wrapper
78 78
79 79
80 80 def RequirementException(org_exc=None):
81 81 def _make_exception_wrapper(*args):
82 82 return _make_exception('requirement', org_exc, *args)
83 83 return _make_exception_wrapper
84 84
85 85
86 86 def UnhandledException(org_exc=None):
87 87 def _make_exception_wrapper(*args):
88 88 return _make_exception('unhandled', org_exc, *args)
89 89 return _make_exception_wrapper
90 90
91 91
92 92 def URLError(org_exc=None):
93 93 def _make_exception_wrapper(*args):
94 94 return _make_exception('url_error', org_exc, *args)
95 95 return _make_exception_wrapper
96 96
97 97
98 98 def SubrepoMergeException(org_exc=None):
99 99 def _make_exception_wrapper(*args):
100 100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 101 return _make_exception_wrapper
102 102
103 103
104 104 class HTTPRepoLocked(HTTPLocked):
105 105 """
106 106 Subclass of HTTPLocked response that allows to set the title and status
107 107 code via constructor arguments.
108 108 """
109 109 def __init__(self, title, status_code=None, **kwargs):
110 110 self.code = status_code or HTTPLocked.code
111 111 self.title = title
112 112 super(HTTPRepoLocked, self).__init__(**kwargs)
113 113
114 114
115 115 class HTTPRepoBranchProtected(HTTPForbidden):
116 116 def __init__(self, *args, **kwargs):
117 117 super(HTTPForbidden, self).__init__(*args, **kwargs)
118 118
119 119
120 120 class RefNotFoundException(KeyError):
121 121 pass
@@ -1,1192 +1,1192 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib
26 26 import urllib2
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from dulwich import index, objects
33 33 from dulwich.client import HttpGitClient, LocalGitClient
34 34 from dulwich.errors import (
35 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 36 MissingCommitError, ObjectMissing, HangupException,
37 37 UnexpectedCommandError)
38 38 from dulwich.repo import Repo as DulwichRepo
39 39 from dulwich.server import update_server_info
40 40
41 41 from vcsserver import exceptions, settings, subprocessio
42 42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 43 from vcsserver.base import RepoFactory, obfuscate_qs
44 44 from vcsserver.hgcompat import (
45 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 46 from vcsserver.git_lfs.lib import LFSOidStore
47 47 from vcsserver.vcs_base import RemoteBase
48 48
49 49 DIR_STAT = stat.S_IFDIR
50 50 FILE_MODE = stat.S_IFMT
51 51 GIT_LINK = objects.S_IFGITLINK
52 52 PEELED_REF_MARKER = '^{}'
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 def str_to_dulwich(value):
59 59 """
60 60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 61 """
62 62 return value.decode(settings.WIRE_ENCODING)
63 63
64 64
65 65 def reraise_safe_exceptions(func):
66 66 """Converts Dulwich exceptions to something neutral."""
67 67
68 68 @wraps(func)
69 69 def wrapper(*args, **kwargs):
70 70 try:
71 71 return func(*args, **kwargs)
72 72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 73 exc = exceptions.LookupException(org_exc=e)
74 74 raise exc(safe_str(e))
75 75 except (HangupException, UnexpectedCommandError) as e:
76 76 exc = exceptions.VcsException(org_exc=e)
77 77 raise exc(safe_str(e))
78 78 except Exception as e:
 79 79 # NOTE(marcink): because of how dulwich handles some exceptions
80 80 # (KeyError on empty repos), we cannot track this and catch all
81 81 # exceptions, it's an exceptions from other handlers
82 82 #if not hasattr(e, '_vcs_kind'):
83 83 #log.exception("Unhandled exception in git remote call")
84 84 #raise_from_original(exceptions.UnhandledException)
85 85 raise
86 86 return wrapper
87 87
88 88
89 89 class Repo(DulwichRepo):
90 90 """
91 91 A wrapper for dulwich Repo class.
92 92
93 93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 94 "Too many open files" error. We need to close all opened file descriptors
95 95 once the repo object is destroyed.
96 96 """
97 97 def __del__(self):
98 98 if hasattr(self, 'object_store'):
99 99 self.close()
100 100
101 101
102 102 class Repository(LibGit2Repo):
103 103
104 104 def __enter__(self):
105 105 return self
106 106
107 107 def __exit__(self, exc_type, exc_val, exc_tb):
108 108 self.free()
109 109
110 110
111 111 class GitFactory(RepoFactory):
112 112 repo_type = 'git'
113 113
114 114 def _create_repo(self, wire, create, use_libgit2=False):
115 115 if use_libgit2:
116 116 return Repository(wire['path'])
117 117 else:
118 118 repo_path = str_to_dulwich(wire['path'])
119 119 return Repo(repo_path)
120 120
121 121 def repo(self, wire, create=False, use_libgit2=False):
122 122 """
123 123 Get a repository instance for the given path.
124 124 """
125 125 return self._create_repo(wire, create, use_libgit2)
126 126
127 127 def repo_libgit2(self, wire):
128 128 return self.repo(wire, use_libgit2=True)
129 129
130 130
131 131 class GitRemote(RemoteBase):
132 132
133 133 def __init__(self, factory):
134 134 self._factory = factory
135 135 self._bulk_methods = {
136 136 "date": self.date,
137 137 "author": self.author,
138 138 "branch": self.branch,
139 139 "message": self.message,
140 140 "parents": self.parents,
141 141 "_commit": self.revision,
142 142 }
143 143
144 144 def _wire_to_config(self, wire):
145 145 if 'config' in wire:
146 146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 147 return {}
148 148
149 149 def _remote_conf(self, config):
150 150 params = [
151 151 '-c', 'core.askpass=""',
152 152 ]
153 153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 154 if ssl_cert_dir:
155 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 156 return params
157 157
158 158 @reraise_safe_exceptions
159 159 def discover_git_version(self):
160 160 stdout, _ = self.run_git_command(
161 161 {}, ['--version'], _bare=True, _safe=True)
162 162 prefix = 'git version'
163 163 if stdout.startswith(prefix):
164 164 stdout = stdout[len(prefix):]
165 165 return stdout.strip()
166 166
167 167 @reraise_safe_exceptions
168 168 def is_empty(self, wire):
169 169 repo_init = self._factory.repo_libgit2(wire)
170 170 with repo_init as repo:
171 171
172 172 try:
173 173 has_head = repo.head.name
174 174 if has_head:
175 175 return False
176 176
177 177 # NOTE(marcink): check again using more expensive method
178 178 return repo.is_empty
179 179 except Exception:
180 180 pass
181 181
182 182 return True
183 183
184 184 @reraise_safe_exceptions
185 185 def assert_correct_path(self, wire):
186 186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 188 def _assert_correct_path(_context_uid, _repo_id):
189 189 try:
190 190 repo_init = self._factory.repo_libgit2(wire)
191 191 with repo_init as repo:
192 192 pass
193 193 except pygit2.GitError:
194 194 path = wire.get('path')
195 195 tb = traceback.format_exc()
196 196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 197 return False
198 198
199 199 return True
200 200 return _assert_correct_path(context_uid, repo_id)
201 201
202 202 @reraise_safe_exceptions
203 203 def bare(self, wire):
204 204 repo_init = self._factory.repo_libgit2(wire)
205 205 with repo_init as repo:
206 206 return repo.is_bare
207 207
208 208 @reraise_safe_exceptions
209 209 def blob_as_pretty_string(self, wire, sha):
210 210 repo_init = self._factory.repo_libgit2(wire)
211 211 with repo_init as repo:
212 212 blob_obj = repo[sha]
213 213 blob = blob_obj.data
214 214 return blob
215 215
216 216 @reraise_safe_exceptions
217 217 def blob_raw_length(self, wire, sha):
218 218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 220 def _blob_raw_length(_repo_id, _sha):
221 221
222 222 repo_init = self._factory.repo_libgit2(wire)
223 223 with repo_init as repo:
224 224 blob = repo[sha]
225 225 return blob.size
226 226
227 227 return _blob_raw_length(repo_id, sha)
228 228
229 229 def _parse_lfs_pointer(self, raw_content):
230 230
231 231 spec_string = 'version https://git-lfs.github.com/spec'
232 232 if raw_content and raw_content.startswith(spec_string):
233 233 pattern = re.compile(r"""
234 234 (?:\n)?
235 235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 237 ^size[ ](?P<oid_size>[0-9]+)\n
238 238 (?:\n)?
239 239 """, re.VERBOSE | re.MULTILINE)
240 240 match = pattern.match(raw_content)
241 241 if match:
242 242 return match.groupdict()
243 243
244 244 return {}
245 245
246 246 @reraise_safe_exceptions
247 247 def is_large_file(self, wire, commit_id):
248 248 cache_on, context_uid, repo_id = self._cache_on(wire)
249 249
250 250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 251 def _is_large_file(_repo_id, _sha):
252 252 repo_init = self._factory.repo_libgit2(wire)
253 253 with repo_init as repo:
254 254 blob = repo[commit_id]
255 255 if blob.is_binary:
256 256 return {}
257 257
258 258 return self._parse_lfs_pointer(blob.data)
259 259
260 260 return _is_large_file(repo_id, commit_id)
261 261
262 262 @reraise_safe_exceptions
263 263 def is_binary(self, wire, tree_id):
264 264 cache_on, context_uid, repo_id = self._cache_on(wire)
265 265
266 266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 267 def _is_binary(_repo_id, _tree_id):
268 268 repo_init = self._factory.repo_libgit2(wire)
269 269 with repo_init as repo:
270 270 blob_obj = repo[tree_id]
271 271 return blob_obj.is_binary
272 272
273 273 return _is_binary(repo_id, tree_id)
274 274
275 275 @reraise_safe_exceptions
276 276 def in_largefiles_store(self, wire, oid):
277 277 conf = self._wire_to_config(wire)
278 278 repo_init = self._factory.repo_libgit2(wire)
279 279 with repo_init as repo:
280 280 repo_name = repo.path
281 281
282 282 store_location = conf.get('vcs_git_lfs_store_location')
283 283 if store_location:
284 284
285 285 store = LFSOidStore(
286 286 oid=oid, repo=repo_name, store_location=store_location)
287 287 return store.has_oid()
288 288
289 289 return False
290 290
291 291 @reraise_safe_exceptions
292 292 def store_path(self, wire, oid):
293 293 conf = self._wire_to_config(wire)
294 294 repo_init = self._factory.repo_libgit2(wire)
295 295 with repo_init as repo:
296 296 repo_name = repo.path
297 297
298 298 store_location = conf.get('vcs_git_lfs_store_location')
299 299 if store_location:
300 300 store = LFSOidStore(
301 301 oid=oid, repo=repo_name, store_location=store_location)
302 302 return store.oid_path
303 303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304 304
305 305 @reraise_safe_exceptions
306 306 def bulk_request(self, wire, rev, pre_load):
307 307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 308 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 309 def _bulk_request(_repo_id, _rev, _pre_load):
310 310 result = {}
311 311 for attr in pre_load:
312 312 try:
313 313 method = self._bulk_methods[attr]
314 314 args = [wire, rev]
315 315 result[attr] = method(*args)
316 316 except KeyError as e:
317 317 raise exceptions.VcsException(e)(
318 318 "Unknown bulk attribute: %s" % attr)
319 319 return result
320 320
321 321 return _bulk_request(repo_id, rev, sorted(pre_load))
322 322
323 323 def _build_opener(self, url):
324 324 handlers = []
325 325 url_obj = url_parser(url)
326 326 _, authinfo = url_obj.authinfo()
327 327
328 328 if authinfo:
329 329 # create a password manager
330 330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
331 331 passmgr.add_password(*authinfo)
332 332
333 333 handlers.extend((httpbasicauthhandler(passmgr),
334 334 httpdigestauthhandler(passmgr)))
335 335
336 336 return urllib2.build_opener(*handlers)
337 337
338 338 def _type_id_to_name(self, type_id):
339 339 return {
340 340 1: b'commit',
341 341 2: b'tree',
342 342 3: b'blob',
343 343 4: b'tag'
344 344 }[type_id]
345 345
346 346 @reraise_safe_exceptions
347 347 def check_url(self, url, config):
348 348 url_obj = url_parser(url)
349 349 test_uri, _ = url_obj.authinfo()
350 350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 351 url_obj.query = obfuscate_qs(url_obj.query)
352 352 cleaned_uri = str(url_obj)
353 353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354 354
355 355 if not test_uri.endswith('info/refs'):
356 356 test_uri = test_uri.rstrip('/') + '/info/refs'
357 357
358 358 o = self._build_opener(url)
359 359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
360 360
361 361 q = {"service": 'git-upload-pack'}
362 362 qs = '?%s' % urllib.urlencode(q)
363 363 cu = "%s%s" % (test_uri, qs)
364 364 req = urllib2.Request(cu, None, {})
365 365
366 366 try:
367 367 log.debug("Trying to open URL %s", cleaned_uri)
368 368 resp = o.open(req)
369 369 if resp.code != 200:
370 370 raise exceptions.URLError()('Return Code is not 200')
371 371 except Exception as e:
372 372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 373 # means it cannot be cloned
374 374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375 375
376 376 # now detect if it's proper git repo
377 377 gitdata = resp.read()
378 378 if 'service=git-upload-pack' in gitdata:
379 379 pass
380 380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 381 # old style git can return some other format !
382 382 pass
383 383 else:
384 384 raise exceptions.URLError()(
385 385 "url [%s] does not look like an git" % (cleaned_uri,))
386 386
387 387 return True
388 388
389 389 @reraise_safe_exceptions
390 390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
 391 391 # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
392 392 remote_refs = self.pull(wire, url, apply_refs=False)
393 393 repo = self._factory.repo(wire)
394 394 if isinstance(valid_refs, list):
395 395 valid_refs = tuple(valid_refs)
396 396
397 397 for k in remote_refs:
398 398 # only parse heads/tags and skip so called deferred tags
399 399 if k.startswith(valid_refs) and not k.endswith(deferred):
400 400 repo[k] = remote_refs[k]
401 401
402 402 if update_after_clone:
403 403 # we want to checkout HEAD
404 404 repo["HEAD"] = remote_refs["HEAD"]
405 405 index.build_index_from_tree(repo.path, repo.index_path(),
406 406 repo.object_store, repo["HEAD"].tree)
407 407
408 408 @reraise_safe_exceptions
409 409 def branch(self, wire, commit_id):
410 410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 412 def _branch(_context_uid, _repo_id, _commit_id):
413 413 regex = re.compile('^refs/heads')
414 414
415 415 def filter_with(ref):
416 416 return regex.match(ref[0]) and ref[1] == _commit_id
417 417
418 418 branches = filter(filter_with, self.get_refs(wire).items())
419 419 return [x[0].split('refs/heads/')[-1] for x in branches]
420 420
421 421 return _branch(context_uid, repo_id, commit_id)
422 422
423 423 @reraise_safe_exceptions
424 424 def commit_branches(self, wire, commit_id):
425 425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 428 repo_init = self._factory.repo_libgit2(wire)
429 429 with repo_init as repo:
430 430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 431 return branches
432 432
433 433 return _commit_branches(context_uid, repo_id, commit_id)
434 434
435 435 @reraise_safe_exceptions
436 436 def add_object(self, wire, content):
437 437 repo_init = self._factory.repo_libgit2(wire)
438 438 with repo_init as repo:
439 439 blob = objects.Blob()
440 440 blob.set_raw_string(content)
441 441 repo.object_store.add_object(blob)
442 442 return blob.id
443 443
444 444 # TODO: this is quite complex, check if that can be simplified
445 445 @reraise_safe_exceptions
446 446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
447 447 repo = self._factory.repo(wire)
448 448 object_store = repo.object_store
449 449
450 450 # Create tree and populates it with blobs
451 451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
452 452
453 453 for node in updated:
454 454 # Compute subdirs if needed
455 455 dirpath, nodename = vcspath.split(node['path'])
456 456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
457 457 parent = commit_tree
458 458 ancestors = [('', parent)]
459 459
460 460 # Tries to dig for the deepest existing tree
461 461 while dirnames:
462 462 curdir = dirnames.pop(0)
463 463 try:
464 464 dir_id = parent[curdir][1]
465 465 except KeyError:
 466 466 # put curdir back into dirnames and stop
467 467 dirnames.insert(0, curdir)
468 468 break
469 469 else:
470 470 # If found, updates parent
471 471 parent = repo[dir_id]
472 472 ancestors.append((curdir, parent))
473 473 # Now parent is deepest existing tree and we need to create
474 474 # subtrees for dirnames (in reverse order)
475 475 # [this only applies for nodes from added]
476 476 new_trees = []
477 477
478 478 blob = objects.Blob.from_string(node['content'])
479 479
480 480 if dirnames:
481 481 # If there are trees which should be created we need to build
482 482 # them now (in reverse order)
483 483 reversed_dirnames = list(reversed(dirnames))
484 484 curtree = objects.Tree()
485 485 curtree[node['node_path']] = node['mode'], blob.id
486 486 new_trees.append(curtree)
487 487 for dirname in reversed_dirnames[:-1]:
488 488 newtree = objects.Tree()
489 489 newtree[dirname] = (DIR_STAT, curtree.id)
490 490 new_trees.append(newtree)
491 491 curtree = newtree
492 492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 493 else:
494 494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495 495
496 496 new_trees.append(parent)
497 497 # Update ancestors
498 498 reversed_ancestors = reversed(
499 499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 500 for parent, tree, path in reversed_ancestors:
501 501 parent[path] = (DIR_STAT, tree.id)
502 502 object_store.add_object(tree)
503 503
504 504 object_store.add_object(blob)
505 505 for tree in new_trees:
506 506 object_store.add_object(tree)
507 507
508 508 for node_path in removed:
509 509 paths = node_path.split('/')
510 510 tree = commit_tree
511 511 trees = [tree]
512 512 # Traverse deep into the forest...
513 513 for path in paths:
514 514 try:
515 515 obj = repo[tree[path][1]]
516 516 if isinstance(obj, objects.Tree):
517 517 trees.append(obj)
518 518 tree = obj
519 519 except KeyError:
520 520 break
521 521 # Cut down the blob and all rotten trees on the way back...
522 522 for path, tree in reversed(zip(paths, trees)):
523 523 del tree[path]
524 524 if tree:
525 525 # This tree still has elements - don't remove it or any
 526 526 # of its parents
527 527 break
528 528
529 529 object_store.add_object(commit_tree)
530 530
531 531 # Create commit
532 532 commit = objects.Commit()
533 533 commit.tree = commit_tree.id
534 534 for k, v in commit_data.iteritems():
535 535 setattr(commit, k, v)
536 536 object_store.add_object(commit)
537 537
538 538 self.create_branch(wire, branch, commit.id)
539 539
540 540 # dulwich set-ref
541 541 ref = 'refs/heads/%s' % branch
542 542 repo.refs[ref] = commit.id
543 543
544 544 return commit.id
545 545
546 546 @reraise_safe_exceptions
547 547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 548 if url != 'default' and '://' not in url:
549 549 client = LocalGitClient(url)
550 550 else:
551 551 url_obj = url_parser(url)
552 552 o = self._build_opener(url)
553 553 url, _ = url_obj.authinfo()
554 554 client = HttpGitClient(base_url=url, opener=o)
555 555 repo = self._factory.repo(wire)
556 556
557 557 determine_wants = repo.object_store.determine_wants_all
558 558 if refs:
559 559 def determine_wants_requested(references):
560 560 return [references[r] for r in references if r in refs]
561 561 determine_wants = determine_wants_requested
562 562
563 563 try:
564 564 remote_refs = client.fetch(
565 565 path=url, target=repo, determine_wants=determine_wants)
566 566 except NotGitRepository as e:
567 567 log.warning(
568 568 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 569 # Exception can contain unicode which we convert
570 570 raise exceptions.AbortException(e)(repr(e))
571 571
572 572 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 573 # refs filtered by `determine_wants` function. We need to filter result
574 574 # as well
575 575 if refs:
576 576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577 577
578 578 if apply_refs:
579 579 # TODO: johbo: Needs proper test coverage with a git repository
580 580 # that contains a tag object, so that we would end up with
581 581 # a peeled ref at this point.
582 582 for k in remote_refs:
583 583 if k.endswith(PEELED_REF_MARKER):
584 584 log.debug("Skipping peeled reference %s", k)
585 585 continue
586 586 repo[k] = remote_refs[k]
587 587
588 588 if refs and not update_after:
589 589 # mikhail: explicitly set the head to the last ref.
590 590 repo['HEAD'] = remote_refs[refs[-1]]
591 591
592 592 if update_after:
593 593 # we want to checkout HEAD
594 594 repo["HEAD"] = remote_refs["HEAD"]
595 595 index.build_index_from_tree(repo.path, repo.index_path(),
596 596 repo.object_store, repo["HEAD"].tree)
597 597 return remote_refs
598 598
599 599 @reraise_safe_exceptions
600 600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 601 repo = self._factory.repo(wire)
602 602 if refs and not isinstance(refs, (list, tuple)):
603 603 refs = [refs]
604 604
605 605 config = self._wire_to_config(wire)
606 606 # get all remote refs we'll use to fetch later
607 607 cmd = ['ls-remote']
608 608 if not all_refs:
609 609 cmd += ['--heads', '--tags']
610 610 cmd += [url]
611 611 output, __ = self.run_git_command(
612 612 wire, cmd, fail_on_stderr=False,
613 613 _copts=self._remote_conf(config),
614 614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615 615
616 616 remote_refs = collections.OrderedDict()
617 617 fetch_refs = []
618 618
619 619 for ref_line in output.splitlines():
620 620 sha, ref = ref_line.split('\t')
621 621 sha = sha.strip()
622 622 if ref in remote_refs:
623 623 # duplicate, skip
624 624 continue
625 625 if ref.endswith(PEELED_REF_MARKER):
626 626 log.debug("Skipping peeled reference %s", ref)
627 627 continue
628 628 # don't sync HEAD
629 629 if ref in ['HEAD']:
630 630 continue
631 631
632 632 remote_refs[ref] = sha
633 633
634 634 if refs and sha in refs:
635 635 # we filter fetch using our specified refs
636 636 fetch_refs.append('{}:{}'.format(ref, ref))
637 637 elif not refs:
638 638 fetch_refs.append('{}:{}'.format(ref, ref))
639 639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640 640
641 641 if fetch_refs:
642 642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 643 fetch_refs_chunks = list(chunk)
644 644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 645 _out, _err = self.run_git_command(
646 646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 647 fail_on_stderr=False,
648 648 _copts=self._remote_conf(config),
649 649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650 650
651 651 return remote_refs
652 652
653 653 @reraise_safe_exceptions
654 654 def sync_push(self, wire, url, refs=None):
655 655 if not self.check_url(url, wire):
656 656 return
657 657 config = self._wire_to_config(wire)
658 658 self._factory.repo(wire)
659 659 self.run_git_command(
660 660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 661 _copts=self._remote_conf(config),
662 662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 663
664 664 @reraise_safe_exceptions
665 665 def get_remote_refs(self, wire, url):
666 666 repo = Repo(url)
667 667 return repo.get_refs()
668 668
669 669 @reraise_safe_exceptions
670 670 def get_description(self, wire):
671 671 repo = self._factory.repo(wire)
672 672 return repo.get_description()
673 673
674 674 @reraise_safe_exceptions
675 675 def get_missing_revs(self, wire, rev1, rev2, path2):
676 676 repo = self._factory.repo(wire)
677 677 LocalGitClient(thin_packs=False).fetch(path2, repo)
678 678
679 679 wire_remote = wire.copy()
680 680 wire_remote['path'] = path2
681 681 repo_remote = self._factory.repo(wire_remote)
682 682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683 683
684 684 revs = [
685 685 x.commit.id
686 686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 687 return revs
688 688
689 689 @reraise_safe_exceptions
690 690 def get_object(self, wire, sha, maybe_unreachable=False):
691 691 cache_on, context_uid, repo_id = self._cache_on(wire)
692 692 @self.region.conditional_cache_on_arguments(condition=cache_on)
693 693 def _get_object(_context_uid, _repo_id, _sha):
694 694 repo_init = self._factory.repo_libgit2(wire)
695 695 with repo_init as repo:
696 696
697 697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
698 698 try:
699 699 commit = repo.revparse_single(sha)
700 700 except KeyError:
701 701 # NOTE(marcink): KeyError doesn't give us any meaningful information
702 702 # here, we instead give something more explicit
703 703 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
704 704 raise exceptions.LookupException(e)(missing_commit_err)
705 705 except ValueError as e:
706 706 raise exceptions.LookupException(e)(missing_commit_err)
707 707
708 708 is_tag = False
709 709 if isinstance(commit, pygit2.Tag):
710 710 commit = repo.get(commit.target)
711 711 is_tag = True
712 712
713 713 check_dangling = True
714 714 if is_tag:
715 715 check_dangling = False
716 716
717 717 if check_dangling and maybe_unreachable:
718 718 check_dangling = False
719 719
720 720 # we used a reference and it parsed means we're not having a dangling commit
721 721 if sha != commit.hex:
722 722 check_dangling = False
723 723
724 724 if check_dangling:
725 725 # check for dangling commit
726 726 for branch in repo.branches.with_commit(commit.hex):
727 727 if branch:
728 728 break
729 729 else:
730 730 # NOTE(marcink): Empty error doesn't give us any meaningful information
731 731 # here, we instead give something more explicit
732 732 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
733 733 raise exceptions.LookupException(e)(missing_commit_err)
734 734
735 735 commit_id = commit.hex
736 736 type_id = commit.type
737 737
738 738 return {
739 739 'id': commit_id,
740 740 'type': self._type_id_to_name(type_id),
741 741 'commit_id': commit_id,
742 742 'idx': 0
743 743 }
744 744
745 745 return _get_object(context_uid, repo_id, sha)
746 746
747 747 @reraise_safe_exceptions
748 748 def get_refs(self, wire):
749 749 cache_on, context_uid, repo_id = self._cache_on(wire)
750 750 @self.region.conditional_cache_on_arguments(condition=cache_on)
751 751 def _get_refs(_context_uid, _repo_id):
752 752
753 753 repo_init = self._factory.repo_libgit2(wire)
754 754 with repo_init as repo:
755 755 regex = re.compile('^refs/(heads|tags)/')
756 756 return {x.name: x.target.hex for x in
757 757 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
758 758
759 759 return _get_refs(context_uid, repo_id)
760 760
761 761 @reraise_safe_exceptions
762 762 def get_branch_pointers(self, wire):
763 763 cache_on, context_uid, repo_id = self._cache_on(wire)
764 764 @self.region.conditional_cache_on_arguments(condition=cache_on)
765 765 def _get_branch_pointers(_context_uid, _repo_id):
766 766
767 767 repo_init = self._factory.repo_libgit2(wire)
768 768 regex = re.compile('^refs/heads')
769 769 with repo_init as repo:
770 770 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
771 771 return {x.target.hex: x.shorthand for x in branches}
772 772
773 773 return _get_branch_pointers(context_uid, repo_id)
774 774
775 775 @reraise_safe_exceptions
776 776 def head(self, wire, show_exc=True):
777 777 cache_on, context_uid, repo_id = self._cache_on(wire)
778 778 @self.region.conditional_cache_on_arguments(condition=cache_on)
779 779 def _head(_context_uid, _repo_id, _show_exc):
780 780 repo_init = self._factory.repo_libgit2(wire)
781 781 with repo_init as repo:
782 782 try:
783 783 return repo.head.peel().hex
784 784 except Exception:
785 785 if show_exc:
786 786 raise
787 787 return _head(context_uid, repo_id, show_exc)
788 788
789 789 @reraise_safe_exceptions
790 790 def init(self, wire):
791 791 repo_path = str_to_dulwich(wire['path'])
792 792 self.repo = Repo.init(repo_path)
793 793
794 794 @reraise_safe_exceptions
795 795 def init_bare(self, wire):
796 796 repo_path = str_to_dulwich(wire['path'])
797 797 self.repo = Repo.init_bare(repo_path)
798 798
799 799 @reraise_safe_exceptions
800 800 def revision(self, wire, rev):
801 801
802 802 cache_on, context_uid, repo_id = self._cache_on(wire)
803 803 @self.region.conditional_cache_on_arguments(condition=cache_on)
804 804 def _revision(_context_uid, _repo_id, _rev):
805 805 repo_init = self._factory.repo_libgit2(wire)
806 806 with repo_init as repo:
807 807 commit = repo[rev]
808 808 obj_data = {
809 809 'id': commit.id.hex,
810 810 }
811 811 # tree objects itself don't have tree_id attribute
812 812 if hasattr(commit, 'tree_id'):
813 813 obj_data['tree'] = commit.tree_id.hex
814 814
815 815 return obj_data
816 816 return _revision(context_uid, repo_id, rev)
817 817
818 818 @reraise_safe_exceptions
819 819 def date(self, wire, commit_id):
820 820 cache_on, context_uid, repo_id = self._cache_on(wire)
821 821 @self.region.conditional_cache_on_arguments(condition=cache_on)
822 822 def _date(_repo_id, _commit_id):
823 823 repo_init = self._factory.repo_libgit2(wire)
824 824 with repo_init as repo:
825 825 commit = repo[commit_id]
826 826
827 827 if hasattr(commit, 'commit_time'):
828 828 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
829 829 else:
830 830 commit = commit.get_object()
831 831 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
832 832
833 833 # TODO(marcink): check dulwich difference of offset vs timezone
834 834 return [commit_time, commit_time_offset]
835 835 return _date(repo_id, commit_id)
836 836
837 837 @reraise_safe_exceptions
838 838 def author(self, wire, commit_id):
839 839 cache_on, context_uid, repo_id = self._cache_on(wire)
840 840 @self.region.conditional_cache_on_arguments(condition=cache_on)
841 841 def _author(_repo_id, _commit_id):
842 842 repo_init = self._factory.repo_libgit2(wire)
843 843 with repo_init as repo:
844 844 commit = repo[commit_id]
845 845
846 846 if hasattr(commit, 'author'):
847 847 author = commit.author
848 848 else:
849 849 author = commit.get_object().author
850 850
851 851 if author.email:
852 852 return u"{} <{}>".format(author.name, author.email)
853 853
854 854 try:
855 855 return u"{}".format(author.name)
856 856 except Exception:
857 857 return u"{}".format(safe_unicode(author.raw_name))
858 858
859 859 return _author(repo_id, commit_id)
860 860
861 861 @reraise_safe_exceptions
862 862 def message(self, wire, commit_id):
863 863 cache_on, context_uid, repo_id = self._cache_on(wire)
864 864 @self.region.conditional_cache_on_arguments(condition=cache_on)
865 865 def _message(_repo_id, _commit_id):
866 866 repo_init = self._factory.repo_libgit2(wire)
867 867 with repo_init as repo:
868 868 commit = repo[commit_id]
869 869 return commit.message
870 870 return _message(repo_id, commit_id)
871 871
872 872 @reraise_safe_exceptions
873 873 def parents(self, wire, commit_id):
874 874 cache_on, context_uid, repo_id = self._cache_on(wire)
875 875 @self.region.conditional_cache_on_arguments(condition=cache_on)
876 876 def _parents(_repo_id, _commit_id):
877 877 repo_init = self._factory.repo_libgit2(wire)
878 878 with repo_init as repo:
879 879 commit = repo[commit_id]
880 880 if hasattr(commit, 'parent_ids'):
881 881 parent_ids = commit.parent_ids
882 882 else:
883 883 parent_ids = commit.get_object().parent_ids
884 884
885 885 return [x.hex for x in parent_ids]
886 886 return _parents(repo_id, commit_id)
887 887
888 888 @reraise_safe_exceptions
889 889 def children(self, wire, commit_id):
890 890 cache_on, context_uid, repo_id = self._cache_on(wire)
891 891 @self.region.conditional_cache_on_arguments(condition=cache_on)
892 892 def _children(_repo_id, _commit_id):
893 893 output, __ = self.run_git_command(
894 894 wire, ['rev-list', '--all', '--children'])
895 895
896 896 child_ids = []
897 897 pat = re.compile(r'^%s' % commit_id)
898 898 for l in output.splitlines():
899 899 if pat.match(l):
900 900 found_ids = l.split(' ')[1:]
901 901 child_ids.extend(found_ids)
902 902
903 903 return child_ids
904 904 return _children(repo_id, commit_id)
905 905
906 906 @reraise_safe_exceptions
907 907 def set_refs(self, wire, key, value):
908 908 repo_init = self._factory.repo_libgit2(wire)
909 909 with repo_init as repo:
910 910 repo.references.create(key, value, force=True)
911 911
912 912 @reraise_safe_exceptions
913 913 def create_branch(self, wire, branch_name, commit_id, force=False):
914 914 repo_init = self._factory.repo_libgit2(wire)
915 915 with repo_init as repo:
916 916 commit = repo[commit_id]
917 917
918 918 if force:
919 919 repo.branches.local.create(branch_name, commit, force=force)
920 920 elif not repo.branches.get(branch_name):
921 921 # create only if that branch isn't existing
922 922 repo.branches.local.create(branch_name, commit, force=force)
923 923
924 924 @reraise_safe_exceptions
925 925 def remove_ref(self, wire, key):
926 926 repo_init = self._factory.repo_libgit2(wire)
927 927 with repo_init as repo:
928 928 repo.references.delete(key)
929 929
930 930 @reraise_safe_exceptions
931 931 def tag_remove(self, wire, tag_name):
932 932 repo_init = self._factory.repo_libgit2(wire)
933 933 with repo_init as repo:
934 934 key = 'refs/tags/{}'.format(tag_name)
935 935 repo.references.delete(key)
936 936
937 937 @reraise_safe_exceptions
938 938 def tree_changes(self, wire, source_id, target_id):
939 939 # TODO(marcink): remove this seems it's only used by tests
940 940 repo = self._factory.repo(wire)
941 941 source = repo[source_id].tree if source_id else None
942 942 target = repo[target_id].tree
943 943 result = repo.object_store.tree_changes(source, target)
944 944 return list(result)
945 945
946 946 @reraise_safe_exceptions
947 947 def tree_and_type_for_path(self, wire, commit_id, path):
948 948
949 949 cache_on, context_uid, repo_id = self._cache_on(wire)
950 950 @self.region.conditional_cache_on_arguments(condition=cache_on)
951 951 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
952 952 repo_init = self._factory.repo_libgit2(wire)
953 953
954 954 with repo_init as repo:
955 955 commit = repo[commit_id]
956 956 try:
957 957 tree = commit.tree[path]
958 958 except KeyError:
959 959 return None, None, None
960 960
961 961 return tree.id.hex, tree.type, tree.filemode
962 962 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
963 963
964 964 @reraise_safe_exceptions
965 965 def tree_items(self, wire, tree_id):
966 966 cache_on, context_uid, repo_id = self._cache_on(wire)
967 967 @self.region.conditional_cache_on_arguments(condition=cache_on)
968 968 def _tree_items(_repo_id, _tree_id):
969 969
970 970 repo_init = self._factory.repo_libgit2(wire)
971 971 with repo_init as repo:
972 972 try:
973 973 tree = repo[tree_id]
974 974 except KeyError:
975 975 raise ObjectMissing('No tree with id: {}'.format(tree_id))
976 976
977 977 result = []
978 978 for item in tree:
979 979 item_sha = item.hex
980 980 item_mode = item.filemode
981 981 item_type = item.type
982 982
983 983 if item_type == 'commit':
984 984 # NOTE(marcink): submodules we translate to 'link' for backward compat
985 985 item_type = 'link'
986 986
987 987 result.append((item.name, item_mode, item_sha, item_type))
988 988 return result
989 989 return _tree_items(repo_id, tree_id)
990 990
991 991 @reraise_safe_exceptions
992 992 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
993 993 """
994 994 Old version that uses subprocess to call diff
995 995 """
996 996
997 997 flags = [
998 998 '-U%s' % context, '--patch',
999 999 '--binary',
1000 1000 '--find-renames',
1001 1001 '--no-indent-heuristic',
1002 1002 # '--indent-heuristic',
1003 1003 #'--full-index',
1004 1004 #'--abbrev=40'
1005 1005 ]
1006 1006
1007 1007 if opt_ignorews:
1008 1008 flags.append('--ignore-all-space')
1009 1009
1010 1010 if commit_id_1 == self.EMPTY_COMMIT:
1011 1011 cmd = ['show'] + flags + [commit_id_2]
1012 1012 else:
1013 1013 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1014 1014
1015 1015 if file_filter:
1016 1016 cmd.extend(['--', file_filter])
1017 1017
1018 1018 diff, __ = self.run_git_command(wire, cmd)
1019 1019 # If we used 'show' command, strip first few lines (until actual diff
1020 1020 # starts)
1021 1021 if commit_id_1 == self.EMPTY_COMMIT:
1022 1022 lines = diff.splitlines()
1023 1023 x = 0
1024 1024 for line in lines:
1025 1025 if line.startswith('diff'):
1026 1026 break
1027 1027 x += 1
1028 1028 # Append new line just like 'diff' command do
1029 1029 diff = '\n'.join(lines[x:]) + '\n'
1030 1030 return diff
1031 1031
1032 1032 @reraise_safe_exceptions
1033 1033 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1034 1034 repo_init = self._factory.repo_libgit2(wire)
1035 1035 with repo_init as repo:
1036 1036 swap = True
1037 1037 flags = 0
1038 1038 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1039 1039
1040 1040 if opt_ignorews:
1041 1041 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1042 1042
1043 1043 if commit_id_1 == self.EMPTY_COMMIT:
1044 1044 comm1 = repo[commit_id_2]
1045 1045 diff_obj = comm1.tree.diff_to_tree(
1046 1046 flags=flags, context_lines=context, swap=swap)
1047 1047
1048 1048 else:
1049 1049 comm1 = repo[commit_id_2]
1050 1050 comm2 = repo[commit_id_1]
1051 1051 diff_obj = comm1.tree.diff_to_tree(
1052 1052 comm2.tree, flags=flags, context_lines=context, swap=swap)
1053 1053 similar_flags = 0
1054 1054 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1055 1055 diff_obj.find_similar(flags=similar_flags)
1056 1056
1057 1057 if file_filter:
1058 1058 for p in diff_obj:
1059 1059 if p.delta.old_file.path == file_filter:
1060 1060 return p.patch or ''
1061 1061 # fo matching path == no diff
1062 1062 return ''
1063 1063 return diff_obj.patch or ''
1064 1064
1065 1065 @reraise_safe_exceptions
1066 1066 def node_history(self, wire, commit_id, path, limit):
1067 1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1068 1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1069 1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1070 1070 # optimize for n==1, rev-list is much faster for that use-case
1071 1071 if limit == 1:
1072 1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1073 1073 else:
1074 1074 cmd = ['log']
1075 1075 if limit:
1076 1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1077 1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1078 1078
1079 1079 output, __ = self.run_git_command(wire, cmd)
1080 1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1081 1081
1082 1082 return [x for x in commit_ids]
1083 1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1084 1084
1085 1085 @reraise_safe_exceptions
1086 1086 def node_annotate(self, wire, commit_id, path):
1087 1087
1088 1088 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1089 1089 # -l ==> outputs long shas (and we need all 40 characters)
1090 1090 # --root ==> doesn't put '^' character for boundaries
1091 1091 # -r commit_id ==> blames for the given commit
1092 1092 output, __ = self.run_git_command(wire, cmd)
1093 1093
1094 1094 result = []
1095 1095 for i, blame_line in enumerate(output.split('\n')[:-1]):
1096 1096 line_no = i + 1
1097 1097 commit_id, line = re.split(r' ', blame_line, 1)
1098 1098 result.append((line_no, commit_id, line))
1099 1099 return result
1100 1100
1101 1101 @reraise_safe_exceptions
1102 1102 def update_server_info(self, wire):
1103 1103 repo = self._factory.repo(wire)
1104 1104 update_server_info(repo)
1105 1105
1106 1106 @reraise_safe_exceptions
1107 1107 def get_all_commit_ids(self, wire):
1108 1108
1109 1109 cache_on, context_uid, repo_id = self._cache_on(wire)
1110 1110 @self.region.conditional_cache_on_arguments(condition=cache_on)
1111 1111 def _get_all_commit_ids(_context_uid, _repo_id):
1112 1112
1113 1113 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1114 1114 try:
1115 1115 output, __ = self.run_git_command(wire, cmd)
1116 1116 return output.splitlines()
1117 1117 except Exception:
1118 1118 # Can be raised for empty repositories
1119 1119 return []
1120 1120 return _get_all_commit_ids(context_uid, repo_id)
1121 1121
1122 1122 @reraise_safe_exceptions
1123 1123 def run_git_command(self, wire, cmd, **opts):
1124 1124 path = wire.get('path', None)
1125 1125
1126 1126 if path and os.path.isdir(path):
1127 1127 opts['cwd'] = path
1128 1128
1129 1129 if '_bare' in opts:
1130 1130 _copts = []
1131 1131 del opts['_bare']
1132 1132 else:
1133 1133 _copts = ['-c', 'core.quotepath=false', ]
1134 1134 safe_call = False
1135 1135 if '_safe' in opts:
1136 1136 # no exc on failure
1137 1137 del opts['_safe']
1138 1138 safe_call = True
1139 1139
1140 1140 if '_copts' in opts:
1141 1141 _copts.extend(opts['_copts'] or [])
1142 1142 del opts['_copts']
1143 1143
1144 1144 gitenv = os.environ.copy()
1145 1145 gitenv.update(opts.pop('extra_env', {}))
1146 1146 # need to clean fix GIT_DIR !
1147 1147 if 'GIT_DIR' in gitenv:
1148 1148 del gitenv['GIT_DIR']
1149 1149 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1150 1150 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1151 1151
1152 1152 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1153 1153 _opts = {'env': gitenv, 'shell': False}
1154 1154
1155 1155 proc = None
1156 1156 try:
1157 1157 _opts.update(opts)
1158 1158 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1159 1159
1160 1160 return ''.join(proc), ''.join(proc.error)
1161 1161 except (EnvironmentError, OSError) as err:
1162 1162 cmd = ' '.join(cmd) # human friendly CMD
1163 1163 tb_err = ("Couldn't run git command (%s).\n"
1164 1164 "Original error was:%s\n"
1165 1165 "Call options:%s\n"
1166 1166 % (cmd, err, _opts))
1167 1167 log.exception(tb_err)
1168 1168 if safe_call:
1169 1169 return '', err
1170 1170 else:
1171 1171 raise exceptions.VcsException()(tb_err)
1172 1172 finally:
1173 1173 if proc:
1174 1174 proc.close()
1175 1175
1176 1176 @reraise_safe_exceptions
1177 1177 def install_hooks(self, wire, force=False):
1178 1178 from vcsserver.hook_utils import install_git_hooks
1179 1179 bare = self.bare(wire)
1180 1180 path = wire['path']
1181 1181 return install_git_hooks(path, bare, force_create=force)
1182 1182
1183 1183 @reraise_safe_exceptions
1184 1184 def get_hooks_info(self, wire):
1185 1185 from vcsserver.hook_utils import (
1186 1186 get_git_pre_hook_version, get_git_post_hook_version)
1187 1187 bare = self.bare(wire)
1188 1188 path = wire['path']
1189 1189 return {
1190 1190 'pre_version': get_git_pre_hook_version(path, bare),
1191 1191 'post_version': get_git_post_hook_version(path, bare),
1192 1192 }
@@ -1,19 +1,19 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 from app import create_app
@@ -1,292 +1,292 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import re
19 19 import logging
20 20 from wsgiref.util import FileWrapper
21 21
22 22 import simplejson as json
23 23 from pyramid.config import Configurator
24 24 from pyramid.response import Response, FileIter
25 25 from pyramid.httpexceptions import (
26 26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 27 HTTPUnprocessableEntity)
28 28
29 29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 31 from vcsserver.utils import safe_int
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38 38
39 39
40 40 def write_response_error(http_exception, text=None):
41 41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 42 _exception = http_exception(content_type=content_type)
43 43 _exception.content_type = content_type
44 44 if text:
45 45 _exception.body = json.dumps({'message': text})
46 46 log.debug('LFS: writing response of type %s to client with text:%s',
47 47 http_exception, text)
48 48 return _exception
49 49
50 50
51 51 class AuthHeaderRequired(object):
52 52 """
53 53 Decorator to check if request has proper auth-header
54 54 """
55 55
56 56 def __call__(self, func):
57 57 return get_cython_compat_decorator(self.__wrapper, func)
58 58
59 59 def __wrapper(self, func, *fargs, **fkwargs):
60 60 request = fargs[1]
61 61 auth = request.authorization
62 62 if not auth:
63 63 return write_response_error(HTTPForbidden)
64 64 return func(*fargs[1:], **fkwargs)
65 65
66 66
67 67 # views
68 68
69 69 def lfs_objects(request):
70 70 # indicate not supported, V1 API
71 71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 73
74 74
75 75 @AuthHeaderRequired()
76 76 def lfs_objects_batch(request):
77 77 """
78 78 The client sends the following information to the Batch endpoint to transfer some objects:
79 79
80 80 operation - Should be download or upload.
81 81 transfers - An optional Array of String identifiers for transfer
82 82 adapters that the client has configured. If omitted, the basic
83 83 transfer adapter MUST be assumed by the server.
84 84 objects - An Array of objects to download.
85 85 oid - String OID of the LFS object.
86 86 size - Integer byte size of the LFS object. Must be at least zero.
87 87 """
88 88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 89 auth = request.authorization
90 90 repo = request.matchdict.get('repo')
91 91 data = request.json
92 92 operation = data.get('operation')
93 93 http_scheme = request.registry.git_lfs_http_scheme
94 94
95 95 if operation not in ('download', 'upload'):
96 96 log.debug('LFS: unsupported operation:%s', operation)
97 97 return write_response_error(
98 98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
99 99
100 100 if 'objects' not in data:
101 101 log.debug('LFS: missing objects data')
102 102 return write_response_error(
103 103 HTTPBadRequest, 'missing objects data')
104 104
105 105 log.debug('LFS: handling operation of type: %s', operation)
106 106
107 107 objects = []
108 108 for o in data['objects']:
109 109 try:
110 110 oid = o['oid']
111 111 obj_size = o['size']
112 112 except KeyError:
113 113 log.exception('LFS, failed to extract data')
114 114 return write_response_error(
115 115 HTTPBadRequest, 'unsupported data in objects')
116 116
117 117 obj_data = {'oid': oid}
118 118
119 119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
120 120 _scheme=http_scheme)
121 121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
122 122 _scheme=http_scheme)
123 123 store = LFSOidStore(
124 124 oid, repo, store_location=request.registry.git_lfs_store_path)
125 125 handler = OidHandler(
126 126 store, repo, auth, oid, obj_size, obj_data,
127 127 obj_href, obj_verify_href)
128 128
129 129 # this verifies also OIDs
130 130 actions, errors = handler.exec_operation(operation)
131 131 if errors:
132 132 log.warning('LFS: got following errors: %s', errors)
133 133 obj_data['errors'] = errors
134 134
135 135 if actions:
136 136 obj_data['actions'] = actions
137 137
138 138 obj_data['size'] = obj_size
139 139 obj_data['authenticated'] = True
140 140 objects.append(obj_data)
141 141
142 142 result = {'objects': objects, 'transfer': 'basic'}
143 143 log.debug('LFS Response %s', safe_result(result))
144 144
145 145 return result
146 146
147 147
148 148 def lfs_objects_oid_upload(request):
149 149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
150 150 repo = request.matchdict.get('repo')
151 151 oid = request.matchdict.get('oid')
152 152 store = LFSOidStore(
153 153 oid, repo, store_location=request.registry.git_lfs_store_path)
154 154 engine = store.get_engine(mode='wb')
155 155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
156 156
157 157 body = request.environ['wsgi.input']
158 158
159 159 with engine as f:
160 160 blksize = 64 * 1024 # 64kb
161 161 while True:
162 162 # read in chunks as stream comes in from Gunicorn
163 163 # this is a specific Gunicorn support function.
164 164 # might work differently on waitress
165 165 chunk = body.read(blksize)
166 166 if not chunk:
167 167 break
168 168 f.write(chunk)
169 169
170 170 return {'upload': 'ok'}
171 171
172 172
173 173 def lfs_objects_oid_download(request):
174 174 repo = request.matchdict.get('repo')
175 175 oid = request.matchdict.get('oid')
176 176
177 177 store = LFSOidStore(
178 178 oid, repo, store_location=request.registry.git_lfs_store_path)
179 179 if not store.has_oid():
180 180 log.debug('LFS: oid %s does not exists in store', oid)
181 181 return write_response_error(
182 182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
183 183
184 184 # TODO(marcink): support range header ?
185 185 # Range: bytes=0-, `bytes=(\d+)\-.*`
186 186
187 187 f = open(store.oid_path, 'rb')
188 188 response = Response(
189 189 content_type='application/octet-stream', app_iter=FileIter(f))
190 190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
191 191 return response
192 192
193 193
194 194 def lfs_objects_verify(request):
195 195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
196 196 repo = request.matchdict.get('repo')
197 197
198 198 data = request.json
199 199 oid = data.get('oid')
200 200 size = safe_int(data.get('size'))
201 201
202 202 if not (oid and size):
203 203 return write_response_error(
204 204 HTTPBadRequest, 'missing oid and size in request data')
205 205
206 206 store = LFSOidStore(
207 207 oid, repo, store_location=request.registry.git_lfs_store_path)
208 208 if not store.has_oid():
209 209 log.debug('LFS: oid %s does not exists in store', oid)
210 210 return write_response_error(
211 211 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
212 212
213 213 store_size = store.size_oid()
214 214 if store_size != size:
215 215 msg = 'requested file size mismatch store size:%s requested:%s' % (
216 216 store_size, size)
217 217 return write_response_error(
218 218 HTTPUnprocessableEntity, msg)
219 219
220 220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
221 221
222 222
223 223 def lfs_objects_lock(request):
224 224 return write_response_error(
225 225 HTTPNotImplemented, 'GIT LFS locking api not supported')
226 226
227 227
228 228 def not_found(request):
229 229 return write_response_error(
230 230 HTTPNotFound, 'request path not found')
231 231
232 232
233 233 def lfs_disabled(request):
234 234 return write_response_error(
235 235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
236 236
237 237
238 238 def git_lfs_app(config):
239 239
240 240 # v1 API deprecation endpoint
241 241 config.add_route('lfs_objects',
242 242 '/{repo:.*?[^/]}/info/lfs/objects')
243 243 config.add_view(lfs_objects, route_name='lfs_objects',
244 244 request_method='POST', renderer='json')
245 245
246 246 # locking API
247 247 config.add_route('lfs_objects_lock',
248 248 '/{repo:.*?[^/]}/info/lfs/locks')
249 249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
250 250 request_method=('POST', 'GET'), renderer='json')
251 251
252 252 config.add_route('lfs_objects_lock_verify',
253 253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
254 254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
255 255 request_method=('POST', 'GET'), renderer='json')
256 256
257 257 # batch API
258 258 config.add_route('lfs_objects_batch',
259 259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
260 260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
261 261 request_method='POST', renderer='json')
262 262
263 263 # oid upload/download API
264 264 config.add_route('lfs_objects_oid',
265 265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
266 266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
267 267 request_method='PUT', renderer='json')
268 268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
269 269 request_method='GET', renderer='json')
270 270
271 271 # verification API
272 272 config.add_route('lfs_objects_verify',
273 273 '/{repo:.*?[^/]}/info/lfs/verify')
274 274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
275 275 request_method='POST', renderer='json')
276 276
277 277 # not found handler for API
278 278 config.add_notfound_view(not_found, renderer='json')
279 279
280 280
281 281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
282 282 config = Configurator()
283 283 if git_lfs_enabled:
284 284 config.include(git_lfs_app)
285 285 config.registry.git_lfs_store_path = git_lfs_store_path
286 286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
287 287 else:
288 288 # not found handler for API, reporting disabled LFS support
289 289 config.add_notfound_view(lfs_disabled, renderer='json')
290 290
291 291 app = config.make_wsgi_app()
292 292 return app
@@ -1,175 +1,175 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import logging
21 21 from collections import OrderedDict
22 22
23 23 log = logging.getLogger(__name__)
24 24
25 25
class OidHandler(object):
    """Builds git-lfs batch-API responses (download/upload) for one OID."""

    def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
                 obj_verify_href=None):
        self.current_store = store
        self.repo_name = repo_name
        self.auth = auth
        self.oid = oid
        self.obj_size = obj_size
        self.obj_data = obj_data
        self.obj_href = obj_href
        self.obj_verify_href = obj_verify_href

    def get_store(self, mode=None):
        # `mode` is accepted for interface compatibility but currently unused
        return self.current_store

    def get_auth(self):
        """returns auth header for re-use in upload/download"""
        return " ".join(self.auth)

    def download(self):
        """Build the `download` action; report a 404 error if the OID is absent."""
        store = self.get_store()

        errors = None
        if not store.has_oid():
            # error reply back to client that something is wrong with dl
            message = 'object: {} does not exist in store'.format(store.oid)
            errors = OrderedDict(
                error=OrderedDict(
                    code=404,
                    message=message,
                )
            )

        if errors:
            return None, errors

        action = OrderedDict(
            href=self.obj_href,
            header=OrderedDict([("Authorization", self.get_auth())])
        )
        return OrderedDict(download=action), None

    def upload(self, skip_existing=True):
        """
        Write upload action for git-lfs server
        """
        store = self.get_store()
        errors = None

        # verify if we have the OID before, if we do, reply with empty
        if store.has_oid():
            log.debug('LFS: store already has oid %s', store.oid)

            # validate size
            stored_size = store.size_oid()
            if stored_size != self.obj_size:
                log.warning(
                    'LFS: size mismatch for oid:%s, in store:%s expected: %s',
                    self.oid, stored_size, self.obj_size)
            elif skip_existing:
                log.debug('LFS: skipping further action as oid is existing')
                return None, errors

        upload_action = OrderedDict(
            href=self.obj_href,
            header=OrderedDict([
                ("Authorization", self.get_auth()),
                ("Transfer-Encoding", "chunked"),
            ])
        )

        response = OrderedDict(upload=upload_action)
        # if specified in handler, return the verification endpoint
        if self.obj_verify_href:
            response['verify'] = OrderedDict(
                href=self.obj_verify_href,
                header=OrderedDict([("Authorization", self.get_auth())])
            )
        return response, errors

    def exec_operation(self, operation, *args, **kwargs):
        """Look up *operation* as a method on self and invoke it."""
        handler = getattr(self, operation)
        log.debug('LFS: handling request using %s handler', handler)
        return handler(*args, **kwargs)
114 114
115 115
class LFSOidStore(object):
    """Filesystem-backed storage for a single git-lfs object (OID)."""

    def __init__(self, oid, repo, store_location=None):
        self.oid = oid
        self.repo = repo
        self.store_path = store_location or self.get_default_store()
        self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
        self.oid_path = os.path.join(self.store_path, oid)
        self.fd = None

    def get_engine(self, mode):
        """
        engine = .get_engine(mode='wb')
        with engine as f:
            f.write('...')
        """

        class StoreEngine(object):
            """Context manager writing to a temp file, moved into place on exit."""

            def __init__(self, mode, store_path, oid_path, tmp_oid_path):
                self.mode = mode
                self.store_path = store_path
                self.oid_path = oid_path
                self.tmp_oid_path = tmp_oid_path

            def __enter__(self):
                if not os.path.isdir(self.store_path):
                    os.makedirs(self.store_path)
                # TODO(marcink): maybe write metadata here with size/oid ?
                self.fd = open(self.tmp_oid_path, self.mode)
                return self.fd

            def __exit__(self, exc_type, exc_value, traceback):
                # close tmp file, and rename to final destination
                self.fd.close()
                shutil.move(self.tmp_oid_path, self.oid_path)

        return StoreEngine(
            mode, self.store_path, self.oid_path, self.tmp_oid_path)

    def get_default_store(self):
        """
        Default store, consistent with defaults of Mercurial large files store
        which is /home/username/.cache/largefiles
        """
        return os.path.join(os.path.expanduser("~"), '.cache', 'lfs-store')

    def has_oid(self):
        # an OID exists iff its file is present in the store directory
        return os.path.exists(os.path.join(self.store_path, self.oid))

    def size_oid(self):
        # -1 signals "object not in store"
        if not self.has_oid():
            return -1
        return os.stat(os.path.join(self.store_path, self.oid)).st_size
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,272 +1,272 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from webtest.app import TestApp as WebObTestApp
21 21 import simplejson as json
22 22
23 23 from vcsserver.git_lfs.app import create_app
24 24
25 25
@pytest.fixture(scope='function')
def git_lfs_app(tmpdir):
    """Test app with LFS enabled over plain http, storing objects in *tmpdir*."""
    store_path = str(tmpdir)
    app = WebObTestApp(create_app(
        git_lfs_enabled=True, git_lfs_store_path=store_path,
        git_lfs_http_scheme='http'))
    # expose the store location so tests can pre-seed / inspect objects
    app._store = store_path
    return app
33 33
34 34
@pytest.fixture(scope='function')
def git_lfs_https_app(tmpdir):
    """Test app with LFS enabled over https, storing objects in *tmpdir*."""
    store_path = str(tmpdir)
    app = WebObTestApp(create_app(
        git_lfs_enabled=True, git_lfs_store_path=store_path,
        git_lfs_http_scheme='https'))
    # expose the store location so tests can pre-seed / inspect objects
    app._store = store_path
    return app
42 42
43 43
@pytest.fixture()
def http_auth():
    """WSGI environ extras carrying a dummy basic-auth header."""
    return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
47 47
48 48
class TestLFSApplication(object):
    """Endpoint-level tests for the git-lfs WSGI app.

    Covers the batch API (download/upload negotiation), the verify
    endpoint, and raw OID upload/download, for both http and https
    href generation.
    """

    def test_app_wrong_path(self, git_lfs_app):
        """Unknown paths under the LFS prefix answer 404."""
        git_lfs_app.get('/repo/info/lfs/xxx', status=404)

    def test_app_deprecated_endpoint(self, git_lfs_app):
        """The legacy v1 `/objects` endpoint is explicitly unsupported (501)."""
        response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
        assert response.status_code == 501
        assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}

    def test_app_lock_verify_api_not_available(self, git_lfs_app):
        """Locking verify API is declared unsupported (501)."""
        response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
        assert response.status_code == 501
        assert json.loads(response.text) == {
            u'message': u'GIT LFS locking api not supported'}

    def test_app_lock_api_not_available(self, git_lfs_app):
        """Locking API is declared unsupported (501)."""
        response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
        assert response.status_code == 501
        assert json.loads(response.text) == {
            u'message': u'GIT LFS locking api not supported'}

    def test_app_batch_api_missing_auth(self, git_lfs_app):
        """Batch API rejects requests without an Authorization header (403)."""
        git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={}, status=403)

    def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
        """Batch API rejects an unknown/missing `operation` value (400)."""
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={}, status=400,
            extra_environ=http_auth)
        assert json.loads(response.text) == {
            u'message': u'unsupported operation mode: `None`'}

    def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
        """Batch API rejects a request without an `objects` payload (400)."""
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params={'operation': 'download'},
            status=400, extra_environ=http_auth)
        assert json.loads(response.text) == {
            u'message': u'missing objects data'}

    def test_app_batch_api_unsupported_data_in_objects(
            self, git_lfs_app, http_auth):
        """Batch API rejects objects entries lacking oid/size (400)."""
        params = {'operation': 'download',
                  'objects': [{}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params, status=400,
            extra_environ=http_auth)
        assert json.loads(response.text) == {
            u'message': u'unsupported data in objects'}

    def test_app_batch_api_download_missing_object(
            self, git_lfs_app, http_auth):
        """Downloading an OID not in the store yields a per-object 404 error."""
        params = {'operation': 'download',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)

        expected_objects = [
            {u'authenticated': True,
             u'errors': {u'error': {
                 u'code': 404,
                 u'message': u'object: 123 does not exist in store'}},
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert json.loads(response.text) == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_download(self, git_lfs_app, http_auth):
        """Downloading an existing OID returns a download action with http href."""
        oid = '456'
        # seed the store with the object so the batch call can find it
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'operation': 'download',
                  'objects': [{'oid': oid, 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)

        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'download': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/objects/456'},
             },
             u'oid': u'456',
             u'size': u'1024'}
        ]
        assert json.loads(response.text) == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_upload(self, git_lfs_app, http_auth):
        """Upload negotiation returns chunked upload + verify actions."""
        params = {'operation': 'upload',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)
        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'upload': {
                     u'header': {u'Authorization': u'Basic XXXXX',
                                 u'Transfer-Encoding': u'chunked'},
                     u'href': u'http://localhost/repo/info/lfs/objects/123'},
                 u'verify': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'http://localhost/repo/info/lfs/verify'}
             },
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert json.loads(response.text) == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
        """With the https scheme configured, action hrefs use https URLs."""
        params = {'operation': 'upload',
                  'objects': [{'oid': '123', 'size': '1024'}]}
        response = git_lfs_https_app.post_json(
            '/repo/info/lfs/objects/batch', params=params,
            extra_environ=http_auth)
        expected_objects = [
            {u'authenticated': True,
             u'actions': {
                 u'upload': {
                     u'header': {u'Authorization': u'Basic XXXXX',
                                 u'Transfer-Encoding': u'chunked'},
                     u'href': u'https://localhost/repo/info/lfs/objects/123'},
                 u'verify': {
                     u'header': {u'Authorization': u'Basic XXXXX'},
                     u'href': u'https://localhost/repo/info/lfs/verify'}
             },
             u'oid': u'123',
             u'size': u'1024'}
        ]
        assert json.loads(response.text) == {
            'objects': expected_objects, 'transfer': 'basic'}

    def test_app_verify_api_missing_data(self, git_lfs_app):
        """Verify endpoint requires both oid and size (400)."""
        params = {'oid': 'missing'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params,
            status=400)

        assert json.loads(response.text) == {
            u'message': u'missing oid and size in request data'}

    def test_app_verify_api_missing_obj(self, git_lfs_app):
        """Verify endpoint reports 404 for an OID not present in the store."""
        params = {'oid': 'missing', 'size': '1024'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params,
            status=404)

        assert json.loads(response.text) == {
            u'message': u'oid `missing` does not exists in store'}

    def test_app_verify_api_size_mismatch(self, git_lfs_app):
        """Verify endpoint reports 422 when stored size differs from requested."""
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'oid': oid, 'size': '1024'}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params, status=422)

        assert json.loads(response.text) == {
            u'message': u'requested file size mismatch '
                        u'store size:11 requested:1024'}

    def test_app_verify_api(self, git_lfs_app):
        """Verify endpoint confirms an existing OID with the matching size."""
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        params = {'oid': oid, 'size': 11}
        response = git_lfs_app.post_json(
            '/repo/info/lfs/verify', params=params)

        assert json.loads(response.text) == {
            u'message': {u'size': u'ok', u'in_store': u'ok'}}

    def test_app_download_api_oid_not_existing(self, git_lfs_app):
        """Raw download of a missing OID answers 404 with an explanation."""
        oid = 'missing'

        response = git_lfs_app.get(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)

        assert json.loads(response.text) == {
            u'message': u'requested file with oid `missing` not found in store'}

    def test_app_download_api(self, git_lfs_app):
        """Raw download of an existing OID succeeds."""
        oid = 'existing'
        oid_path = os.path.join(git_lfs_app._store, oid)
        if not os.path.isdir(os.path.dirname(oid_path)):
            os.makedirs(os.path.dirname(oid_path))
        with open(oid_path, 'wb') as f:
            f.write('OID_CONTENT')

        response = git_lfs_app.get(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid))
        assert response

    def test_app_upload(self, git_lfs_app):
        """Raw PUT of an OID stores its body in the LFS store."""
        oid = 'uploaded'

        response = git_lfs_app.put(
            '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')

        assert json.loads(response.text) == {u'upload': u'ok'}

        # verify that we actually wrote that OID
        oid_path = os.path.join(git_lfs_app._store, oid)
        assert os.path.isfile(oid_path)
        assert 'CONTENT' == open(oid_path).read()
@@ -1,141 +1,141 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 21
22 22
@pytest.fixture()
def lfs_store(tmpdir):
    """LFSOidStore bound to a throw-away directory with a fixed test OID."""
    return LFSOidStore(
        oid='123456789', repo='test', store_location=str(tmpdir))
29 29
30 30
@pytest.fixture()
def oid_handler(lfs_store):
    """OidHandler wired to the lfs_store fixture with dummy auth and URLs."""
    return OidHandler(
        store=lfs_store, repo_name=lfs_store.repo, auth=('basic', 'xxxx'),
        oid=lfs_store.oid,
        obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
        obj_verify_href='http://localhost/verify')
43 43
44 44
class TestOidHandler(object):
    """Unit tests for OidHandler's download/upload action building."""

    @pytest.mark.parametrize('exec_action', [
        'download',
        'upload',
    ])
    def test_exec_action(self, exec_action, oid_handler):
        """Known operations dispatch to a handler and return a result tuple."""
        handler = oid_handler.exec_operation(exec_action)
        assert handler

    def test_exec_action_undefined(self, oid_handler):
        """Unknown operations raise AttributeError from getattr dispatch."""
        with pytest.raises(AttributeError):
            oid_handler.exec_operation('wrong')

    def test_download_oid_not_existing(self, oid_handler):
        """Download of a missing OID yields no response and a 404 error dict."""
        response, has_errors = oid_handler.exec_operation('download')

        assert response is None
        assert has_errors['error'] == {
            'code': 404,
            'message': 'object: 123456789 does not exist in store'}

    def test_download_oid(self, oid_handler):
        """Download of an existing OID yields the href + auth header action."""
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')

        response, has_errors = oid_handler.exec_operation('download')

        assert has_errors is None
        assert response['download'] == {
            'header': {'Authorization': 'basic xxxx'},
            'href': 'http://localhost/handle_oid'
        }

    def test_upload_oid_that_exists(self, oid_handler):
        """Upload of an already-stored OID with matching size is skipped."""
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')
        oid_handler.obj_size = 7
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response is None

    def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
        """Size mismatch on an existing OID still returns an upload action."""
        store = oid_handler.get_store()
        if not os.path.isdir(os.path.dirname(store.oid_path)):
            os.makedirs(os.path.dirname(store.oid_path))

        with open(store.oid_path, 'wb') as f:
            f.write('CONTENT')

        oid_handler.obj_size = 10240
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response['upload'] == {
            'header': {'Authorization': 'basic xxxx',
                       'Transfer-Encoding': 'chunked'},
            'href': 'http://localhost/handle_oid',
        }

    def test_upload_oid(self, oid_handler):
        """Upload of a fresh OID returns a chunked upload action."""
        response, has_errors = oid_handler.exec_operation('upload')
        assert has_errors is None
        assert response['upload'] == {
            'header': {'Authorization': 'basic xxxx',
                       'Transfer-Encoding': 'chunked'},
            'href': 'http://localhost/handle_oid'
        }
120 120
121 121
class TestLFSStore(object):
    """Filesystem-level tests for LFSOidStore's write engine."""

    def test_write_oid(self, lfs_store):
        """Writing through the engine materializes the OID file."""
        target = lfs_store.oid_path
        assert not os.path.isfile(target)

        with lfs_store.get_engine(mode='wb') as f:
            f.write('CONTENT')

        assert os.path.isfile(target)

    def test_detect_has_oid(self, lfs_store):
        """has_oid flips from False to True once content is written."""
        assert lfs_store.has_oid() is False

        with lfs_store.get_engine(mode='wb') as f:
            f.write('CONTENT')

        assert lfs_store.has_oid() is True
@@ -1,50 +1,50 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import copy
18 18 from functools import wraps
19 19
20 20
def get_cython_compat_decorator(wrapper, func):
    """
    Creates a cython compatible decorator. The previously used
    decorator.decorator() function seems to be incompatible with cython.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    @wraps(func)
    def _invoke(*args, **kwargs):
        # delegate to the wrapper, passing the target function first
        return wrapper(func, *args, **kwargs)

    _invoke.__wrapped__ = func
    return _invoke
34 34
35 35
def safe_result(result):
    """clean result for better representation in logs"""
    cleaned = copy.deepcopy(result)

    try:
        objects = cleaned['objects'] if 'objects' in cleaned else []
        for oid_data in objects:
            actions = oid_data['actions'] if 'actions' in oid_data else {}
            for _action_name, action_data in actions.items():
                if 'header' in action_data:
                    # never log real auth headers
                    action_data['header'] = {'Authorization': '*****'}
    except Exception:
        # sanitizing is best-effort; fall back to the raw result
        return result

    return cleaned
@@ -1,1009 +1,1009 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase, purge
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30 from mercurial import repair
31 31
32 32 import vcsserver
33 33 from vcsserver import exceptions
34 34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 35 from vcsserver.hgcompat import (
36 36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 40 RepoLookupError, InterventionRequired, RequirementError)
41 41 from vcsserver.vcs_base import RemoteBase
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
def make_ui_from_config(repo_config):
    """Build a Mercurial ui object from (section, option, value) triples.

    The returned ui mirrors all Mercurial output into our logger, applies
    the supplied repo configuration, and then forces a few server-side
    defaults (quiet, no pager, single worker, largefiles off by default).
    """

    class LoggingUI(ui.ui):
        """ui subclass forwarding Mercurial messages into the module logger."""

        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warn(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    base_ui = LoggingUI()

    # start from empty overlay/user/trusted configs
    base_ui._ocfg = hgconfig.config()
    base_ui._ucfg = hgconfig.config()
    base_ui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        base_ui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    base_ui.setconfig('ui', 'quiet', 'true')
    base_ui.setconfig('ui', 'paginate', 'never')
    # for better Error reporting of Mercurial
    base_ui.setconfig('ui', 'message-output', 'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    base_ui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not base_ui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        base_ui.setconfig('extensions', 'largefiles', '!')

    return base_ui
99 99
100 100
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral.

    Mercurial-specific exception types are translated into the vcsserver
    exception hierarchy via ``raise_from_original``; anything without a
    ``_vcs_kind`` marker is logged and wrapped as ``UnhandledException``.
    """
    # local import keeps the decorator self-contained without touching the
    # module's import block
    from functools import wraps

    # fix: preserve the wrapped function's metadata (__name__/__doc__) so
    # logging and introspection report the real callable, not 'wrapper'
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            # exceptions already tagged with _vcs_kind pass through as-is
            raise
    return wrapper
124 124
125 125
class MercurialFactory(RepoFactory):
    """RepoFactory producing configured Mercurial repository objects."""
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """Build a baseui from config triples, optionally stripping RC hooks."""
        if not hooks:
            internal_hooks = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [
                (section, option, value)
                for section, option, value in config
                if not (section == 'hooks' and option in internal_hooks)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return instance(baseui, wire["path"], create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
153 153
154 154
def patch_ui_message_output(baseui):
    """Redirect all ui output channels into one shared BytesIO buffer.

    Returns the patched ui together with the buffer, so callers can read
    back everything Mercurial printed during an operation.
    """
    baseui.setconfig('ui', 'quiet', 'false')
    captured = io.BytesIO()

    def write(data, **unused_kwargs):
        captured.write(data)

    # every output channel funnels into the same buffer
    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, write)

    return baseui, captured
168 168
169 169
class HgRemote(RemoteBase):
    """Wire-protocol remote exposing Mercurial repository operations.

    Read-mostly methods are memoized through the
    ``self.region.conditional_cache_on_arguments`` pattern: the *inner*
    function's parameters form the cache key, so every value the body reads
    must appear in that signature (see the fix in ``node_history_untill``).
    """

    def __init__(self, factory):
        self._factory = factory
        # Attribute name -> accessor, used by bulk_request to resolve
        # several commit attributes in one call.
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }

    def _get_ctx(self, repo, ref):
        return get_ctx(repo, ref)

    @reraise_safe_exceptions
    def discover_hg_version(self):
        from mercurial import util
        return util.version()

    @reraise_safe_exceptions
    def is_empty(self, wire):
        """Return True when the repository has no commits (best effort)."""
        repo = self._factory.repo(wire)

        try:
            return len(repo) == 0
        except Exception:
            log.exception("failed to read object_store")
            return False

    @reraise_safe_exceptions
    def archive_repo(self, archive_path, mtime, file_info, kind):
        """Write an archive of ``file_info`` entries in the given ``kind``
        (tgz/tbz2/zip) format to ``archive_path``."""
        if kind == "tgz":
            archiver = archival.tarit(archive_path, mtime, "gz")
        elif kind == "tbz2":
            archiver = archival.tarit(archive_path, mtime, "bz2")
        elif kind == 'zip':
            archiver = archival.zipit(archive_path, mtime)
        else:
            raise exceptions.ArchiveException()(
                'Remote does not support: "%s".' % kind)

        for f_path, f_mode, f_is_link, f_content in file_info:
            archiver.addfile(f_path, f_mode, f_is_link, f_content)
        archiver.done()

    @reraise_safe_exceptions
    def bookmarks(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return dict(repo._bookmarks)

        return _bookmarks(context_uid, repo_id)

    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        """Return {branch_name: tip}, filtered by the normal/closed flags."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[branch_name] = tip
                if closed and is_closed:
                    bt[branch_name] = tip

            return bt

        return _branches(context_uid, repo_id, normal, closed)

    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        """Resolve several commit attributes (keys of ``_bulk_methods``) for
        one commit in a single cached call."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        # pre_load is sorted so differently-ordered requests share a cache key
        return _bulk_request(repo_id, commit_id, sorted(pre_load))

    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()
        return _ctx_branch(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_description(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.description()

    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        repo = self._factory.repo(path)
        ctx = self._get_ctx(repo, revision)
        return list(ctx)

    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        """Return hex ids of visible (non-hidden, non-obsolete) parents."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        """Return hex ids of visible (non-hidden, non-obsolete) children."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=3
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_substate(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.substate

    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # object of status (odd, custom named tuple in mercurial) is not
        # correctly serializable, we make it a list, as the underling
        # API expects this to be a list
        return list(status)

    @reraise_safe_exceptions
    def ctx_user(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.user()

    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Validate that ``url`` is reachable and is a Mercurial repository.

        Raises ``exceptions.URLError`` if the URL cannot be opened or does
        not look like an hg repo; returns True otherwise. Credentials are
        obfuscated before being logged.
        """
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True

    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
        """Return a unified diff between two commits, optionally restricted
        to ``file_filter``."""
        repo = self._factory.repo(wire)

        if file_filter:
            match_filter = match(file_filter[0], '', [file_filter[1]])
        else:
            match_filter = file_filter
        opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)

        try:
            return "".join(patch.diff(
                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
        except RepoLookupError as e:
            raise exceptions.LookupException(e)()

    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        """Return up to ``limit`` hex commit ids touching ``path``, newest
        first, skipping hidden/obsolete changesets."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            def history_iter():
                limit_rev = fctx.rev()
                for obj in reversed(list(fctx.filelog())):
                    obj = fctx.filectx(obj)
                    ctx = obj.changectx()
                    if ctx.hidden() or ctx.obsolete():
                        continue

                    if limit_rev >= obj.rev():
                        yield obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return [x for x in history]
        return _node_history(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def node_history_untill(self, wire, revision, path, limit):
        # NOTE: public name keeps its historical typo ("untill") because it
        # is part of the wire interface; callers depend on it.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
            # BUG FIX: this inner function used to accept only
            # (_context_uid, _repo_id) yet was called below with five
            # arguments, raising TypeError; revision/path/limit were also
            # missing from the cache key.
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            file_log = list(fctx.filelog())
            if limit:
                # Limit to the last n items
                file_log = file_log[-limit:]

            return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
        return _node_history_until(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def fctx_annotate(self, wire, revision, path):
        """Return per-line annotation of ``path`` at ``revision`` as a list
        of (line_no, hex_sha, line_content) tuples."""
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(path)

        result = []
        for i, annotate_obj in enumerate(fctx.annotate(), 1):
            ln_no = i
            sha = hex(annotate_obj.fctx.node())
            content = annotate_obj.text
            result.append((ln_no, sha, content))
        return result

    @reraise_safe_exceptions
    def fctx_node_data(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(path)
        return fctx.data()

    @reraise_safe_exceptions
    def fctx_flags(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_flags(_repo_id, _commit_id, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.flags()

        return _fctx_flags(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def fctx_size(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_size(_repo_id, _revision, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.size()
        return _fctx_size(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        """Return hex ids of all commits visible in the ``name`` repoview
        filter (e.g. 'visible', 'served')."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id, _name):
            repo = self._factory.repo(wire)
            repo = repo.filtered(name)
            revs = map(lambda x: hex(x[7]), repo.changelog.index)
            return revs
        return _get_all_commit_ids(context_uid, repo_id, name)

    @reraise_safe_exceptions
    def get_config_value(self, wire, section, name, untrusted=False):
        repo = self._factory.repo(wire)
        return repo.ui.config(section, name, untrusted=untrusted)

    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
            # NOTE: only the path is inspected (largefiles standin check);
            # commit_id participates in the cache key only.
            return largefiles.lfutil.isstandin(path)

        return _is_large_file(context_uid, repo_id, commit_id, path)

    @reraise_safe_exceptions
    def is_binary(self, wire, revision, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)
            return fctx.isbinary()

        return _is_binary(repo_id, revision, path)

    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.instore(repo, sha)

    @reraise_safe_exceptions
    def in_user_cache(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.inusercache(repo.ui, sha)

    @reraise_safe_exceptions
    def store_path(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.storepath(repo, sha)

    @reraise_safe_exceptions
    def link(self, wire, sha, path):
        repo = self._factory.repo(wire)
        largefiles.lfutil.link(
            largefiles.lfutil.usercachepath(repo.ui, sha), path)

    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        self._factory.repo(wire, create=create)

    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        """Resolve ``revision`` to a hex id (and rev number when ``both``).

        Integer revisions <= 0 are shifted by one to emulate negative
        indexing, since Mercurial doesn't support it directly.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _lookup(_context_uid, _repo_id, _revision, _both):

            repo = self._factory.repo(wire)
            rev = _revision
            if isinstance(rev, int):
                # NOTE(marcink):
                # since Mercurial doesn't support negative indexes properly
                # we need to shift accordingly by one to get proper index, e.g
                # repo[-1] => repo[-2]
                # repo[0] => repo[-1]
                if rev <= 0:
                    rev = rev + -1
            try:
                ctx = self._get_ctx(repo, rev)
            except (TypeError, RepoLookupError) as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(rev)
            except LookupError as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(e.name)

            if not both:
                return ctx.hex()

            ctx = repo[ctx.hex()]
            return ctx.hex(), ctx.rev()

        return _lookup(context_uid, repo_id, revision, both)

    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        """Push all branches and bookmarks to ``url`` after validating it."""
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        bookmarks = dict(repo._bookmarks).keys()
        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, rev)
        return ctx.rev()

    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            repo = self._factory.repo(wire)
            revisions = [rev for rev in revrange(repo, commit_filter)]
            return revisions

        # commit_filter is sorted so differently-ordered filters share a key
        return _rev_range(context_uid, repo_id, sorted(commit_filter))

    @reraise_safe_exceptions
    def rev_range_hash(self, wire, node):
        repo = self._factory.repo(wire)

        def get_revs(repo, rev_opt):
            if rev_opt:
                revs = revrange(repo, rev_opt)
                if len(revs) == 0:
                    return (nullrev, nullrev)
                return max(revs), min(revs)
            else:
                return len(repo) - 1, 0

        stop, start = get_revs(repo, [node + ':'])
        revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
        return revs

    @reraise_safe_exceptions
    def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
        other_path = kwargs.pop('other_path', None)

        # case when we want to compare two independent repositories
        if other_path and other_path != wire["path"]:
            baseui = self._factory._create_config(wire["config"])
            repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
        else:
            repo = self._factory.repo(wire)
        return list(repo.revs(rev_spec, *args))

    @reraise_safe_exceptions
    def verify(self, wire,):
        """Run ``hg verify`` on the repository, returning its text output."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])

        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()

    @reraise_safe_exceptions
    def hg_update_cache(self, wire,):
        """Rebuild repository caches under wlock+lock; return ui output."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        with repo.wlock(), repo.lock():
            repo.updatecaches(full=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def hg_rebuild_fn_cache(self, wire,):
        """Rebuild the fncache store file; return ui output."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui

        repair.rebuildfncache(baseui, repo)

        return output.getvalue()

    @reraise_safe_exceptions
    def tags(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tags(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return repo.tags()

        return _tags(context_uid, repo_id)

    @reraise_safe_exceptions
    def update(self, wire, node=None, clean=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.update(baseui, repo, node=node, clean=clean)

    @reraise_safe_exceptions
    def identify(self, wire):
        """Return the full node id of the working copy parent."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        baseui.write = output.write
        # This is required to get a full node id
        baseui.debugflag = True
        commands.identify(baseui, repo, id=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def heads(self, wire, branch=None):
        """Return space-separated head node ids, optionally for one branch."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui.write = write
        if branch:
            args = [branch]
        else:
            args = []
        commands.heads(baseui, repo, template='{node} ', *args)

        return output.getvalue()

    @reraise_safe_exceptions
    def ancestor(self, wire, revision1, revision2):
        repo = self._factory.repo(wire)
        changelog = repo.changelog
        lookup = repo.lookup
        a = changelog.ancestor(lookup(revision1), lookup(revision2))
        return hex(a)

    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, source, dest, noupdate=not update_after_clone)

    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """Create a commit from in-memory file contents.

        ``updated`` entries are dicts with path/content/mode; ``removed`` is
        a list of paths to delete. The phase of the new commit follows the
        repo's ``phases.publish`` setting. Returns the new hex commit id.
        """

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        publishing = baseui.configbool('phases', 'publish')
        if publishing:
            new_commit = 'public'
        else:
            new_commit = 'draft'

        def _filectxfn(_repo, ctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)

            raise exceptions.AbortException()(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):

            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=message,
                files=files,
                filectxfn=_filectxfn,
                user=user,
                date=(commit_time, commit_timezone),
                extra=extra)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id

    @reraise_safe_exceptions
    def pull(self, wire, url, commit_ids=None):
        """Pull the given commits (or everything) from ``url`` via the
        exchange API; prompts are disabled on both ends."""
        repo = self._factory.repo(wire)
        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        if commit_ids:
            commit_ids = [bin(commit_id) for commit_id in commit_ids]

        return exchange.pull(
            repo, remote, heads=commit_ids, force=None).cgresult

    @reraise_safe_exceptions
    def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)

        # Mercurial internally has a lot of logic that checks ONLY if
        # option is defined, we just pass those if they are defined then
        opts = {}
        if bookmark:
            opts['bookmark'] = bookmark
        if branch:
            opts['branch'] = branch
        if revision:
            opts['rev'] = revision

        commands.pull(baseui, repo, source, **opts)

    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        commands.push(baseui, repo, dest=dest_path, rev=revisions,
                      new_branch=push_branches)

    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)

    @reraise_safe_exceptions
    def get_unresolved_files(self, wire):
        """Return the raw ``hg resolve --list`` output lines for the repo."""
        repo = self._factory.repo(wire)

        log.debug('Calculating unresolved files for repo: %s', repo)
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui = self._factory._create_config(wire['config'])
        baseui.write = write

        commands.resolve(baseui, repo, list=True)
        unresolved = output.getvalue().splitlines(0)
        return unresolved

    @reraise_safe_exceptions
    def merge(self, wire, revision):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'merge', 'internal:dump')

        # In case of sub repositories are used mercurial prompts the user in
        # case of merge conflicts or different sub repository sources. By
        # setting the interactive flag to `False` mercurial doesn't prompt the
        # used but instead uses a default value.
        repo.ui.setconfig('ui', 'interactive', False)
        commands.merge(baseui, repo, rev=revision)

    @reraise_safe_exceptions
    def merge_state(self, wire):
        """Return the list of files left unresolved by the current merge."""
        repo = self._factory.repo(wire)
        repo.ui.setconfig('ui', 'merge', 'internal:dump')

        # In case of sub repositories are used mercurial prompts the user in
        # case of merge conflicts or different sub repository sources. By
        # setting the interactive flag to `False` mercurial doesn't prompt the
        # used but instead uses a default value.
        repo.ui.setconfig('ui', 'interactive', False)
        ms = hg_merge.mergestate(repo)
        return [x for x in ms.unresolved()]

    @reraise_safe_exceptions
    def commit(self, wire, message, username, close_branch=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'username', username)
        commands.commit(baseui, repo, message=message, close_branch=close_branch)

    @reraise_safe_exceptions
    def rebase(self, wire, source=None, dest=None, abort=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'merge', 'internal:dump')
        # In case of sub repositories are used mercurial prompts the user in
        # case of merge conflicts or different sub repository sources. By
        # setting the interactive flag to `False` mercurial doesn't prompt the
        # used but instead uses a default value.
        repo.ui.setconfig('ui', 'interactive', False)
        rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)

    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            hg_tag.tag(repo, name, node, message, local, user, date)
        except Abort as e:
            log.exception("Tag operation aborted")
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

    @reraise_safe_exceptions
    def bookmark(self, wire, bookmark, revision=None):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        # we don't need any special hooks for Mercurial
        pass

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        return {
            'pre_version': vcsserver.__version__,
            'post_version': vcsserver.__version__,
        }
@@ -1,79 +1,79 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
24 24 # patch demandimport, due to bug in mercurial when it always triggers
25 25 # demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import subrepoutil
40 40 from mercurial import tags as hg_tag
41 41
42 42 from mercurial.commands import clone, nullid, pull
43 43 from mercurial.context import memctx, memfilectx
44 44 from mercurial.error import (
45 45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 46 RequirementError, ProgrammingError)
47 47 from mercurial.hgweb import hgweb_mod
48 48 from mercurial.localrepo import instance
49 49 from mercurial.match import match
50 50 from mercurial.mdiff import diffopts
51 51 from mercurial.node import bin, hex
52 52 from mercurial.encoding import tolocal
53 53 from mercurial.discovery import findcommonoutgoing
54 54 from mercurial.hg import peer
55 55 from mercurial.httppeer import makepeer
56 56 from mercurial.util import url as hg_url
57 57 from mercurial.scmutil import revrange, revsymbol
58 58 from mercurial.node import nullrev
59 59 from mercurial import exchange
60 60 from hgext import largefiles
61 61
# those auth handlers are patched for a python 2.6.5 bug causing
# infinite looping when given invalid resources
64 64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65 65
66 66
def get_ctx(repo, ref):
    """Return the changectx for *ref*, tolerating numeric revs and symbols.

    Tries a plain ``repo[ref]`` lookup first and falls back to the slower,
    backward-compatible ``revsymbol`` resolution. Numeric refs that raise
    LookupError/RepoLookupError are re-raised, since revsymbol cannot resolve
    those either.
    """
    try:
        ctx = repo[ref]
    except ProgrammingError:
        # we're unable to find the rev using a regular lookup, we fallback
        # to slower, but backward compat revsymbol usage
        ctx = revsymbol(repo, ref)
    except (LookupError, RepoLookupError):
        # Similar case as above but only for refs that are not numeric
        if isinstance(ref, (int, long)):  # NOTE: `long` => Python 2 module
            raise
        ctx = revsymbol(repo, ref)
    return ctx
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
def patch_largefiles_capabilities():
    """
    Patches the capabilities function in the largefiles extension.

    Replaces ``largefiles.proto._capabilities`` with a wrapper that only
    advertises largefiles support when the extension is actually enabled
    for the repository being served (see _dynamic_capabilities_wrapper).
    """
    from vcsserver import hgcompat
    lfproto = hgcompat.largefiles.proto
    wrapper = _dynamic_capabilities_wrapper(
        lfproto, hgcompat.extensions.extensions)
    lfproto._capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto._capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(orig, repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 return calc_capabilities(orig, repo, proto)
56 56 else:
57 57 logger.debug('Extension largefiles disabled')
58 58 return orig(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
def patch_subrepo_type_mapping():
    """Replace mercurial's subrepo classes with a do-nothing implementation.

    vcsserver never wants to recurse into subrepositories; every subrepo type
    (hg/git/svn) is mapped to NoOpSubrepo, which reports a clean state and
    refuses merges with SubrepoMergeException.
    """
    from collections import defaultdict  # NOTE(review): appears unused here
    from hgcompat import subrepo, subrepoutil
    from vcsserver.exceptions import SubrepoMergeException

    class NoOpSubrepo(subrepo.abstractsubrepo):

        def __init__(self, ctx, path, *args, **kwargs):
            """Initialize abstractsubrepo part

            ``ctx`` is the context referring this subrepository in the
            parent repository.

            ``path`` is the path to this subrepository as seen from
            innermost repository.
            """
            self.ui = ctx.repo().ui
            self._ctx = ctx
            self._path = path

        def storeclean(self, path):
            """
            returns true if the repository has not changed since it was last
            cloned from or pushed to a given repository.
            """
            return True

        def dirty(self, ignoreupdate=False, missing=False):
            """returns true if the dirstate of the subrepo is dirty or does not
            match current stored state. If ignoreupdate is true, only check
            whether the subrepo has uncommitted changes in its dirstate.
            """
            return False

        def basestate(self):
            """current working directory base state, disregarding .hgsubstate
            state and working directory modifications"""
            substate = subrepoutil.state(self._ctx, self.ui)
            file_system_path, rev, repotype = substate.get(self._path)
            return rev

        def remove(self):
            """remove the subrepo

            (should verify the dirstate is not dirty first)
            """
            pass

        def get(self, state, overwrite=False):
            """run whatever commands are needed to put the subrepo into
            this state
            """
            pass

        def merge(self, state):
            """merge currently-saved state with the new state."""
            # SubrepoMergeException() returns the exception class, which is
            # then instantiated - this is the vcsserver exception pattern
            raise SubrepoMergeException()()

        def push(self, opts):
            """perform whatever action is analogous to 'hg push'

            This may be a no-op on some systems.
            """
            pass

    # Patch subrepo type mapping to always return our NoOpSubrepo class
    # whenever a subrepo class is looked up.
    subrepo.types = {
        'hg': NoOpSubrepo,
        'git': NoOpSubrepo,
        'svn': NoOpSubrepo
    }
@@ -1,205 +1,205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import re
21 21 import os
22 22 import sys
23 23 import datetime
24 24 import logging
25 25 import pkg_resources
26 26
27 27 import vcsserver
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
def get_git_hooks_path(repo_path, bare):
    """Return the hooks directory of a git repository (bare or working copy)."""
    if bare:
        return os.path.join(repo_path, 'hooks')
    return os.path.join(repo_path, '.git', 'hooks')
38 38
39 39
def install_git_hooks(repo_path, bare, executable=None, force_create=False):
    """
    Creates a RhodeCode hook inside a git repository

    :param repo_path: path to repository
    :param bare: whether the repository is bare (hooks live at top level)
    :param executable: binary executable to put in the hooks
    :param force_create: Create even if same name hook exists
    :returns: True
    """
    executable = executable or sys.executable
    hooks_path = get_git_hooks_path(repo_path, bare)

    if not os.path.isdir(hooks_path):
        os.makedirs(hooks_path, mode=0o777)

    # templates shipped inside the vcsserver package
    tmpl_post = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))

    path = ''  # not used for now
    timestamp = datetime.datetime.utcnow().isoformat()

    for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
        log.debug('Installing git hook in repo %s', repo_path)
        _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
        # only overwrite hooks that RhodeCode wrote (or when forced),
        # never user-provided hooks
        _rhodecode_hook = check_rhodecode_hook(_hook_file)

        if _rhodecode_hook or force_create:
            log.debug('writing git %s hook file at %s !', h_type, _hook_file)
            try:
                with open(_hook_file, 'wb') as f:
                    # fill template placeholders with runtime values
                    template = template.replace(
                        '_TMPL_', vcsserver.__version__)
                    template = template.replace('_DATE_', timestamp)
                    template = template.replace('_ENV_', executable)
                    template = template.replace('_PATH_', path)
                    f.write(template)
                os.chmod(_hook_file, 0o755)
            except IOError:
                log.exception('error writing hook file %s', _hook_file)
        else:
            log.debug('skipping writing hook file')

    return True
86 86
87 87
def get_svn_hooks_path(repo_path):
    """Return the hooks directory of an svn repository."""
    return os.path.join(repo_path, 'hooks')
92 92
93 93
def install_svn_hooks(repo_path, executable=None, force_create=False):
    """
    Creates RhodeCode hooks inside a svn repository

    :param repo_path: path to repository
    :param executable: binary executable to put in the hooks
    :param force_create: Create even if same name hook exists
    :returns: True
    """
    executable = executable or sys.executable
    hooks_path = get_svn_hooks_path(repo_path)
    if not os.path.isdir(hooks_path):
        os.makedirs(hooks_path, mode=0o777)

    # templates shipped inside the vcsserver package
    tmpl_post = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'vcsserver', '/'.join(
            ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))

    path = ''  # not used for now
    timestamp = datetime.datetime.utcnow().isoformat()

    for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
        log.debug('Installing svn hook in repo %s', repo_path)
        _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
        # only overwrite hooks that RhodeCode wrote (or when forced)
        _rhodecode_hook = check_rhodecode_hook(_hook_file)

        if _rhodecode_hook or force_create:
            log.debug('writing svn %s hook file at %s !', h_type, _hook_file)

            try:
                with open(_hook_file, 'wb') as f:
                    # fill template placeholders with runtime values
                    template = template.replace(
                        '_TMPL_', vcsserver.__version__)
                    template = template.replace('_DATE_', timestamp)
                    template = template.replace('_ENV_', executable)
                    template = template.replace('_PATH_', path)

                    f.write(template)
                os.chmod(_hook_file, 0o755)
            except IOError:
                log.exception('error writing hook file %s', _hook_file)
        else:
            log.debug('skipping writing hook file')

    return True
141 141
142 142
def get_version_from_hook(hook_path):
    """Extract the RC_HOOK_VER value from a hook file; '' when absent."""
    version = ''
    content = read_hook_content(hook_path)
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', content)
    if match:
        try:
            version = match.group(1)
            log.debug('got version %s from hooks.', version)
        except Exception:
            log.exception("Exception while reading the hook version.")
    return version.replace("'", "")
154 154
155 155
def check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode

    Returns True when there is no hook yet, or when the existing hook
    carries a RhodeCode version marker; False for a foreign hook.
    """
    if not os.path.exists(hook_path):
        return True

    log.debug('hook exists, checking if it is from RhodeCode')
    return bool(get_version_from_hook(hook_path))
170 170
171 171
def read_hook_content(hook_path):
    """Return the raw bytes of *hook_path*; '' when it is not a regular file."""
    if not os.path.isfile(hook_path):
        return ''
    with open(hook_path, 'rb') as f:
        return f.read()
178 178
179 179
def get_git_pre_hook_version(repo_path, bare):
    """Version string of the installed git pre-receive hook ('' if none)."""
    hook_file = os.path.join(get_git_hooks_path(repo_path, bare), 'pre-receive')
    return get_version_from_hook(hook_file)
185 185
186 186
def get_git_post_hook_version(repo_path, bare):
    """Version string of the installed git post-receive hook ('' if none)."""
    hook_file = os.path.join(get_git_hooks_path(repo_path, bare), 'post-receive')
    return get_version_from_hook(hook_file)
192 192
193 193
def get_svn_pre_hook_version(repo_path):
    """Version string of the installed svn pre-commit hook ('' if none)."""
    hook_file = os.path.join(get_svn_hooks_path(repo_path), 'pre-commit')
    return get_version_from_hook(hook_file)
199 199
200 200
def get_svn_post_hook_version(repo_path):
    """Version string of the installed svn post-commit hook ('' if none)."""
    hook_file = os.path.join(get_svn_hooks_path(repo_path), 'post-commit')
    return get_version_from_hook(hook_file)
@@ -1,729 +1,729 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
class HooksHttpClient(object):
    """Invokes RhodeCode hooks with a plain HTTP POST to ``hooks_uri``."""

    connection = None

    def __init__(self, hooks_uri):
        # host[:port] of the hooks HTTP daemon
        self.hooks_uri = hooks_uri

    def __call__(self, method, extras):
        """POST the (method, extras) payload; return the decoded json reply.

        Connection and json-decoding failures are logged and re-raised.
        """
        connection = HTTPConnection(self.hooks_uri)
        body = self._serialize(method, extras)
        try:
            connection.request('POST', '/', body)
        except Exception:
            log.error('Connection failed on %s', connection)
            raise
        response = connection.getresponse()

        response_data = response.read()

        try:
            return json.loads(response_data)
        except Exception:
            log.exception('Failed to decode hook response json data. '
                          'response_code:%s, raw_data:%s',
                          response.status, response_data)
            raise

    def _serialize(self, hook_name, extras):
        # payload shape expected by the hooks HTTP daemon
        data = {
            'method': hook_name,
            'extras': extras
        }
        return json.dumps(data)
72 72
73 73
class HooksDummyClient(object):
    """Invokes hooks in-process by importing the configured hooks module."""

    def __init__(self, hooks_module):
        self._hooks_module = importlib.import_module(hooks_module)

    def __call__(self, hook_name, extras):
        hooks_manager = self._hooks_module.Hooks()
        with hooks_manager as hooks:
            return getattr(hooks, hook_name)(extras)
81 81
82 82
class HooksShadowRepoClient(object):
    """No-op hooks client for shadow repositories: always succeeds silently."""

    def __call__(self, hook_name, extras):
        result = {'output': '', 'status': 0}
        return result
87 87
88 88
class RemoteMessageWriter(object):
    """Writer base class; subclasses deliver hook output to a specific client."""
    def write(self, message):
        raise NotImplementedError()
93 93
94 94
class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message):
        # TODO: Check why the quiet flag is set by default.
        # Temporarily lift `quiet` so ui.status actually emits the message,
        # then restore the previous value.
        old = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = old
107 107
108 108
class GitMessageWriter(RemoteMessageWriter):
    """Writer that sends messages to git clients via the stdout stream."""

    def __init__(self, stdout=None):
        self.stdout = stdout or sys.stdout

    def write(self, message):
        encoded = message.encode('utf-8')
        self.stdout.write(encoded)
117 117
118 118
class SvnMessageWriter(RemoteMessageWriter):
    """Writer that sends messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        encoded = message.encode('utf-8')
        self.stderr.write(encoded)
128 128
129 129
def _handle_exception(result):
    """Re-raise remote hook failures locally based on the result payload.

    The hooks callback reports exceptions by class name; known names are
    mapped back to vcsserver exception factories (each factory call returns
    an exception class, which is then instantiated with the original args).
    Unknown names raise a generic Exception.
    """
    exception_class = result.get('exception')
    exception_traceback = result.get('exception_traceback')

    if exception_traceback:
        log.error('Got traceback from remote call:%s', exception_traceback)

    if exception_class == 'HTTPLockedRC':
        raise exceptions.RepositoryLockedException()(*result['exception_args'])
    elif exception_class == 'HTTPBranchProtected':
        raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
    elif exception_class == 'RepositoryError':
        raise exceptions.VcsException()(*result['exception_args'])
    elif exception_class:
        raise Exception('Got remote exception "%s" with args "%s"' %
                        (exception_class, result['exception_args']))
146 146
147 147
def _get_hooks_client(extras):
    """Pick the hooks transport from *extras*: http > shadow-repo > in-process."""
    if extras.get('hooks_uri'):
        return HooksHttpClient(extras['hooks_uri'])
    if extras.get('is_shadow_repo'):
        return HooksShadowRepoClient()
    return HooksDummyClient(extras['hooks_module'])
157 157
158 158
def _call_hook(hook_name, extras, writer):
    """Invoke *hook_name* via the configured client; relay output, return status.

    Raises (via _handle_exception) when the remote side reported a failure.
    """
    client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', client)
    result = client(hook_name, extras)
    log.debug('Hooks got result: %s', result)

    _handle_exception(result)
    writer.write(result['output'])

    return result['status']
169 169
170 170
171 171 def _extras_from_ui(ui):
172 172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 173 if not hook_data:
174 174 # maybe it's inside environ ?
175 175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 176 if env_hook_data:
177 177 hook_data = env_hook_data
178 178
179 179 extras = {}
180 180 if hook_data:
181 181 extras = json.loads(hook_data)
182 182 return extras
183 183
184 184
def _rev_range_hash(repo, node, check_heads=False):
    """Collect (commit_id, branch) pairs from *node* up to the repo tip.

    :param check_heads: when True, also compute parent heads so callers can
        detect multiple-head (force push) situations.
    :returns: tuple ``(commits, parent_heads)``; parent_heads is [] unless
        check_heads is True.
    """
    from vcsserver.hgcompat import get_ctx

    commits = []
    revs = []
    start = get_ctx(repo, node).rev()
    end = len(repo)
    for rev in range(start, end):
        revs.append(rev)
        ctx = get_ctx(repo, rev)
        commit_id = mercurial.node.hex(ctx.node())
        branch = ctx.branch()
        commits.append((commit_id, branch))

    parent_heads = []
    if check_heads:
        parent_heads = _check_heads(repo, start, end, revs)
    return commits, parent_heads
203 203
204 204
def _check_heads(repo, start, end, commits):
    """Return branch parent heads that would produce multiple heads.

    Walks the parents (below *start*) of the incoming revisions *commits*;
    for each such parent, follows same-branch descendants up to *end* to
    count heads. A non-empty return value signals that the push would create
    multiple heads on a branch (merge suggested).
    """
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    for new_rev in commits:
        for p in changelog.parentrevs(new_rev):
            if p == mercurial.node.nullrev:
                continue
            if p < start:
                parents.add(p)

    for p in parents:
        branch = get_ctx(repo, p).branch()
        # The heads descending from that parent, on the same branch
        parent_heads = set([p])
        reachable = set([p])
        for x in xrange(p + 1, end):  # NOTE: xrange => Python 2 module
            if get_ctx(repo, x).branch() != branch:
                continue
            for pp in changelog.parentrevs(x):
                if pp in reachable:
                    reachable.add(x)
                    parent_heads.discard(pp)
                    parent_heads.add(x)
        # More than one head? Suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []
235 235
236 236
237 237 def _get_git_env():
238 238 env = {}
239 239 for k, v in os.environ.items():
240 240 if k.startswith('GIT'):
241 241 env[k] = v
242 242
243 243 # serialized version
244 244 return [(k, v) for k, v in env.items()]
245 245
246 246
247 247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 248 env = {}
249 249 for k, v in os.environ.items():
250 250 if k.startswith('HG'):
251 251 env[k] = v
252 252
253 253 env['HG_NODE'] = old_rev
254 254 env['HG_NODE_LAST'] = new_rev
255 255 env['HG_TXNID'] = txnid
256 256 env['HG_PENDING'] = repo_path
257 257
258 258 return [(k, v) for k, v in env.items()]
259 259
260 260
def repo_size(ui, repo, **kwargs):
    """Hook: trigger the rhodecode `repo_size` callback."""
    return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
264 264
265 265
def pre_pull(ui, repo, **kwargs):
    """Hook: trigger the rhodecode `pre_pull` callback."""
    return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
269 269
270 270
def pre_pull_ssh(ui, repo, **kwargs):
    """SSH variant of pre_pull; only fires when the SSH extras flag is set."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return pre_pull(ui, repo, **kwargs)
276 276
277 277
def post_pull(ui, repo, **kwargs):
    """Hook: trigger the rhodecode `post_pull` callback."""
    return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
281 281
282 282
def post_pull_ssh(ui, repo, **kwargs):
    """SSH variant of post_pull; only fires when the SSH extras flag is set."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return post_pull(ui, repo, **kwargs)
288 288
289 289
def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook.

    Groups the incoming changesets by branch and forwards them to the
    rhodecode `pre_push` callback. Only `pretxnchangegroup` invocations
    carry commit data; other hook types send an empty `commit_ids` list.
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    if node and kwargs.get('hooktype') == 'pretxnchangegroup':
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, commits in branches.items():
            # node_last marks the tip of the incoming changegroup; fall back
            # to the first commit of the branch when absent
            old_rev = kwargs.get('node_last') or commits[0]
            rev_data.append({
                'total_commits': len(commits),
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
                repo_path=repo_path)

    extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))
329 329
330 330
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """SSH variant of pre_push; delegates only when the SSH extras flag is set."""
    if _extras_from_ui(ui).get('SSH'):
        return pre_push(ui, repo, node, **kwargs)
    return 0
337 337
338 338
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH

    Allows the push only when the SSH session carries write or admin
    permission; returns a non-zero status to reject it otherwise.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code
    return 1
354 354
355 355
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook.

    Reports all pushed commit ids, their branches, and any bookmarks captured
    by the pushkey hook (see key_push) to the rhodecode `post_push` callback.
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    bookmarks = []
    tags = []

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # bookmarks are collected by the pushkey hook and stashed on the ui object
    if hasattr(ui, '_rc_pushkey_branches'):
        bookmarks = ui._rc_pushkey_branches

    extras['hook_type'] = kwargs.get('hooktype', 'post_push')
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
385 385
386 386
def post_push_ssh(ui, repo, node, **kwargs):
    """SSH variant of post_push; delegates only when the SSH extras flag is set."""
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0
    return post_push(ui, repo, node, **kwargs)
394 394
395 395
def key_push(ui, repo, **kwargs):
    """pushkey hook: remember bookmarks touched by the push for post_push."""
    from vcsserver.hgcompat import get_ctx
    if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
    return
402 402
403 403
# backward compat alias (old hook name)
log_pull_action = post_pull

# backward compat alias (old hook name)
log_push_action = post_push
409 409
410 410
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    return None
418 418
419 419
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    return None
427 427
428 428
429 429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430 430
431 431
def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        # pull hook not enabled for this repo; nothing to do
        return HookResponse(0, '')

    stdout = io.BytesIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        # non-zero status makes the caller abort the operation
        status = 128
        stdout.write('ERROR: %s\n' % str(error))

    return HookResponse(status, stdout.getvalue())
453 453
454 454
def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        # pull hook not enabled for this repo; nothing to do
        return HookResponse(0, '')

    stdout = io.BytesIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        # non-zero status is reported back to the caller
        status = 128
        stdout.write('ERROR: %s\n' % error)

    return HookResponse(status, stdout.getvalue())
476 476
477 477
478 478 def _parse_git_ref_lines(revision_lines):
479 479 rev_data = []
480 480 for revision_line in revision_lines or []:
481 481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 482 ref_data = ref.split('/', 2)
483 483 if ref_data[1] in ('tags', 'heads'):
484 484 rev_data.append({
485 485 # NOTE(marcink):
486 486 # we're unable to tell total_commits for git at this point
487 487 # but we set the variable for consistency with GIT
488 488 'total_commits': -1,
489 489 'old_rev': old_rev,
490 490 'new_rev': new_rev,
491 491 'ref': ref,
492 492 'type': ref_data[1],
493 493 'name': ref_data[2],
494 494 })
495 495 return rev_data
496 496
497 497
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    :param revision_lines: "old new ref" lines from git's pre-receive stdin
    :param env: process environment; must carry the RC_SCM_DATA json blob

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0
    empty_commit_id = '0' * 40

    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        type_ = push_ref['type']
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if type_ == 'heads' and not (new_branch or delete_branch):
            old_rev = push_ref['old_rev']
            new_rev = push_ref['new_rev']
            # list commits reachable from old_rev but not from new_rev
            cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            # means we're having some non-reachable objects, this forced push was used
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())
540 540
541 541
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # Fix up head revision if needed
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
                    # no HEAD yet (empty repo) -> point it at the pushed branch
                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
                           'refs/heads/%s' % push_ref['name']]
                    print("Setting default branch to %s" % push_ref['name'])
                    subprocessio.run_command(cmd, env=os.environ.copy())

                # collect the new commits: everything reachable from the
                # pushed rev that is not on any other existing branch
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter())
        except Exception:
            # repo_size is best-effort; never fail the push because of it.
            # NOTE: was a bare `except:` which also swallowed SystemExit/
            # KeyboardInterrupt; narrowed to Exception to match svn_post_commit.
            pass

    return _call_hook('post_push', extras, GitMessageWriter())
632 632
633 633
def _get_extras_from_txn_id(path, txn_id):
    """
    Recover the ``rc-scm-extras`` revprop stored on an open svn transaction.

    Returns an empty dict (and logs the traceback) when svnlook fails or the
    property cannot be decoded.
    """
    extras = {}
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-t', txn_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return extras
647 647
648 648
def _get_extras_from_commit_id(commit_id, path):
    """
    Recover the ``rc-scm-extras`` revprop stored on a committed svn revision.

    Returns an empty dict (and logs the traceback) when svnlook fails or the
    property cannot be decoded.
    """
    extras = {}
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-r', commit_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return extras
662 662
663 663
def svn_pre_commit(repo_path, commit_data, env):
    """
    Subversion pre-commit hook: dispatches the ``pre_push`` callback.

    :param commit_data: tuple of (repo path, transaction id)
    :return: hook status code, 0 for success
    """
    path, txn_id = commit_data
    branches, tags = [], []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
        if not extras:
            return 0

    extras['hook_type'] = 'pre_commit'
    extras['commit_ids'] = [txn_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'total_commits': 1,
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    return _call_hook('pre_push', extras, SvnMessageWriter())
688 688
689 689
def svn_post_commit(repo_path, commit_data, env):
    """
    Subversion post-commit hook: dispatches the ``post_push`` callback.

    commit_data is path, rev, txn_id
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None

    branches, tags = [], []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
        if not extras:
            return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            # repo_size reporting is best-effort only
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,688 +1,688 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 from itertools import chain
28 28 from cStringIO import StringIO
29 29
30 30 import simplejson as json
31 31 import msgpack
32 32 from pyramid.config import Configurator
33 33 from pyramid.settings import asbool, aslist
34 34 from pyramid.wsgi import wsgiapp
35 35 from pyramid.compat import configparser
36 36 from pyramid.response import Response
37 37
38 38 from vcsserver.utils import safe_int
39 39
log = logging.getLogger(__name__)

# due to Mercurial/glibc2.27 problems we need to detect if locale settings are
# causing problems and "fix" it in case they do and fallback to LC_ALL = C

try:
    locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
    # fall back to the "C" locale so mercurial imports below don't crash
    log.error(
        'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
    os.environ['LC_ALL'] = 'C'
51 51
52 52 import vcsserver
53 53 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 54 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 55 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 56 from vcsserver.echo_stub.echo_app import EchoApp
57 57 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 58 from vcsserver.lib.exc_tracking import store_exception
59 59 from vcsserver.server import VcsServer
60 60
61 61 try:
62 62 from vcsserver.git import GitFactory, GitRemote
63 63 except ImportError:
64 64 GitFactory = None
65 65 GitRemote = None
66 66
67 67 try:
68 68 from vcsserver.hg import MercurialFactory, HgRemote
69 69 except ImportError:
70 70 MercurialFactory = None
71 71 HgRemote = None
72 72
73 73 try:
74 74 from vcsserver.svn import SubversionFactory, SvnRemote
75 75 except ImportError:
76 76 SubversionFactory = None
77 77 SvnRemote = None
78 78
79 79
80 80 def _is_request_chunked(environ):
81 81 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 82 return stream
83 83
84 84
85 85 def _int_setting(settings, name, default):
86 86 settings[name] = int(settings.get(name, default))
87 87 return settings[name]
88 88
89 89
def _bool_setting(settings, name, default):
    """Coerce a setting to bool via pyramid's ``asbool``; store and return it."""
    raw_value = settings.get(name, default)
    if isinstance(raw_value, unicode):
        # asbool on python2 expects a byte string
        raw_value = raw_value.encode('utf8')
    settings[name] = asbool(raw_value)
    return settings[name]
96 96
97 97
def _list_setting(settings, name, default):
    """Parse a space/newline separated setting into a list via ``aslist``."""
    raw_value = settings.get(name, default)

    # pyramid's aslist handles the space/newline separation convention
    settings[name] = aslist(raw_value)
    return settings[name]
104 104
105 105
106 106 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 107 value = settings.get(name, default)
108 108
109 109 if default_when_empty and not value:
110 110 # use default value when value is empty
111 111 value = default
112 112
113 113 if lower:
114 114 value = value.lower()
115 115 settings[name] = value
116 116 return settings[name]
117 117
118 118
class VCS(object):
    """
    Container wiring up one remote handler per importable VCS backend
    (git/hg/svn) plus the generic VcsServer remote.

    NOTE(review): the ``_git_remote``/``_hg_remote``/``_svn_remote``
    attributes are only created when the corresponding backend imported
    successfully — callers reading them unconditionally will raise
    AttributeError for a missing backend; confirm that is intended.
    """

    def __init__(self, locale_conf=None, cache_config=None):
        # locale string (e.g. 'en_US.UTF-8'); empty means "use environment"
        self.locale = locale_conf
        self.cache_config = cache_config
        self._configure_locale()

        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.info("Git client import failed")

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.info("Mercurial client import failed")

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.info("Subversion client import failed")

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply ``self.locale`` process-wide; fall back gracefully on error."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info(
                'Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception(
                'Cannot set locale, not configuring the locale system')
167 167
168 168
class WsgiProxy(object):
    """
    WSGI adapter around a remote-wsgi handler: unpacks a msgpack-encoded
    request from the WSGI input stream, dispatches it, and streams back a
    msgpack-encoded response of [error, status, headers, *data-chunks].
    """

    def __init__(self, wsgi):
        # object exposing .handle(environment, input_data, *args, **kwargs)
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        input_data = environ['wsgi.input'].read()
        input_data = msgpack.unpackb(input_data)

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                input_data['environment'], input_data['input_data'],
                *input_data['args'], **input_data['kwargs'])
        except Exception as e:
            # errors are serialized into the response rather than raised
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        # NOTE(review): WSGI start_response normally takes a status string
        # and a header list; (200, {}) works only if the caller tolerates
        # these types — confirm against the consuming side.
        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        # first three packed items are the envelope, the rest is payload
        initial_data = [
            error,
            status,
            headers,
        ]

        for d in chain(initial_data, data):
            yield msgpack.packb(d)
201 201
202 202
def not_found(request):
    """Fallback view returning a JSON-ready 404 payload for unknown routes."""
    return {'status': '404 NOT FOUND'}
205 205
206 206
class VCSViewPredicate(object):
    """
    Custom pyramid view predicate matching the ``backend`` URL segment
    against the configured remote objects.
    """

    def __init__(self, val, config):
        self.remotes = val

    def text(self):
        return 'vcs view method = %s' % (self.remotes.keys(),)

    phash = text

    def __call__(self, context, request):
        """
        View predicate that returns true if given backend is supported by
        defined remotes.
        """
        requested_backend = request.matchdict.get('backend')
        return requested_backend in self.remotes
223 223
224 224
class HTTPApplication(object):
    """
    Pyramid application object wiring routes, views and the VCS remote
    objects (hg/git/svn/server) that serve msgpack RPC calls and streaming
    clone/push traffic.
    """

    # exception class names that are passed back to the client by name
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    remote_wsgi = remote_wsgi
    # when True, streaming endpoints are served by the EchoApp dev stub
    _use_echo_app = False

    def __init__(self, settings=None, global_config=None):
        self._sanitize_settings_and_apply_defaults(settings)

        self.config = Configurator(settings=settings)
        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        # backend name (URL segment) -> remote handler object
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        self._configure_settings(global_config, settings)
        self._configure()

    def _configure_settings(self, global_config, app_settings):
        """
        Configure the settings module.
        """
        settings_merged = global_config.copy()
        settings_merged.update(app_settings)

        git_path = app_settings.get('git_path', None)
        if git_path:
            settings.GIT_EXECUTABLE = git_path
        binary_dir = app_settings.get('core.binary_dir', None)
        if binary_dir:
            settings.BINARY_DIR = binary_dir

        # Store the settings to make them available to other modules.
        vcsserver.PYRAMID_SETTINGS = settings_merged
        vcsserver.CONFIG = settings_merged

    def _sanitize_settings_and_apply_defaults(self, settings):
        """Fill in cache-related defaults before pyramid consumes settings."""
        temp_store = tempfile.gettempdir()
        default_cache_dir = os.path.join(temp_store, 'rc_cache')

        # save default, cache dir, and use it for all backends later.
        default_cache_dir = _string_setting(
            settings,
            'cache_dir',
            default_cache_dir, lower=False, default_when_empty=True)

        # ensure we have our dir created
        if not os.path.isdir(default_cache_dir):
            os.makedirs(default_cache_dir, mode=0o755)

        # exception store cache
        _string_setting(
            settings,
            'exception_tracker.store_path',
            temp_store, lower=False, default_when_empty=True)

        # repo_object cache
        _string_setting(
            settings,
            'rc_cache.repo_object.backend',
            'dogpile.cache.rc.file_namespace', lower=False)
        _int_setting(
            settings,
            'rc_cache.repo_object.expiration_time',
            30 * 24 * 60 * 60)
        _string_setting(
            settings,
            'rc_cache.repo_object.arguments.filename',
            os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)

    def _configure(self):
        """Register renderers, routes, views, predicates and tweens."""
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # catch-all exception handler (logs + stores, then re-raises)
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

    def wsgi_app(self):
        """Return the fully configured pyramid WSGI application."""
        return self.config.make_wsgi_app()

    def _vcs_view_params(self, request):
        """
        Decode a msgpack RPC request body.

        Returns (payload, remote, method, args, kwargs); when a wire context
        is present it is prepended to args for the remote call.
        """
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)
        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        if wire:
            try:
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                pass
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            no_args_methods = [
                'archive_repo'
            ]
            if method in no_args_methods:
                call_args = ''
            else:
                call_args = args[1:]

            log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, kwargs, context_uid, repo_state_uid)

        return payload, remote, method, args, kwargs

    def vcs_view(self, request):
        """
        Dispatch a single RPC call to the backend remote and wrap the result
        (or a serialized exception) in a JSON-RPC-like response dict.
        """
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            # a remote call may carry the original (server-side) exception
            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # expected lookup failures are not worth persisting
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info)

            tb_info = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_traceback))

            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': e.message,
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }
            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }

        return resp

    def vcs_stream_view(self, request):
        """Dispatch a streaming RPC call and chunk its result back."""
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        # this method has a stream: marker we remove it here
        method = method.split('stream:')[-1]
        chunk_size = safe_int(payload.get('chunk_size')) or 4096

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            raise

        def get_chunked_data(method_resp):
            stream = StringIO(method_resp)
            while 1:
                chunk = stream.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        response = Response(app_iter=get_chunked_data(resp))
        response.content_type = 'application/octet-stream'

        return response

    def status_view(self, request):
        """Lightweight health endpoint with version and pid."""
        import vcsserver
        return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
                'pid': os.getpid()}

    def service_view(self, request):
        """Introspection endpoint: version, parsed .ini sections, environ."""
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            log.exception('Failed to read .ini file for display')

        environ = os.environ.items()

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp

    def _msgpack_renderer_factory(self, info):
        """Pyramid renderer serializing view results as msgpack."""
        def _render(value, system):
            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # only override when the view did not set its own content type
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
            return msgpack.packb(value)
        return _render

    def set_env_from_config(self, environ, config):
        """Copy rhodecode user/ip info from repo config into the WSGI environ."""
        dict_conf = {}
        try:
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True

    def hg_proxy(self):
        """Build the mercurial msgpack-proxy WSGI view."""
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy

    def git_proxy(self):
        """Build the git msgpack-proxy WSGI view."""
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy

    def hg_stream(self):
        """Build the streaming hg clone/push WSGI view (or echo stub)."""
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                # repo identity and config are smuggled in custom headers
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)
                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                return app(environ, ResponseFilter(start_response))
            return _hg_stream

    def git_stream(self):
        """Build the streaming git clone/push WSGI view (or echo stub)."""
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')
                # repo identity and config are smuggled in custom headers
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)

                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                    log.debug(
                        'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                        path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

            return _git_stream

    def handle_vcs_exception(self, exception, request):
        """
        Exception view: translate known vcs error kinds to HTTP responses,
        persist and log everything else, then re-raise.
        """
        _vcs_kind = getattr(exception, '_vcs_kind', '')
        if _vcs_kind == 'repo_locked':
            # Get custom repo-locked status code if present.
            status_code = request.headers.get('X-RC-Locked-Status-Code')
            return HTTPRepoLocked(
                title=exception.message, status_code=status_code)

        elif _vcs_kind == 'repo_branch_protected':
            # Get custom repo-branch-protected status code if present.
            return HTTPRepoBranchProtected(title=exception.message)

        exc_info = request.exc_info
        store_exception(id(exc_info), exc_info)

        traceback_info = 'unavailable'
        if request.exc_info:
            exc_type, exc_value, exc_tb = request.exc_info
            traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))

        log.error(
            'error occurred handling this request for path: %s, \n tb: %s',
            request.path, traceback_info)
        raise exception
668 668
669 669
class ResponseFilter(object):
    """
    ``start_response`` wrapper stripping hop-by-hop headers before the
    response reaches the real WSGI start_response callable.
    """

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        filtered = tuple(
            (name, value) for name, value in response_headers
            if not wsgiref.util.is_hop_by_hop(name))
        return self._start_response(status, filtered, exc_info)
680 680
681 681
def main(global_config, **settings):
    """Paste/pyramid entry point building the vcsserver WSGI application."""
    if MercurialFactory:
        # patches must land before any mercurial repository is opened
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

    application = HTTPApplication(settings=settings, global_config=global_config)
    return application.wsgi_app()
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,169 +1,169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import os
22 22 import time
23 23 import datetime
24 24 import msgpack
25 25 import logging
26 26 import traceback
27 27 import tempfile
28 28
29 29 from pyramid import compat
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
34 34 global_prefix = 'vcsserver'
35 35 exc_store_dir_name = 'rc_exception_store_v1'
36 36
37 37
def exc_serialize(exc_id, tb, exc_type):
    """Pack exception details into a msgpack blob.

    :param exc_id: unique id assigned to the tracked exception.
    :param tb: the formatted traceback text.
    :param exc_type: name of the exception class.
    :return: tuple of (packed bytes, the plain dict that was packed).
    """
    data = dict(
        version='v1',
        exc_id=exc_id,
        exc_utc_date=datetime.datetime.utcnow().isoformat(),
        exc_timestamp=repr(time.time()),
        exc_message=tb,
        exc_type=exc_type,
    )
    return msgpack.packb(data), data
49 49
50 50
def exc_unserialize(tb):
    """Inverse of :func:`exc_serialize`: unpack a stored msgpack blob."""
    return msgpack.unpackb(tb)
53 53
54 54
def get_exc_store():
    """
    Return the exception-store directory path, creating it on first use.

    The base directory comes from the ``exception_tracker.store_path``
    setting when configured, otherwise the system temp directory is used.
    """
    # imported lazily to read the runtime-populated CONFIG dict
    import vcsserver as app

    base_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
    store_path = os.path.abspath(os.path.join(base_dir, exc_store_dir_name))

    if not os.path.isdir(store_path):
        os.makedirs(store_path)
        log.debug('Initializing exceptions store at %s', store_path)
    return store_path
69 69
70 70
def _store_exception(exc_id, exc_info, prefix):
    """Serialize ``exc_info`` and write it into the exception store.

    :param exc_id: unique id of the tracked exception.
    :param exc_info: ``(exc_type, exc_value, exc_traceback)`` triple as
        returned by ``sys.exc_info()``.
    :param prefix: filename prefix grouping exceptions by origin.
    """
    exc_type, exc_value, exc_traceback = exc_info

    tb = ''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback, None))

    detailed_tb = getattr(exc_value, '_org_exc_tb', None)

    if detailed_tb:
        # BUG FIX: `remote_tb` used to be assigned only inside the
        # isinstance branch, so any truthy non-string `_org_exc_tb`
        # (e.g. a list of lines) raised NameError at the join below.
        # Default to the value as-is and wrap strings into a list.
        remote_tb = detailed_tb
        if isinstance(detailed_tb, compat.string_types):
            remote_tb = [detailed_tb]

        tb += (
            '\n+++ BEG SOURCE EXCEPTION +++\n\n'
            '{}\n'
            '+++ END SOURCE EXCEPTION +++\n'
            ''.format('\n'.join(remote_tb))
        )

        # Avoid that remote_tb also appears in the frame
        del remote_tb

    exc_type_name = exc_type.__name__
    exc_store_path = get_exc_store()
    exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
    # filename layout: <exc_id>_<prefix>_<timestamp>, so later lookups can
    # match on the id prefix (see _find_exc_file)
    exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
    if not os.path.isdir(exc_store_path):
        os.makedirs(exc_store_path)
    stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
    with open(stored_exc_path, 'wb') as f:
        f.write(exc_data)
    log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
103 103
104 104
def store_exception(exc_id, exc_info, prefix=global_prefix):
    """
    Best-effort public entry point for persisting an exception.

    Example usage::

        exc_info = sys.exc_info()
        store_exception(id(exc_info), exc_info)
    """
    try:
        _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
    except Exception:
        # storing must never propagate; raising here would crash the
        # server while it is already handling an error
        log.exception('Failed to store exception `%s` information', exc_id)
119 119
120 120
def _find_exc_file(exc_id, prefix=global_prefix):
    """Return the full path of a stored exception file, or None.

    A file matches when its name starts with ``<exc_id>_<prefix>``
    (or bare ``<exc_id>`` when no prefix is given).
    """
    exc_store_path = get_exc_store()
    if prefix:
        pattern = '{}_{}'.format(exc_id, prefix)
    else:
        # search without a prefix
        pattern = '{}'.format(exc_id)

    # first matching entry wins, same as the original break-on-first scan
    candidates = (f for f in os.listdir(exc_store_path) if f.startswith(pattern))
    found = next(candidates, None)
    if found is None:
        return None
    return os.path.join(exc_store_path, found)
139 139
140 140
def _read_exception(exc_id, prefix):
    """Load and unserialize a stored exception blob; None when missing."""
    exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
    if not exc_id_file_path:
        log.debug('Exception File `%s` not found', exc_id_file_path)
        return None
    with open(exc_id_file_path, 'rb') as f:
        return exc_unserialize(f.read())
149 149
150 150
def read_exception(exc_id, prefix=global_prefix):
    """Best-effort read of a stored exception; never raises."""
    try:
        return _read_exception(exc_id=exc_id, prefix=prefix)
    except Exception:
        # reading must never propagate errors into the caller
        log.exception('Failed to read exception `%s` information', exc_id)
    return None
158 158
159 159
def delete_exception(exc_id, prefix=global_prefix):
    """Best-effort removal of a stored exception file; never raises."""
    try:
        found = _find_exc_file(exc_id, prefix=prefix)
        if found:
            os.remove(found)
    except Exception:
        # deletion must never propagate errors into the caller
        log.exception('Failed to remove exception `%s` information', exc_id)
@@ -1,65 +1,65 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import logging
22 22
23 23 from repoze.lru import LRUCache
24 24
25 25 from vcsserver.utils import safe_str
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
class LRUDict(LRUCache):
    """
    Wrapper to provide partial dict access
    """

    def __setitem__(self, key, value):
        return self.put(key, value)

    def __getitem__(self, key):
        return self.get(key)

    def __contains__(self, key):
        # NOTE(review): a stored falsy value (0, '', None) is reported as
        # absent, since LRUCache.get returns the value itself -- confirm
        # callers never cache falsy values
        return bool(self.get(key))

    def __delitem__(self, key):
        # reaches into repoze.lru internals (`data`); there is no public
        # delete API on LRUCache
        del self.data[key]

    def keys(self):
        return self.data.keys()
49 49
50 50
class LRUDictDebug(LRUDict):
    """
    Wrapper to provide some debug options
    """

    def _report_keys(self):
        # log cache fill ratio plus a numbered listing of every key
        fill = '%s/%s' % (len(self.keys()), self.size)
        lines = ['']
        for idx, cache_key in enumerate(self.keys(), start=1):
            lines.append('%s - %s' % (idx, safe_str(cache_key)))
        lines.append('')
        log.debug('current LRU keys (%s):%s', fill, '\n'.join(lines))

    def __getitem__(self, key):
        self._report_keys()
        return self.get(key)
@@ -1,72 +1,72 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 from dogpile.cache import register_backend
20 20
# Expose the custom cache backends to dogpile under dotted names, so region
# configs can select them via `backend = dogpile.cache.rc.<name>`.
register_backend(
    "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
    "LRUMemoryBackend")

register_backend(
    "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
    "FileNamespaceBackend")

register_backend(
    "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
    "RedisPickleBackend")

register_backend(
    "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
    "RedisMsgPackBackend")


log = logging.getLogger(__name__)

# NOTE(review): these imports deliberately sit below the register_backend()
# calls -- presumably to avoid an import cycle with the backends module;
# confirm before reordering to the top of the file.
from . import region_meta
from .utils import (get_default_cache_settings, backend_key_generator, make_region)
42 42
43 43
def configure_dogpile_cache(settings):
    """Create and register dogpile cache regions from ``rc_cache.*`` settings.

    :param settings: application settings mapping; keys shaped like
        ``rc_cache.<region>.<option>`` drive region creation.
    """
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # the region name is the first dotted component of each de-prefixed key
    avail_regions = set(
        key.split('.', 1)[0] for key in rc_cache_data.keys())
    log.debug('dogpile: found following cache regions: %s', avail_regions)

    # build one region object per discovered namespace and register it
    for region_name in avail_regions:
        new_region = make_region(name=region_name, function_key_generator=None)
        new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
        # key generation needs to know the concrete backend (for its prefix)
        new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
        log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = new_region
69 69
70 70
def includeme(config):
    """Pyramid plugin hook: wire up dogpile cache regions from app settings."""
    configure_dogpile_cache(config.registry.settings)
@@ -1,253 +1,253 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import errno
20 20 import logging
21 21
22 22 import msgpack
23 23 import redis
24 24
25 25 from dogpile.cache.api import CachedValue
26 26 from dogpile.cache.backends import memory as memory_backend
27 27 from dogpile.cache.backends import file as file_backend
28 28 from dogpile.cache.backends import redis as redis_backend
29 29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 30 from dogpile.cache.util import memoized_property
31 31
32 32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
33 33
34 34
35 35 _default_max_size = 1024
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-memory dogpile backend bounded by an LRU dict."""

    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)
        # opt-in debug variant logs the full key listing on every access
        lru_class = LRUDictDebug if arguments.pop('log_key_count', None) else LRUDict
        arguments['cache_dict'] = lru_class(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for cache_key in keys:
            self.delete(cache_key)
65 65
class PickleSerializer(object):
    """Mixin (de)serializing cached values with pickle.

    ``safe`` swallows serializer errors and yields ``NO_VALUE`` instead;
    loading defaults to safe, dumping does not.
    """

    def _dumps(self, value, safe=False):
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if safe:
                return NO_VALUE
            raise

    def _loads(self, value, safe=True):
        try:
            return compat.pickle.loads(value)
        except Exception:
            if safe:
                return NO_VALUE
            raise
85 85
86 86
class MsgPackSerializer(object):
    """Mixin (de)serializing cached values with msgpack.

    Same ``safe`` semantics as :class:`PickleSerializer`.
    """

    def _dumps(self, value, safe=False):
        try:
            return msgpack.packb(value)
        except Exception:
            if safe:
                return NO_VALUE
            raise

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if safe:
                return NO_VALUE
            raise
111 111
112 112
import fcntl
# NOTE(review): keeps a module-level reference to the original flock --
# presumably so it stays reachable if fcntl.flock is monkey-patched later;
# confirm before removing
flock_org = fcntl.flock
115 115
116 116
class CustomLockFactory(FileLock):
    """Lock factory handed to the DBM backend; currently identical to
    ``FileLock``, kept as a hook for customized locking."""

    pass
120 120
121 121
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM file-backed dogpile backend using pickle for value serialization."""

    key_prefix = 'file_backend'

    def __init__(self, arguments):
        # force our lock factory so locking behavior stays customizable
        arguments['lock_factory'] = CustomLockFactory
        super(FileNamespaceBackend, self).__init__(arguments)

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def matches(key):
            # an empty prefix matches every key
            return not prefix or key.startswith(prefix)

        with self._dbm_file(True) as dbm:
            return filter(matches, dbm.keys())

    def get_store(self):
        return self.filename

    def get(self, key):
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                raw = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    raw = dbm[key]
                except KeyError:
                    raw = NO_VALUE
            if raw is NO_VALUE:
                return raw
            return self._loads(raw)

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for map_key, map_value in mapping.items():
                dbm[map_key] = self._dumps(map_value)
172 172
173 173
class BaseRedisBackend(redis_backend.RedisBackend):
    """Shared redis-backed dogpile backend logic; value serialization is
    supplied by a mixin (:class:`PickleSerializer` / :class:`MsgPackSerializer`)."""

    def _create_client(self):
        # prefer a full redis URL when configured, else host/port/db parts
        args = {}

        if self.url is not None:
            args.update(url=self.url)

        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        # pool connections instead of opening one per operation
        connection_pool = redis.ConnectionPool(**args)

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        # NOTE(review): KEYS scans the whole keyspace -- fine for
        # admin/debug use, avoid on hot paths
        prefix = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(prefix)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        # redis returns None for a missing key; map it to dogpile's NO_VALUE
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        return self._loads(value)

    def get_multi(self, keys):
        if not keys:
            return []
        values = self.client.mget(keys)
        loads = self._loads
        return [
            loads(v) if v is not None else NO_VALUE
            for v in values]

    def set(self, key, value):
        # use SETEX when an expiration is configured so redis evicts for us
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        dumps = self._dumps
        mapping = dict(
            (k, dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            # MSET cannot set TTLs; emulate it with a pipelined SETEX batch
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        u = redis_backend.u
        if self.distributed_lock:
            # cross-process mutex living in redis itself
            lock_key = u('_lock_{0}').format(key)
            log.debug('Trying to acquire Redis lock for key %s', lock_key)
            return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
        else:
            return None
244 244
245 245
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend storing values as pickle blobs."""
    key_prefix = 'redis_pickle_backend'
249 249
250 250
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend storing values as msgpack blobs."""
    key_prefix = 'redis_msgpack_backend'
@@ -1,26 +1,26 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import tempfile
20 20
# Defaults applied to region configs; `cache_dir` may be overridden at
# startup (configure_dogpile_cache writes into this dict).
dogpile_config_defaults = {
    'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
}

# GLOBAL TO STORE ALL REGISTERED REGIONS
dogpile_cache_regions = {}
@@ -1,153 +1,153 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import functools
21 21 from decorator import decorate
22 22
23 23 from dogpile.cache import CacheRegion
24 24 from dogpile.cache.util import compat
25 25
26 26 from vcsserver.utils import safe_str, sha1
27 27
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
class RhodeCodeCacheRegion(CacheRegion):
    """CacheRegion subclass adding a conditionally-enabled caching decorator."""

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different than should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        # expiration_time may be a callable producing the timeout lazily
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):

            # bypass dogpile entirely when caching is disabled via `condition`
            # NOTE(review): `func_name` is Python 2 only -- confirm runtime
            if not condition:
                log.debug('Calling un-cached func:%s', user_func.func_name)
                return user_func(*arg, **kw)

            key = key_generator(*arg, **kw)

            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached fn:%s', user_func.func_name)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                # drop the cached entry for these arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store an explicit value for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value for these arguments
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers on the decorated function
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.

            return decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
106 106
107 107
def make_region(*arg, **kw):
    """Factory returning our :class:`RhodeCodeCacheRegion` instead of the
    stock dogpile ``CacheRegion``."""
    return RhodeCodeCacheRegion(*arg, **kw)
110 110
111 111
def get_default_cache_settings(settings, prefixes=None):
    """Extract and de-prefix cache-related settings from a settings dict.

    :param settings: full settings mapping.
    :param prefixes: list of key prefixes (e.g. ``['rc_cache.']``); a key
        starting with a prefix is returned with that prefix removed.
    :return: dict of de-prefixed setting name -> (stripped) value.
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # BUG FIX: `key.split(prefix)[1]` mangled keys in which the
                # prefix substring occurred more than once; slice off only
                # the leading prefix instead.
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, compat.string_types):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
124 124
125 125
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(safe_str(arg) for arg in args)
    return sha1(joined)
131 131
132 132
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    # partial application: the returned callable accepts (namespace, fn),
    # matching dogpile's function_key_generator contract
    return functools.partial(key_generator, backend)
140 140
141 141
def key_generator(backend, namespace, fn):
    """Return a callable computing the final cache key for calls of ``fn``."""
    fname = fn.__name__

    def generate_key(*args):
        # key layout: <backend prefix>:<namespace>:<function>_<args digest>
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        digest = compute_key_from_params(*args)
        return "{}:{}:{}_{}".format(prefix, ns, fname, digest)

    return generate_key
@@ -1,27 +1,27 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
# per-process running total of handled requests
counter = 0


def get_request_counter(request):
    """Return a monotonically increasing per-process request number.

    :param request: unused; kept to satisfy the caller's interface.
    """
    global counter
    # NOTE(review): `+= 1` on a global is not atomic under threads; values
    # could repeat in a multi-threaded server -- confirm deployment model
    counter += 1
    return counter
@@ -1,386 +1,386 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """Handles the Git smart protocol."""
19 19
20 20 import os
21 21 import socket
22 22 import logging
23 23
24 24 import simplejson as json
25 25 import dulwich.protocol
26 26 from webob import Request, Response, exc
27 27
28 28 from vcsserver import hooks, subprocessio
29 29
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
class FileWrapper(object):
    """Bounded reader that never hands out more than ``content_length`` bytes.

    Wraps a raw file-like object (typically the WSGI input stream) and
    tracks how much of the declared body is still unread.
    """

    def __init__(self, fd, content_length):
        self.fd = fd
        self.content_length = content_length
        # bytes of the declared body not yet handed out
        self.remain = content_length

    def read(self, size):
        if size > self.remain:
            if not self.remain:
                # body fully consumed; signal exhaustion with None
                return None
            # clamp the final read to whatever is left of the body
            last_chunk = self.fd.read(self.remain)
            self.remain = 0
            return last_chunk
        try:
            chunk = self.fd.read(size)
        except socket.error:
            # normalize low-level socket failures to IOError for callers
            raise IOError(self)
        self.remain -= size
        return chunk

    def __repr__(self):
        return '<FileWrapper %s len: %s, read: %s>' % (
            self.fd, self.content_length, self.content_length - self.remain
        )
60 60
61 61
class GitRepository(object):
    """WSGI app for handling Git smart protocol endpoints."""

    # Entries every valid (bare) git directory must contain; used in
    # __init__ to verify that content_path really is a git repository.
    git_folder_signature = frozenset(
        ('config', 'head', 'info', 'objects', 'refs'))
    # Smart-protocol services we are willing to run.
    commands = frozenset(('git-upload-pack', 'git-receive-pack'))
    valid_accepts = frozenset(('application/x-%s-result' %
                               c for c in commands))

    # The last bytes are the SHA1 of the first 12 bytes.
    EMPTY_PACK = (
        'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
        '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
    )
    SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))

    def __init__(self, repo_name, content_path, git_path, update_server_info,
                 extras):
        """
        :param repo_name: repository name as it appears in request paths
        :param content_path: filesystem path of the (bare) git directory
        :param git_path: path of the ``git`` executable to invoke
        :param update_server_info: when True, run ``git update-server-info``
            after each push (see :meth:`backend`)
        :param extras: hook extras dict, serialized into the RC_SCM_DATA
            environment variable of spawned git processes
        :raises OSError: when ``content_path`` lacks the git dir signature
        """
        files = frozenset(f.lower() for f in os.listdir(content_path))
        valid_dir_signature = self.git_folder_signature.issubset(files)

        if not valid_dir_signature:
            raise OSError('%s missing git signature' % content_path)

        self.content_path = content_path
        self.repo_name = repo_name
        self.extras = extras
        self.git_path = git_path
        self.update_server_info = update_server_info

    def _get_fixedpath(self, path):
        """
        Small fix for repo_path: strip everything up to and including the
        repository name, keeping only the service part of the URL
        (e.g. ``info/refs`` or ``git-upload-pack``).

        :param path: raw PATH_INFO of the request
        """
        path = path.split(self.repo_name, 1)[-1]
        if path.startswith('.git'):
            # for bare repos we still get the .git prefix inside, we skip it
            # here, and remove from the service command
            path = path[4:]

        return path.strip('/')

    def inforefs(self, request, unused_environ):
        """
        WSGI Response producer for HTTP GET Git Smart
        HTTP /info/refs request.
        """

        git_command = request.GET.get('service')
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        # please, resist the urge to add '\n' to git capture and increment
        # line count by 1.
        # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
        # a part of protocol.
        # The code in Git client not only does NOT need '\n', but actually
        # blows up if you sprinkle "flush" (0000) as "0001\n".
        # It reads binary, per number of bytes specified.
        # if you do add '\n' as part of data, count it.
        server_advert = '# service=%s\n' % git_command
        # pkt-line framing: 4 lowercase hex digits covering the payload plus
        # the 4 prefix bytes themselves.
        packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
        try:
            gitenv = dict(os.environ)
            # forget all configs
            gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
            # git_command[4:] strips the leading 'git-' to get the subcommand
            command = [self.git_path, git_command[4:], '--stateless-rpc',
                       '--advertise-refs', self.content_path]
            out = subprocessio.SubprocessIOChunker(
                command,
                env=gitenv,
                starting_values=[packet_len + server_advert + '0000'],
                shell=False
            )
        except EnvironmentError:
            log.exception('Error processing command')
            raise exc.HTTPExpectationFailed()

        resp = Response()
        resp.content_type = 'application/x-%s-advertisement' % str(git_command)
        resp.charset = None
        # stream the subprocess output directly as the response body
        resp.app_iter = out

        return resp

    def _get_want_capabilities(self, request):
        """Read the capabilities found in the first want line of the request."""
        # peek at the first line without consuming the body stream
        pos = request.body_file_seekable.tell()
        first_line = request.body_file_seekable.readline()
        request.body_file_seekable.seek(pos)

        return frozenset(
            dulwich.protocol.extract_want_line_capabilities(first_line)[1])

    def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
        """
        Construct a response with an empty PACK file.

        We use an empty PACK file, as that would trigger the failure of the pull
        or clone command.

        We also print in the error output a message explaining why the command
        was aborted.

        If additionally, the user is accepting messages we send them the output
        of the pre-pull hook.

        Note that for clients not supporting side-band we just send them the
        empty PACK file.
        """
        if self.SIDE_BAND_CAPS.intersection(capabilities):
            response = []
            proto = dulwich.protocol.Protocol(None, response.append)
            proto.write_pkt_line('NAK\n')
            self._write_sideband_to_proto(pre_pull_messages, proto,
                                          capabilities)
            # N.B.(skreft): Do not change the sideband channel to 3, as that
            # produces a fatal error in the client:
            #   fatal: error in sideband demultiplexer
            proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
            proto.write_sideband(1, self.EMPTY_PACK)

            # writes 0000
            proto.write_pkt_line(None)

            return response
        else:
            return [self.EMPTY_PACK]

    def _write_sideband_to_proto(self, data, proto, capabilities):
        """
        Write the data to the proto's sideband number 2.

        We do not use dulwich's write_sideband directly as it only supports
        side-band-64k.
        """
        if not data:
            return

        # N.B.(skreft): The values below are explained in the pack protocol
        # documentation, section Packfile Data.
        # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
        if 'side-band-64k' in capabilities:
            chunk_size = 65515
        elif 'side-band' in capabilities:
            chunk_size = 995
        else:
            return

        chunker = (
            data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))

        for chunk in chunker:
            proto.write_sideband(2, chunk)

    def _get_messages(self, data, capabilities):
        """Return a list with packets for sending data in sideband number 2."""
        response = []
        proto = dulwich.protocol.Protocol(None, response.append)

        self._write_sideband_to_proto(data, proto, capabilities)

        return response

    def _inject_messages_to_response(self, response, capabilities,
                                     start_messages, end_messages):
        """
        Given a list response we inject the pre/post-pull messages.

        We only inject the messages if the client supports sideband, and the
        response has the format:
            0008NAK\n...0000

        Note that we do not check the no-progress capability as by default, git
        sends it, which effectively would block all messages.
        """
        if not self.SIDE_BAND_CAPS.intersection(capabilities):
            return response

        if not start_messages and not end_messages:
            return response

        # make a list out of response if it's an iterator
        # so we can investigate it for message injection.
        if hasattr(response, '__iter__'):
            response = list(response)

        if (not response[0].startswith('0008NAK\n') or
                not response[-1].endswith('0000')):
            return response

        # rebuild the response: keep the leading '0008NAK\n' and trailing
        # '0000' flush packet, splicing the sideband messages just inside
        # those markers ([8:] strips the NAK prefix, [:-4] the flush).
        new_response = ['0008NAK\n']
        new_response.extend(self._get_messages(start_messages, capabilities))
        if len(response) == 1:
            new_response.append(response[0][8:-4])
        else:
            new_response.append(response[0][8:])
            new_response.extend(response[1:-1])
            new_response.append(response[-1][:-4])
        new_response.extend(self._get_messages(end_messages, capabilities))
        new_response.append('0000')

        return new_response

    def backend(self, request, environ):
        """
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
        Reads commands and data from HTTP POST's body.
        returns an iterator obj with contents of git command's
        response to stdout
        """
        # TODO(skreft): think how we could detect an HTTPLockedException, as
        # we probably want to have the same mechanism used by mercurial and
        # simplevcs.
        # For that we would need to parse the output of the command looking for
        # some signs of the HTTPLockedError, parse the data and reraise it in
        # pygrack. However, that would interfere with the streaming.
        #
        # Now the output of a blocked push is:
        # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
        # POST git-receive-pack (1047 bytes)
        # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
        # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
        # ! [remote rejected] master -> master (pre-receive hook declined)
        # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'

        git_command = self._get_fixedpath(request.path_info)
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        capabilities = None
        if git_command == 'git-upload-pack':
            capabilities = self._get_want_capabilities(request)

        # bound the body stream to the declared length when one was sent
        if 'CONTENT_LENGTH' in environ:
            inputstream = FileWrapper(request.body_file_seekable,
                                      request.content_length)
        else:
            inputstream = request.body_file_seekable

        resp = Response()
        resp.content_type = ('application/x-%s-result' %
                             git_command.encode('utf8'))
        resp.charset = None

        # run the pre-pull hook first; on failure short-circuit with an
        # empty PACK so the client aborts the fetch/clone
        pre_pull_messages = ''
        if git_command == 'git-upload-pack':
            status, pre_pull_messages = hooks.git_pre_pull(self.extras)
            if status != 0:
                resp.app_iter = self._build_failed_pre_pull_response(
                    capabilities, pre_pull_messages)
                return resp

        gitenv = dict(os.environ)
        # forget all configs
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
        cmd = [self.git_path, git_command[4:], '--stateless-rpc',
               self.content_path]
        log.debug('handling cmd %s', cmd)

        out = subprocessio.SubprocessIOChunker(
            cmd,
            inputstream=inputstream,
            env=gitenv,
            cwd=self.content_path,
            shell=False,
            fail_on_stderr=False,
            fail_on_return_code=False
        )

        if self.update_server_info and git_command == 'git-receive-pack':
            # We need to fully consume the iterator here, as the
            # update-server-info command needs to be run after the push.
            out = list(out)

            # Updating refs manually after each push.
            # This is required as some clients are exposing Git repos internally
            # with the dumb protocol.
            cmd = [self.git_path, 'update-server-info']
            log.debug('handling cmd %s', cmd)
            output = subprocessio.SubprocessIOChunker(
                cmd,
                inputstream=inputstream,
                env=gitenv,
                cwd=self.content_path,
                shell=False,
                fail_on_stderr=False,
                fail_on_return_code=False
            )
            # Consume all the output so the subprocess finishes
            for _ in output:
                pass

        # splice pre/post-pull hook messages into the sideband stream
        if git_command == 'git-upload-pack':
            unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
            resp.app_iter = self._inject_messages_to_response(
                out, capabilities, pre_pull_messages, post_pull_messages)
        else:
            resp.app_iter = out

        return resp

    def __call__(self, environ, start_response):
        # dispatch: GET .../info/refs goes to the advertisement handler,
        # everything else is a smart-protocol POST handled by backend()
        request = Request(environ)
        _path = self._get_fixedpath(request.path_info)
        if _path.startswith('info/refs'):
            app = self.inforefs
        else:
            app = self.backend

        try:
            resp = app(request, environ)
        except exc.HTTPException as error:
            log.exception('HTTP Error')
            resp = error
        except Exception:
            log.exception('Unknown error')
            resp = exc.HTTPInternalServerError()

        return resp(environ, start_response)
@@ -1,34 +1,34 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from vcsserver import scm_app, wsgi_app_caller
19 19
20 20
class GitRemoteWsgi(object):
    """Remote-callable wrapper that runs the Git WSGI app for one request."""

    def handle(self, environ, input_data, *args, **kwargs):
        # build the Git smart-protocol app on demand and execute a single
        # WSGI request against it
        wsgi_app = scm_app.create_git_wsgi_app(*args, **kwargs)
        caller = wsgi_app_caller.WSGIAppCaller(wsgi_app)
        return caller.handle(environ, input_data)
27 27
28 28
class HgRemoteWsgi(object):
    """Remote-callable wrapper that runs the Mercurial WSGI app for one request."""

    def handle(self, environ, input_data, *args, **kwargs):
        # build the hgweb app on demand and execute a single WSGI request
        wsgi_app = scm_app.create_hg_wsgi_app(*args, **kwargs)
        caller = wsgi_app_caller.WSGIAppCaller(wsgi_app)
        return caller.handle(environ, input_data)
@@ -1,235 +1,235 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import itertools
21 21
22 22 import mercurial
23 23 import mercurial.error
24 24 import mercurial.wireprotoserver
25 25 import mercurial.hgweb.common
26 26 import mercurial.hgweb.hgweb_mod
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 30
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 # propagated from mercurial documentation
36 36 HG_UI_SECTIONS = [
37 37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 40 ]
41 41
42 42
class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
    """Extension of hgweb that simplifies some functions."""

    def _get_view(self, repo):
        """Views are not supported."""
        return repo

    def loadsubweb(self):
        """The result is only used in the templater method which is not used."""
        return None

    def run(self):
        """Unused function so raise an exception if accidentally called."""
        raise NotImplementedError

    def templater(self, req):
        """Function used in an unreachable code path.

        This code is unreachable because we guarantee that the HTTP request,
        corresponds to a Mercurial command. See the is_hg method. So, we are
        never going to get a user-visible url.
        """
        raise NotImplementedError

    def archivelist(self, nodeid):
        """Unused function so raise an exception if accidentally called."""
        raise NotImplementedError

    def __call__(self, environ, start_response):
        """Run the WSGI application.

        This may be called by multiple threads.
        """
        from mercurial.hgweb import request as requestmod
        req = requestmod.parserequestfromenv(environ)
        res = requestmod.wsgiresponse(req, start_response)
        gen = self.run_wsgi(req, res)

        # Eagerly pull the first chunk so run_wsgi actually starts executing
        # before we hand the iterator back. If a chunk exists, `first_chunk`
        # is rebound from None to a generator function yielding it — the
        # truthiness check below relies on that rebinding.
        first_chunk = None

        try:
            data = gen.next()

            def first_chunk():
                yield data
        except StopIteration:
            pass

        if first_chunk:
            # re-prepend the consumed chunk to the remaining stream
            return itertools.chain(first_chunk(), gen)
        return gen

    def _runwsgi(self, req, res, repo):

        cmd = req.qsparams.get('cmd', '')
        if not mercurial.wireprotoserver.iscmd(cmd):
            # NOTE(marcink): for unsupported commands, we return bad request
            # internally from HG
            from mercurial.hgweb.common import statusmessage
            res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
            res.setbodybytes('')
            return res.sendresponse()

        return super(HgWeb, self)._runwsgi(req, res, repo)
107 107
108 108
def make_hg_ui_from_config(repo_config):
    """
    Create a fresh Mercurial ``ui`` object populated from ``repo_config``.

    :param repo_config: iterable of ``(section, option, value)`` tuples.
    """
    baseui = mercurial.ui.ui()

    # Drop any configuration picked up from the environment: the overlay,
    # user and trusted configs all start out empty.
    for cfg_attr in ('_ocfg', '_ucfg', '_tcfg'):
        setattr(baseui, cfg_attr, mercurial.config.config())

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    return baseui
124 124
125 125
def update_hg_ui_from_hgrc(baseui, repo_path):
    """Overlay settings from the repository's ``.hg/hgrc`` onto ``baseui``."""
    hgrc_path = os.path.join(repo_path, '.hg', 'hgrc')

    if not os.path.isfile(hgrc_path):
        log.debug('hgrc file is not present at %s, skipping...', hgrc_path)
        return

    log.debug('reading hgrc from %s', hgrc_path)
    hgrc = mercurial.config.config()
    hgrc.read(hgrc_path)
    # only whitelisted sections are copied over; see HG_UI_SECTIONS
    for section in HG_UI_SECTIONS:
        for key, value in hgrc.items(section):
            log.debug('settings ui from file: [%s] %s=%s', section, key, value)
            baseui.setconfig(section, key, value)
139 139
140 140
def create_hg_wsgi_app(repo_path, repo_name, config):
    """
    Prepares a WSGI application to handle Mercurial requests.

    :param config: is a list of 3-item tuples representing a ConfigObject
        (it is the serialized version of the config object).
    """
    log.debug("Creating Mercurial WSGI application")

    baseui = make_hg_ui_from_config(config)
    # repo-local .hg/hgrc entries are layered on top of the serialized config
    update_hg_ui_from_hgrc(baseui, repo_path)

    try:
        return HgWeb(repo_path, name=repo_name, baseui=baseui)
    except mercurial.error.RequirementError as e:
        # NOTE(review): the double call looks intentional —
        # ``RequirementException(e)`` presumably returns an exception class
        # that is then instantiated with ``e``; confirm in vcsserver.exceptions.
        raise exceptions.RequirementException(e)(e)
157 157
158 158
class GitHandler(object):
    """
    WSGI handler for Git operations like push/pull etc, dispatching to a
    pygrack.GitRepository application.
    """

    def __init__(self, repo_location, repo_name, git_path, update_server_info,
                 extras):
        # fail early when pointed at something that is not a directory
        if not os.path.isdir(repo_location):
            raise OSError(repo_location)
        self.content_path = repo_location
        self.repo_name = repo_name
        self.repo_location = repo_location
        self.extras = extras
        self.git_path = git_path
        self.update_server_info = update_server_info

    def __call__(self, environ, start_response):
        # Try the location itself first (bare repo), then its `.git`
        # subdirectory (non-bare checkout); fall back to 404 when neither
        # has a valid git directory layout.
        app = webob.exc.HTTPNotFound()
        for candidate in (self.content_path,
                          os.path.join(self.content_path, '.git')):
            try:
                app = pygrack.GitRepository(
                    self.repo_name, candidate, self.git_path,
                    self.update_server_info, self.extras)
            except OSError:
                continue
            else:
                break

        return app(environ, start_response)
189 189
190 190
def create_git_wsgi_app(repo_path, repo_name, config):
    """
    Creates a WSGI application to handle Git requests.

    :param config: is a dictionary holding the extras. Note that the
        ``git_update_server_info`` key is consumed (popped) here.
    """
    update_server_info = config.pop('git_update_server_info')
    return GitHandler(
        repo_path, repo_name, settings.GIT_EXECUTABLE, update_server_info,
        config)
203 203
204 204
class GitLFSHandler(object):
    """
    Handler for Git LFS operations.

    Mirrors GitHandler's constructor so both can be built from the same
    arguments; the actual WSGI app is produced by :meth:`get_app`.
    """

    def __init__(self, repo_location, repo_name, git_path, update_server_info,
                 extras):
        # fail early when pointed at something that is not a directory
        if not os.path.isdir(repo_location):
            raise OSError(repo_location)
        self.content_path = repo_location
        self.repo_name = repo_name
        self.repo_location = repo_location
        self.extras = extras
        self.git_path = git_path
        self.update_server_info = update_server_info

    def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
        """Build and return the configured LFS WSGI application."""
        return git_lfs.create_app(
            git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
224 224
225 225
def create_git_lfs_wsgi_app(repo_path, repo_name, config):
    """
    Creates a WSGI application to handle Git LFS requests.

    :param config: dictionary with the extras; LFS-specific keys are
        consumed (popped) here, the remainder is forwarded to the handler.
    """
    git_path = settings.GIT_EXECUTABLE
    update_server_info = config.pop('git_update_server_info')
    lfs_enabled = config.pop('git_lfs_enabled')
    lfs_store_path = config.pop('git_lfs_store_path')
    lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')

    handler = GitLFSHandler(
        repo_path, repo_name, git_path, update_server_info, config)
    return handler.get_app(lfs_enabled, lfs_store_path, lfs_http_scheme)
@@ -1,78 +1,78 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import gc
19 19 import logging
20 20 import os
21 21 import time
22 22
23 23
24 24 log = logging.getLogger(__name__)
25 25
26 26
class VcsServer(object):
    """
    Exposed remote interface of the vcsserver itself.

    This object can be used to manage the server remotely. Right now the main
    use case is to allow to shut down the server.
    """

    # flipped to True once a remote shutdown was requested
    _shutdown = False

    def shutdown(self):
        """Mark the server as shutting down."""
        self._shutdown = True

    def ping(self):
        """
        Utility to probe a server connection.
        """
        log.debug("Received server ping.")

    def echo(self, data):
        """
        Utility for performance testing.

        Allows to pass in arbitrary data and will return this data.
        """
        log.debug("Received server echo.")
        return data

    def sleep(self, seconds):
        """
        Utility to simulate long running server interaction.
        """
        log.debug("Sleeping %s seconds", seconds)
        time.sleep(seconds)

    def get_pid(self):
        """
        Allows to discover the PID based on a proxy object.
        """
        return os.getpid()

    def run_gc(self):
        """
        Allows to trigger the garbage collector.

        Main intention is to support statistics gathering during test runs.
        """
        return {
            'freed_objects': gc.collect(),
            'garbage': len(gc.garbage),
        }
@@ -1,22 +1,22 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
# Encoding used for data crossing the wire-protocol boundary.
WIRE_ENCODING = 'UTF-8'
# Names of the external VCS executables the server invokes.
GIT_EXECUTABLE = 'git'
SVN_EXECUTABLE = 'svn'
SVNLOOK_EXECUTABLE = 'svnlook'
# NOTE(review): presumably an optional directory prefix for the binaries
# above (empty means resolve via $PATH) — confirm where it is consumed.
BINARY_DIR = ''
@@ -1,791 +1,791 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 from urllib2 import URLError
23 23 import urlparse
24 24 import logging
25 25 import posixpath as vcspath
26 26 import StringIO
27 27 import urllib
28 28 import traceback
29 29
30 30 import svn.client
31 31 import svn.core
32 32 import svn.delta
33 33 import svn.diff
34 34 import svn.fs
35 35 import svn.repos
36 36
37 37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 39 from vcsserver.vcs_base import RemoteBase
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 svn_compatible_versions_map = {
45 45 'pre-1.4-compatible': '1.3',
46 46 'pre-1.5-compatible': '1.4',
47 47 'pre-1.6-compatible': '1.5',
48 48 'pre-1.8-compatible': '1.7',
49 49 'pre-1.9-compatible': '1.8',
50 50 }
51 51
52 52 current_compatible_version = '1.12'
53 53
54 54
55 55 def reraise_safe_exceptions(func):
56 56 """Decorator for converting svn exceptions to something neutral."""
57 57 def wrapper(*args, **kwargs):
58 58 try:
59 59 return func(*args, **kwargs)
60 60 except Exception as e:
61 61 if not hasattr(e, '_vcs_kind'):
62 62 log.exception("Unhandled exception in svn remote call")
63 63 raise_from_original(exceptions.UnhandledException(e))
64 64 raise
65 65 return wrapper
66 66
67 67
68 68 class SubversionFactory(RepoFactory):
69 69 repo_type = 'svn'
70 70
71 71 def _create_repo(self, wire, create, compatible_version):
72 72 path = svn.core.svn_path_canonicalize(wire['path'])
73 73 if create:
74 74 fs_config = {'compatible-version': current_compatible_version}
75 75 if compatible_version:
76 76
77 77 compatible_version_string = \
78 78 svn_compatible_versions_map.get(compatible_version) \
79 79 or compatible_version
80 80 fs_config['compatible-version'] = compatible_version_string
81 81
82 82 log.debug('Create SVN repo with config "%s"', fs_config)
83 83 repo = svn.repos.create(path, "", "", None, fs_config)
84 84 else:
85 85 repo = svn.repos.open(path)
86 86
87 87 log.debug('Got SVN object: %s', repo)
88 88 return repo
89 89
90 90 def repo(self, wire, create=False, compatible_version=None):
91 91 """
92 92 Get a repository instance for the given path.
93 93 """
94 94 return self._create_repo(wire, create, compatible_version)
95 95
96 96
97 97 NODE_TYPE_MAPPING = {
98 98 svn.core.svn_node_file: 'file',
99 99 svn.core.svn_node_dir: 'dir',
100 100 }
101 101
102 102
103 103 class SvnRemote(RemoteBase):
104 104
105 105 def __init__(self, factory, hg_factory=None):
106 106 self._factory = factory
107 107 # TODO: Remove once we do not use internal Mercurial objects anymore
108 108 # for subversion
109 109 self._hg_factory = hg_factory
110 110
111 111 @reraise_safe_exceptions
112 112 def discover_svn_version(self):
113 113 try:
114 114 import svn.core
115 115 svn_ver = svn.core.SVN_VERSION
116 116 except ImportError:
117 117 svn_ver = None
118 118 return svn_ver
119 119
120 120 @reraise_safe_exceptions
121 121 def is_empty(self, wire):
122 122
123 123 try:
124 124 return self.lookup(wire, -1) == 0
125 125 except Exception:
126 126 log.exception("failed to read object_store")
127 127 return False
128 128
129 129 def check_url(self, url, config_items):
130 130 # this can throw exception if not installed, but we detect this
131 131 from hgsubversion import svnrepo
132 132
133 133 baseui = self._hg_factory._create_config(config_items)
134 134 # uuid function get's only valid UUID from proper repo, else
135 135 # throws exception
136 136 try:
137 137 svnrepo.svnremoterepo(baseui, url).svn.uuid
138 138 except Exception:
139 139 tb = traceback.format_exc()
140 140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
141 141 raise URLError(
142 142 '"%s" is not a valid Subversion source url.' % (url, ))
143 143 return True
144 144
145 145 def is_path_valid_repository(self, wire, path):
146 146
147 147 # NOTE(marcink): short circuit the check for SVN repo
148 148 # the repos.open might be expensive to check, but we have one cheap
149 149 # pre condition that we can use, to check for 'format' file
150 150
151 151 if not os.path.isfile(os.path.join(path, 'format')):
152 152 return False
153 153
154 154 try:
155 155 svn.repos.open(path)
156 156 except svn.core.SubversionException:
157 157 tb = traceback.format_exc()
158 158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
159 159 return False
160 160 return True
161 161
162 162 @reraise_safe_exceptions
163 163 def verify(self, wire,):
164 164 repo_path = wire['path']
165 165 if not self.is_path_valid_repository(wire, repo_path):
166 166 raise Exception(
167 167 "Path %s is not a valid Subversion repository." % repo_path)
168 168
169 169 cmd = ['svnadmin', 'info', repo_path]
170 170 stdout, stderr = subprocessio.run_command(cmd)
171 171 return stdout
172 172
173 173 def lookup(self, wire, revision):
174 174 if revision not in [-1, None, 'HEAD']:
175 175 raise NotImplementedError
176 176 repo = self._factory.repo(wire)
177 177 fs_ptr = svn.repos.fs(repo)
178 178 head = svn.fs.youngest_rev(fs_ptr)
179 179 return head
180 180
181 181 def lookup_interval(self, wire, start_ts, end_ts):
182 182 repo = self._factory.repo(wire)
183 183 fsobj = svn.repos.fs(repo)
184 184 start_rev = None
185 185 end_rev = None
186 186 if start_ts:
187 187 start_ts_svn = apr_time_t(start_ts)
188 188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
189 189 else:
190 190 start_rev = 1
191 191 if end_ts:
192 192 end_ts_svn = apr_time_t(end_ts)
193 193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
194 194 else:
195 195 end_rev = svn.fs.youngest_rev(fsobj)
196 196 return start_rev, end_rev
197 197
198 198 def revision_properties(self, wire, revision):
199 199
200 200 cache_on, context_uid, repo_id = self._cache_on(wire)
201 201 @self.region.conditional_cache_on_arguments(condition=cache_on)
202 202 def _revision_properties(_repo_id, _revision):
203 203 repo = self._factory.repo(wire)
204 204 fs_ptr = svn.repos.fs(repo)
205 205 return svn.fs.revision_proplist(fs_ptr, revision)
206 206 return _revision_properties(repo_id, revision)
207 207
208 208 def revision_changes(self, wire, revision):
209 209
210 210 repo = self._factory.repo(wire)
211 211 fsobj = svn.repos.fs(repo)
212 212 rev_root = svn.fs.revision_root(fsobj, revision)
213 213
214 214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 216 base_dir = ""
217 217 send_deltas = False
218 218 svn.repos.replay2(
219 219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 220 editor_ptr, editor_baton, None)
221 221
222 222 added = []
223 223 changed = []
224 224 removed = []
225 225
226 226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 227 for path, change in editor.changes.iteritems():
228 228 # TODO: Decide what to do with directory nodes. Subversion can add
229 229 # empty directories.
230 230
231 231 if change.item_kind == svn.core.svn_node_dir:
232 232 continue
233 233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 234 added.append(path)
235 235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 236 svn.repos.CHANGE_ACTION_REPLACE]:
237 237 changed.append(path)
238 238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 239 removed.append(path)
240 240 else:
241 241 raise NotImplementedError(
242 242 "Action %s not supported on path %s" % (
243 243 change.action, path))
244 244
245 245 changes = {
246 246 'added': added,
247 247 'changed': changed,
248 248 'removed': removed,
249 249 }
250 250 return changes
251 251
252 252 @reraise_safe_exceptions
253 253 def node_history(self, wire, path, revision, limit):
254 254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 255 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 257 cross_copies = False
258 258 repo = self._factory.repo(wire)
259 259 fsobj = svn.repos.fs(repo)
260 260 rev_root = svn.fs.revision_root(fsobj, revision)
261 261
262 262 history_revisions = []
263 263 history = svn.fs.node_history(rev_root, path)
264 264 history = svn.fs.history_prev(history, cross_copies)
265 265 while history:
266 266 __, node_revision = svn.fs.history_location(history)
267 267 history_revisions.append(node_revision)
268 268 if limit and len(history_revisions) >= limit:
269 269 break
270 270 history = svn.fs.history_prev(history, cross_copies)
271 271 return history_revisions
272 272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273 273
274 274 def node_properties(self, wire, path, revision):
275 275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 276 @self.region.conditional_cache_on_arguments(condition=cache_on)
277 277 def _node_properties(_repo_id, _path, _revision):
278 278 repo = self._factory.repo(wire)
279 279 fsobj = svn.repos.fs(repo)
280 280 rev_root = svn.fs.revision_root(fsobj, revision)
281 281 return svn.fs.node_proplist(rev_root, path)
282 282 return _node_properties(repo_id, path, revision)
283 283
284 284 def file_annotate(self, wire, path, revision):
285 285 abs_path = 'file://' + urllib.pathname2url(
286 286 vcspath.join(wire['path'], path))
287 287 file_uri = svn.core.svn_path_canonicalize(abs_path)
288 288
289 289 start_rev = svn_opt_revision_value_t(0)
290 290 peg_rev = svn_opt_revision_value_t(revision)
291 291 end_rev = peg_rev
292 292
293 293 annotations = []
294 294
295 295 def receiver(line_no, revision, author, date, line, pool):
296 296 annotations.append((line_no, revision, line))
297 297
298 298 # TODO: Cannot use blame5, missing typemap function in the swig code
299 299 try:
300 300 svn.client.blame2(
301 301 file_uri, peg_rev, start_rev, end_rev,
302 302 receiver, svn.client.create_context())
303 303 except svn.core.SubversionException as exc:
304 304 log.exception("Error during blame operation.")
305 305 raise Exception(
306 306 "Blame not supported or file does not exist at path %s. "
307 307 "Error %s." % (path, exc))
308 308
309 309 return annotations
310 310
311 311 def get_node_type(self, wire, path, revision=None):
312 312
313 313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 315 def _get_node_type(_repo_id, _path, _revision):
316 316 repo = self._factory.repo(wire)
317 317 fs_ptr = svn.repos.fs(repo)
318 318 if _revision is None:
319 319 _revision = svn.fs.youngest_rev(fs_ptr)
320 320 root = svn.fs.revision_root(fs_ptr, _revision)
321 321 node = svn.fs.check_path(root, path)
322 322 return NODE_TYPE_MAPPING.get(node, None)
323 323 return _get_node_type(repo_id, path, revision)
324 324
325 325 def get_nodes(self, wire, path, revision=None):
326 326
327 327 cache_on, context_uid, repo_id = self._cache_on(wire)
328 328 @self.region.conditional_cache_on_arguments(condition=cache_on)
329 329 def _get_nodes(_repo_id, _path, _revision):
330 330 repo = self._factory.repo(wire)
331 331 fsobj = svn.repos.fs(repo)
332 332 if _revision is None:
333 333 _revision = svn.fs.youngest_rev(fsobj)
334 334 root = svn.fs.revision_root(fsobj, _revision)
335 335 entries = svn.fs.dir_entries(root, path)
336 336 result = []
337 337 for entry_path, entry_info in entries.iteritems():
338 338 result.append(
339 339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 340 return result
341 341 return _get_nodes(repo_id, path, revision)
342 342
343 343 def get_file_content(self, wire, path, rev=None):
344 344 repo = self._factory.repo(wire)
345 345 fsobj = svn.repos.fs(repo)
346 346 if rev is None:
347 347 rev = svn.fs.youngest_revision(fsobj)
348 348 root = svn.fs.revision_root(fsobj, rev)
349 349 content = svn.core.Stream(svn.fs.file_contents(root, path))
350 350 return content.read()
351 351
352 352 def get_file_size(self, wire, path, revision=None):
353 353
354 354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 355 @self.region.conditional_cache_on_arguments(condition=cache_on)
356 356 def _get_file_size(_repo_id, _path, _revision):
357 357 repo = self._factory.repo(wire)
358 358 fsobj = svn.repos.fs(repo)
359 359 if _revision is None:
360 360 _revision = svn.fs.youngest_revision(fsobj)
361 361 root = svn.fs.revision_root(fsobj, _revision)
362 362 size = svn.fs.file_length(root, path)
363 363 return size
364 364 return _get_file_size(repo_id, path, revision)
365 365
366 366 def create_repository(self, wire, compatible_version=None):
367 367 log.info('Creating Subversion repository in path "%s"', wire['path'])
368 368 self._factory.repo(wire, create=True,
369 369 compatible_version=compatible_version)
370 370
371 371 def get_url_and_credentials(self, src_url):
372 372 obj = urlparse.urlparse(src_url)
373 373 username = obj.username or None
374 374 password = obj.password or None
375 375 return username, password, src_url
376 376
377 377 def import_remote_repository(self, wire, src_url):
378 378 repo_path = wire['path']
379 379 if not self.is_path_valid_repository(wire, repo_path):
380 380 raise Exception(
381 381 "Path %s is not a valid Subversion repository." % repo_path)
382 382
383 383 username, password, src_url = self.get_url_and_credentials(src_url)
384 384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
385 385 '--trust-server-cert-failures=unknown-ca']
386 386 if username and password:
387 387 rdump_cmd += ['--username', username, '--password', password]
388 388 rdump_cmd += [src_url]
389 389
390 390 rdump = subprocess.Popen(
391 391 rdump_cmd,
392 392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
393 393 load = subprocess.Popen(
394 394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
395 395
396 396 # TODO: johbo: This can be a very long operation, might be better
397 397 # to track some kind of status and provide an api to check if the
398 398 # import is done.
399 399 rdump.wait()
400 400 load.wait()
401 401
402 402 log.debug('Return process ended with code: %s', rdump.returncode)
403 403 if rdump.returncode != 0:
404 404 errors = rdump.stderr.read()
405 405 log.error('svnrdump dump failed: statuscode %s: message: %s',
406 406 rdump.returncode, errors)
407 407 reason = 'UNKNOWN'
408 408 if 'svnrdump: E230001:' in errors:
409 409 reason = 'INVALID_CERTIFICATE'
410 410
411 411 if reason == 'UNKNOWN':
412 412 reason = 'UNKNOWN:{}'.format(errors)
413 413 raise Exception(
414 414 'Failed to dump the remote repository from %s. Reason:%s' % (
415 415 src_url, reason))
416 416 if load.returncode != 0:
417 417 raise Exception(
418 418 'Failed to load the dump of remote repository from %s.' %
419 419 (src_url, ))
420 420
421 421 def commit(self, wire, message, author, timestamp, updated, removed):
422 422 assert isinstance(message, str)
423 423 assert isinstance(author, str)
424 424
425 425 repo = self._factory.repo(wire)
426 426 fsobj = svn.repos.fs(repo)
427 427
428 428 rev = svn.fs.youngest_rev(fsobj)
429 429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
430 430 txn_root = svn.fs.txn_root(txn)
431 431
432 432 for node in updated:
433 433 TxnNodeProcessor(node, txn_root).update()
434 434 for node in removed:
435 435 TxnNodeProcessor(node, txn_root).remove()
436 436
437 437 commit_id = svn.repos.fs_commit_txn(repo, txn)
438 438
439 439 if timestamp:
440 440 apr_time = apr_time_t(timestamp)
441 441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
442 442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
443 443
444 444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
445 445 return commit_id
446 446
447 447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
448 448 ignore_whitespace=False, context=3):
449 449
450 450 wire.update(cache=False)
451 451 repo = self._factory.repo(wire)
452 452 diff_creator = SvnDiffer(
453 453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
454 454 try:
455 455 return diff_creator.generate_diff()
456 456 except svn.core.SubversionException as e:
457 457 log.exception(
458 458 "Error during diff operation operation. "
459 459 "Path might not exist %s, %s" % (path1, path2))
460 460 return ""
461 461
462 462 @reraise_safe_exceptions
463 463 def is_large_file(self, wire, path):
464 464 return False
465 465
466 466 @reraise_safe_exceptions
467 467 def is_binary(self, wire, rev, path):
468 468 cache_on, context_uid, repo_id = self._cache_on(wire)
469 469
470 470 @self.region.conditional_cache_on_arguments(condition=cache_on)
471 471 def _is_binary(_repo_id, _rev, _path):
472 472 raw_bytes = self.get_file_content(wire, path, rev)
473 473 return raw_bytes and '\0' in raw_bytes
474 474
475 475 return _is_binary(repo_id, rev, path)
476 476
477 477 @reraise_safe_exceptions
478 478 def run_svn_command(self, wire, cmd, **opts):
479 479 path = wire.get('path', None)
480 480
481 481 if path and os.path.isdir(path):
482 482 opts['cwd'] = path
483 483
484 484 safe_call = False
485 485 if '_safe' in opts:
486 486 safe_call = True
487 487
488 488 svnenv = os.environ.copy()
489 489 svnenv.update(opts.pop('extra_env', {}))
490 490
491 491 _opts = {'env': svnenv, 'shell': False}
492 492
493 493 try:
494 494 _opts.update(opts)
495 495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
496 496
497 497 return ''.join(p), ''.join(p.error)
498 498 except (EnvironmentError, OSError) as err:
499 499 cmd = ' '.join(cmd) # human friendly CMD
500 500 tb_err = ("Couldn't run svn command (%s).\n"
501 501 "Original error was:%s\n"
502 502 "Call options:%s\n"
503 503 % (cmd, err, _opts))
504 504 log.exception(tb_err)
505 505 if safe_call:
506 506 return '', err
507 507 else:
508 508 raise exceptions.VcsException()(tb_err)
509 509
510 510 @reraise_safe_exceptions
511 511 def install_hooks(self, wire, force=False):
512 512 from vcsserver.hook_utils import install_svn_hooks
513 513 repo_path = wire['path']
514 514 binary_dir = settings.BINARY_DIR
515 515 executable = None
516 516 if binary_dir:
517 517 executable = os.path.join(binary_dir, 'python')
518 518 return install_svn_hooks(
519 519 repo_path, executable=executable, force_create=force)
520 520
521 521 @reraise_safe_exceptions
522 522 def get_hooks_info(self, wire):
523 523 from vcsserver.hook_utils import (
524 524 get_svn_pre_hook_version, get_svn_post_hook_version)
525 525 repo_path = wire['path']
526 526 return {
527 527 'pre_version': get_svn_pre_hook_version(repo_path),
528 528 'post_version': get_svn_post_hook_version(repo_path),
529 529 }
530 530
531 531
532 532 class SvnDiffer(object):
533 533 """
534 534 Utility to create diffs based on difflib and the Subversion api
535 535 """
536 536
537 537 binary_content = False
538 538
539 539 def __init__(
540 540 self, repo, src_rev, src_path, tgt_rev, tgt_path,
541 541 ignore_whitespace, context):
542 542 self.repo = repo
543 543 self.ignore_whitespace = ignore_whitespace
544 544 self.context = context
545 545
546 546 fsobj = svn.repos.fs(repo)
547 547
548 548 self.tgt_rev = tgt_rev
549 549 self.tgt_path = tgt_path or ''
550 550 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
551 551 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
552 552
553 553 self.src_rev = src_rev
554 554 self.src_path = src_path or self.tgt_path
555 555 self.src_root = svn.fs.revision_root(fsobj, src_rev)
556 556 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
557 557
558 558 self._validate()
559 559
560 560 def _validate(self):
561 561 if (self.tgt_kind != svn.core.svn_node_none and
562 562 self.src_kind != svn.core.svn_node_none and
563 563 self.src_kind != self.tgt_kind):
564 564 # TODO: johbo: proper error handling
565 565 raise Exception(
566 566 "Source and target are not compatible for diff generation. "
567 567 "Source type: %s, target type: %s" %
568 568 (self.src_kind, self.tgt_kind))
569 569
570 570 def generate_diff(self):
571 571 buf = StringIO.StringIO()
572 572 if self.tgt_kind == svn.core.svn_node_dir:
573 573 self._generate_dir_diff(buf)
574 574 else:
575 575 self._generate_file_diff(buf)
576 576 return buf.getvalue()
577 577
578 578 def _generate_dir_diff(self, buf):
579 579 editor = DiffChangeEditor()
580 580 editor_ptr, editor_baton = svn.delta.make_editor(editor)
581 581 svn.repos.dir_delta2(
582 582 self.src_root,
583 583 self.src_path,
584 584 '', # src_entry
585 585 self.tgt_root,
586 586 self.tgt_path,
587 587 editor_ptr, editor_baton,
588 588 authorization_callback_allow_all,
589 589 False, # text_deltas
590 590 svn.core.svn_depth_infinity, # depth
591 591 False, # entry_props
592 592 False, # ignore_ancestry
593 593 )
594 594
595 595 for path, __, change in sorted(editor.changes):
596 596 self._generate_node_diff(
597 597 buf, change, path, self.tgt_path, path, self.src_path)
598 598
599 599 def _generate_file_diff(self, buf):
600 600 change = None
601 601 if self.src_kind == svn.core.svn_node_none:
602 602 change = "add"
603 603 elif self.tgt_kind == svn.core.svn_node_none:
604 604 change = "delete"
605 605 tgt_base, tgt_path = vcspath.split(self.tgt_path)
606 606 src_base, src_path = vcspath.split(self.src_path)
607 607 self._generate_node_diff(
608 608 buf, change, tgt_path, tgt_base, src_path, src_base)
609 609
610 610 def _generate_node_diff(
611 611 self, buf, change, tgt_path, tgt_base, src_path, src_base):
612 612
613 613 if self.src_rev == self.tgt_rev and tgt_base == src_base:
614 614 # makes consistent behaviour with git/hg to return empty diff if
615 615 # we compare same revisions
616 616 return
617 617
618 618 tgt_full_path = vcspath.join(tgt_base, tgt_path)
619 619 src_full_path = vcspath.join(src_base, src_path)
620 620
621 621 self.binary_content = False
622 622 mime_type = self._get_mime_type(tgt_full_path)
623 623
624 624 if mime_type and not mime_type.startswith('text'):
625 625 self.binary_content = True
626 626 buf.write("=" * 67 + '\n')
627 627 buf.write("Cannot display: file marked as a binary type.\n")
628 628 buf.write("svn:mime-type = %s\n" % mime_type)
629 629 buf.write("Index: %s\n" % (tgt_path, ))
630 630 buf.write("=" * 67 + '\n')
631 631 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
632 632 'tgt_path': tgt_path})
633 633
634 634 if change == 'add':
635 635 # TODO: johbo: SVN is missing a zero here compared to git
636 636 buf.write("new file mode 10644\n")
637 637
638 638 #TODO(marcink): intro to binary detection of svn patches
639 639 # if self.binary_content:
640 640 # buf.write('GIT binary patch\n')
641 641
642 642 buf.write("--- /dev/null\t(revision 0)\n")
643 643 src_lines = []
644 644 else:
645 645 if change == 'delete':
646 646 buf.write("deleted file mode 10644\n")
647 647
648 648 #TODO(marcink): intro to binary detection of svn patches
649 649 # if self.binary_content:
650 650 # buf.write('GIT binary patch\n')
651 651
652 652 buf.write("--- a/%s\t(revision %s)\n" % (
653 653 src_path, self.src_rev))
654 654 src_lines = self._svn_readlines(self.src_root, src_full_path)
655 655
656 656 if change == 'delete':
657 657 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
658 658 tgt_lines = []
659 659 else:
660 660 buf.write("+++ b/%s\t(revision %s)\n" % (
661 661 tgt_path, self.tgt_rev))
662 662 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
663 663
664 664 if not self.binary_content:
665 665 udiff = svn_diff.unified_diff(
666 666 src_lines, tgt_lines, context=self.context,
667 667 ignore_blank_lines=self.ignore_whitespace,
668 668 ignore_case=False,
669 669 ignore_space_changes=self.ignore_whitespace)
670 670 buf.writelines(udiff)
671 671
672 672 def _get_mime_type(self, path):
673 673 try:
674 674 mime_type = svn.fs.node_prop(
675 675 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
676 676 except svn.core.SubversionException:
677 677 mime_type = svn.fs.node_prop(
678 678 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
679 679 return mime_type
680 680
681 681 def _svn_readlines(self, fs_root, node_path):
682 682 if self.binary_content:
683 683 return []
684 684 node_kind = svn.fs.check_path(fs_root, node_path)
685 685 if node_kind not in (
686 686 svn.core.svn_node_file, svn.core.svn_node_symlink):
687 687 return []
688 688 content = svn.core.Stream(
689 689 svn.fs.file_contents(fs_root, node_path)).read()
690 690 return content.splitlines(True)
691 691
692 692
693 693 class DiffChangeEditor(svn.delta.Editor):
694 694 """
695 695 Records changes between two given revisions
696 696 """
697 697
698 698 def __init__(self):
699 699 self.changes = []
700 700
701 701 def delete_entry(self, path, revision, parent_baton, pool=None):
702 702 self.changes.append((path, None, 'delete'))
703 703
704 704 def add_file(
705 705 self, path, parent_baton, copyfrom_path, copyfrom_revision,
706 706 file_pool=None):
707 707 self.changes.append((path, 'file', 'add'))
708 708
709 709 def open_file(self, path, parent_baton, base_revision, file_pool=None):
710 710 self.changes.append((path, 'file', 'change'))
711 711
712 712
713 713 def authorization_callback_allow_all(root, path, pool):
714 714 return True
715 715
716 716
717 717 class TxnNodeProcessor(object):
718 718 """
719 719 Utility to process the change of one node within a transaction root.
720 720
721 721 It encapsulates the knowledge of how to add, update or remove
722 722 a node for a given transaction root. The purpose is to support the method
723 723 `SvnRemote.commit`.
724 724 """
725 725
726 726 def __init__(self, node, txn_root):
727 727 assert isinstance(node['path'], str)
728 728
729 729 self.node = node
730 730 self.txn_root = txn_root
731 731
732 732 def update(self):
733 733 self._ensure_parent_dirs()
734 734 self._add_file_if_node_does_not_exist()
735 735 self._update_file_content()
736 736 self._update_file_properties()
737 737
738 738 def remove(self):
739 739 svn.fs.delete(self.txn_root, self.node['path'])
740 740 # TODO: Clean up directory if empty
741 741
742 742 def _ensure_parent_dirs(self):
743 743 curdir = vcspath.dirname(self.node['path'])
744 744 dirs_to_create = []
745 745 while not self._svn_path_exists(curdir):
746 746 dirs_to_create.append(curdir)
747 747 curdir = vcspath.dirname(curdir)
748 748
749 749 for curdir in reversed(dirs_to_create):
750 750 log.debug('Creating missing directory "%s"', curdir)
751 751 svn.fs.make_dir(self.txn_root, curdir)
752 752
753 753 def _svn_path_exists(self, path):
754 754 path_status = svn.fs.check_path(self.txn_root, path)
755 755 return path_status != svn.core.svn_node_none
756 756
757 757 def _add_file_if_node_does_not_exist(self):
758 758 kind = svn.fs.check_path(self.txn_root, self.node['path'])
759 759 if kind == svn.core.svn_node_none:
760 760 svn.fs.make_file(self.txn_root, self.node['path'])
761 761
762 762 def _update_file_content(self):
763 763 assert isinstance(self.node['content'], str)
764 764 handler, baton = svn.fs.apply_textdelta(
765 765 self.txn_root, self.node['path'], None, None)
766 766 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
767 767
768 768 def _update_file_properties(self):
769 769 properties = self.node.get('properties', {})
770 770 for key, value in properties.iteritems():
771 771 svn.fs.change_node_prop(
772 772 self.txn_root, self.node['path'], key, value)
773 773
774 774
775 775 def apr_time_t(timestamp):
776 776 """
777 777 Convert a Python timestamp into APR timestamp type apr_time_t
778 778 """
779 779 return timestamp * 1E6
780 780
781 781
782 782 def svn_opt_revision_value_t(num):
783 783 """
784 784 Put `num` into a `svn_opt_revision_value_t` structure.
785 785 """
786 786 value = svn.core.svn_opt_revision_value_t()
787 787 value.number = num
788 788 revision = svn.core.svn_opt_revision_t()
789 789 revision.kind = svn.core.svn_opt_revision_number
790 790 revision.value = value
791 791 return revision
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,57 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
23 23 def pytest_addoption(parser):
24 24 parser.addoption(
25 25 '--repeat', type=int, default=100,
26 26 help="Number of repetitions in performance tests.")
27 27
28 28
29 29 @pytest.fixture(scope='session')
30 30 def repeat(request):
31 31 """
32 32 The number of repetitions is based on this fixture.
33 33
34 34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
35 35 tests are not too slow in our default test suite.
36 36 """
37 37 return request.config.getoption('--repeat')
38 38
39 39
40 40 @pytest.fixture(scope='session')
41 41 def vcsserver_port(request):
42 42 port = get_available_port()
43 43 print('Using vcsserver port %s' % (port, ))
44 44 return port
45 45
46 46
47 47 def get_available_port():
48 48 family = socket.AF_INET
49 49 socktype = socket.SOCK_STREAM
50 50 host = '127.0.0.1'
51 51
52 52 mysocket = socket.socket(family, socktype)
53 53 mysocket.bind((host, 0))
54 54 port = mysocket.getsockname()[1]
55 55 mysocket.close()
56 56 del mysocket
57 57 return port
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
25 25 class ContextINI(object):
26 26 """
27 27 Allows to create a new test.ini file as a copy of existing one with edited
28 28 data. If existing file is not present, it creates a new one. Example usage::
29 29
30 30 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 31 print 'vcsserver --config=%s' % new_test_ini
32 32 """
33 33
34 34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 35 destroy=True):
36 36 self.ini_file_path = ini_file_path
37 37 self.ini_params = ini_params
38 38 self.new_path = None
39 39 self.new_path_prefix = new_file_prefix or 'test'
40 40 self.destroy = destroy
41 41
42 42 def __enter__(self):
43 43 _, pref = tempfile.mkstemp()
44 44 loc = tempfile.gettempdir()
45 45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 46 pref, self.new_path_prefix, self.ini_file_path))
47 47
48 48 # copy ini file and modify according to the params, if we re-use a file
49 49 if os.path.isfile(self.ini_file_path):
50 50 shutil.copy(self.ini_file_path, self.new_path)
51 51 else:
52 52 # create new dump file for configObj to write to.
53 53 with open(self.new_path, 'wb'):
54 54 pass
55 55
56 56 config = configobj.ConfigObj(
57 57 self.new_path, file_error=True, write_empty_values=True)
58 58
59 59 for data in self.ini_params:
60 60 section, ini_params = data.items()[0]
61 61 key, val = ini_params.items()[0]
62 62 if section not in config:
63 63 config[section] = {}
64 64 config[section][key] = val
65 65
66 66 config.write()
67 67 return self.new_path
68 68
69 69 def __exit__(self, exc_type, exc_val, exc_tb):
70 70 if self.destroy:
71 71 os.remove(self.new_path)
72 72
73 73
74 74 def no_newline_id_generator(test_name):
75 75 """
76 76 Generates a test name without spaces or newlines characters. Used for
77 77 nicer output of progress of test
78 78 """
79 79 org_name = test_name
80 80 test_name = str(test_name)\
81 81 .replace('\n', '_N') \
82 82 .replace('\r', '_N') \
83 83 .replace('\t', '_T') \
84 84 .replace(' ', '_S')
85 85
86 86 return test_name or 'test-with-empty-name'
@@ -1,160 +1,160 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.pull(
82 82 wire={}, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 102
103 103 class TestReraiseSafeExceptions(object):
104 104
105 105 def test_method_decorated_with_reraise_safe_exceptions(self):
106 106 factory = Mock()
107 107 git_remote = git.GitRemote(factory)
108 108
109 109 def fake_function():
110 110 return None
111 111
112 112 decorator = git.reraise_safe_exceptions(fake_function)
113 113
114 114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 115 for method_name, method in methods:
116 116 if not method_name.startswith('_'):
117 117 assert method.im_func.__code__ == decorator.__code__
118 118
119 119 @pytest.mark.parametrize('side_effect, expected_type', [
120 120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
121 121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
122 122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
123 123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
124 124 (dulwich.errors.HangupException(), 'error'),
125 125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
126 126 ])
127 127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
128 128 @git.reraise_safe_exceptions
129 129 def fake_method():
130 130 raise side_effect
131 131
132 132 with pytest.raises(Exception) as exc_info:
133 133 fake_method()
134 134 assert type(exc_info.value) == Exception
135 135 assert exc_info.value._vcs_kind == expected_type
136 136
137 137
138 138 class TestDulwichRepoWrapper(object):
139 139 def test_calls_close_on_delete(self):
140 140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
141 141 with isdir_patcher:
142 142 repo = git.Repo('/tmp/abcde')
143 143 with patch.object(git.DulwichRepo, 'close') as close_mock:
144 144 del repo
145 145 close_mock.assert_called_once_with()
146 146
147 147
148 148 class TestGitFactory(object):
149 149 def test_create_repo_returns_dulwich_wrapper(self):
150 150
151 151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
152 152 mock.side_effect = {'repo_objects': ''}
153 153 factory = git.GitFactory()
154 154 wire = {
155 155 'path': '/tmp/abcde'
156 156 }
157 157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
158 158 with isdir_patcher:
159 159 result = factory._create_repo(wire, True)
160 160 assert isinstance(result, git.Repo)
@@ -1,108 +1,108 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 29 class TestDiff(object):
30 30 def test_raising_safe_exception_when_lookup_failed(self):
31 31
32 32 factory = Mock()
33 33 hg_remote = hg.HgRemote(factory)
34 34 with patch('mercurial.patch.diff') as diff_mock:
35 35 diff_mock.side_effect = LookupError(
36 36 'deadbeef', 'index', 'message')
37 37 with pytest.raises(Exception) as exc_info:
38 38 hg_remote.diff(
39 39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 40 file_filter=None, opt_git=True, opt_ignorews=True,
41 41 context=3)
42 42 assert type(exc_info.value) == Exception
43 43 assert exc_info.value._vcs_kind == 'lookup'
44 44
45 45
46 46 class TestReraiseSafeExceptions(object):
47 47 def test_method_decorated_with_reraise_safe_exceptions(self):
48 48 factory = Mock()
49 49 hg_remote = hg.HgRemote(factory)
50 50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
51 51 decorator = hg.reraise_safe_exceptions(None)
52 52 for method_name, method in methods:
53 53 if not method_name.startswith('_'):
54 54 assert method.im_func.__code__ == decorator.__code__
55 55
56 56 @pytest.mark.parametrize('side_effect, expected_type', [
57 57 (hgcompat.Abort(), 'abort'),
58 58 (hgcompat.InterventionRequired(), 'abort'),
59 59 (hgcompat.RepoLookupError(), 'lookup'),
60 60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61 61 (hgcompat.RepoError(), 'error'),
62 62 (hgcompat.RequirementError(), 'requirement'),
63 63 ])
64 64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
65 65 @hg.reraise_safe_exceptions
66 66 def fake_method():
67 67 raise side_effect
68 68
69 69 with pytest.raises(Exception) as exc_info:
70 70 fake_method()
71 71 assert type(exc_info.value) == Exception
72 72 assert exc_info.value._vcs_kind == expected_type
73 73
74 74 def test_keeps_original_traceback(self):
75 75 @hg.reraise_safe_exceptions
76 76 def fake_method():
77 77 try:
78 78 raise hgcompat.Abort()
79 79 except:
80 80 self.original_traceback = traceback.format_tb(
81 81 sys.exc_info()[2])
82 82 raise
83 83
84 84 try:
85 85 fake_method()
86 86 except Exception:
87 87 new_traceback = traceback.format_tb(sys.exc_info()[2])
88 88
89 89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 90 assert new_traceback_tail == self.original_traceback
91 91
92 92 def test_maps_unknow_exceptions_to_unhandled(self):
93 93 @hg.reraise_safe_exceptions
94 94 def stub_method():
95 95 raise ValueError('stub')
96 96
97 97 with pytest.raises(Exception) as exc_info:
98 98 stub_method()
99 99 assert exc_info.value._vcs_kind == 'unhandled'
100 100
101 101 def test_does_not_map_known_exceptions(self):
102 102 @hg.reraise_safe_exceptions
103 103 def stub_method():
104 104 raise exceptions.LookupException()('stub')
105 105
106 106 with pytest.raises(Exception) as exc_info:
107 107 stub_method()
108 108 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,124 +1,124 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import hgcompat, hgpatches
22 22
23 23
24 24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25 25
26 26
27 27 def test_patch_largefiles_capabilities_applies_patch(
28 28 patched_capabilities):
29 29 lfproto = hgcompat.largefiles.proto
30 30 hgpatches.patch_largefiles_capabilities()
31 31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
32 32
33 33
34 34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 36 orig_capabilities):
37 37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
38 38 hgcompat.largefiles.proto, stub_extensions)
39 39
40 40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
41 41
42 42 stub_extensions.assert_called_once_with(stub_ui)
43 43 assert LARGEFILES_CAPABILITY not in caps
44 44
45 45
46 46 def test_dynamic_capabilities_ignores_updated_capabilities(
47 47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 48 orig_capabilities):
49 49 stub_extensions.return_value = [('largefiles', mock.Mock())]
50 50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
51 51 hgcompat.largefiles.proto, stub_extensions)
52 52
53 53 # This happens when the extension is loaded for the first time, important
54 54 # to ensure that an updated function is correctly picked up.
55 55 hgcompat.largefiles.proto._capabilities = mock.Mock(
56 56 side_effect=Exception('Must not be called'))
57 57
58 58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
59 59
60 60
61 61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
62 62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 63 orig_capabilities):
64 64 stub_extensions.return_value = [('largefiles', mock.Mock())]
65 65
66 66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
67 67 hgcompat.largefiles.proto, stub_extensions)
68 68
69 69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
70 70
71 71 stub_extensions.assert_called_once_with(stub_ui)
72 72 assert LARGEFILES_CAPABILITY in caps
73 73
74 74
75 75 def test_hgsubversion_import():
76 76 from hgsubversion import svnrepo
77 77 assert svnrepo
78 78
79 79
80 80 @pytest.fixture
81 81 def patched_capabilities(request):
82 82 """
83 83 Patch in `capabilitiesorig` and restore both capability functions.
84 84 """
85 85 lfproto = hgcompat.largefiles.proto
86 86 orig_capabilities = lfproto._capabilities
87 87
88 88 @request.addfinalizer
89 89 def restore():
90 90 lfproto._capabilities = orig_capabilities
91 91
92 92
93 93 @pytest.fixture
94 94 def stub_repo(stub_ui):
95 95 repo = mock.Mock()
96 96 repo.ui = stub_ui
97 97 return repo
98 98
99 99
100 100 @pytest.fixture
101 101 def stub_proto(stub_ui):
102 102 proto = mock.Mock()
103 103 proto.ui = stub_ui
104 104 return proto
105 105
106 106
107 107 @pytest.fixture
108 108 def orig_capabilities():
109 109 from mercurial.wireprotov1server import wireprotocaps
110 110
111 111 def _capabilities(repo, proto):
112 112 return wireprotocaps
113 113 return _capabilities
114 114
115 115
116 116 @pytest.fixture
117 117 def stub_ui():
118 118 return hgcompat.ui.ui()
119 119
120 120
121 121 @pytest.fixture
122 122 def stub_extensions():
123 123 extensions = mock.Mock(return_value=tuple())
124 124 return extensions
@@ -1,241 +1,241 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import contextlib
19 19 import io
20 20 import threading
21 21 from BaseHTTPServer import BaseHTTPRequestHandler
22 22 from SocketServer import TCPServer
23 23
24 24 import mercurial.ui
25 25 import mock
26 26 import pytest
27 27 import simplejson as json
28 28
29 29 from vcsserver import hooks
30 30
31 31
32 32 def get_hg_ui(extras=None):
33 33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 34 extras = extras or {}
35 35 required_extras = {
36 36 'username': '',
37 37 'repository': '',
38 38 'locked_by': '',
39 39 'scm': '',
40 40 'make_lock': '',
41 41 'action': '',
42 42 'ip': '',
43 43 'hooks_uri': 'fake_hooks_uri',
44 44 }
45 45 required_extras.update(extras)
46 46 hg_ui = mercurial.ui.ui()
47 47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48 48
49 49 return hg_ui
50 50
51 51
52 52 def test_git_pre_receive_is_disabled():
53 53 extras = {'hooks': ['pull']}
54 54 response = hooks.git_pre_receive(None, None,
55 55 {'RC_SCM_DATA': json.dumps(extras)})
56 56
57 57 assert response == 0
58 58
59 59
60 60 def test_git_post_receive_is_disabled():
61 61 extras = {'hooks': ['pull']}
62 62 response = hooks.git_post_receive(None, '',
63 63 {'RC_SCM_DATA': json.dumps(extras)})
64 64
65 65 assert response == 0
66 66
67 67
68 68 def test_git_post_receive_calls_repo_size():
69 69 extras = {'hooks': ['push', 'repo_size']}
70 70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 71 hooks.git_post_receive(
72 72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 73 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
74 74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 75 expected_calls = [
76 76 mock.call('repo_size', extras, mock.ANY),
77 77 mock.call('post_push', extras, mock.ANY),
78 78 ]
79 79 assert call_hook_mock.call_args_list == expected_calls
80 80
81 81
82 82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 83 extras = {'hooks': ['push']}
84 84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 85 hooks.git_post_receive(
86 86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 87 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
88 88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 89 expected_calls = [
90 90 mock.call('post_push', extras, mock.ANY)
91 91 ]
92 92 assert call_hook_mock.call_args_list == expected_calls
93 93
94 94
95 95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 96 extras = {'hooks': ['push', 'repo_size']}
97 97 status = 0
98 98
99 99 def side_effect(name, *args, **kwargs):
100 100 if name == 'repo_size':
101 101 raise Exception('Fake exception')
102 102 else:
103 103 return status
104 104
105 105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 106 call_hook_mock.side_effect = side_effect
107 107 result = hooks.git_post_receive(
108 108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 109 assert result == status
110 110
111 111
112 112 def test_git_pre_pull_is_disabled():
113 113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114 114
115 115
116 116 def test_git_post_pull_is_disabled():
117 117 assert (
118 118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119 119
120 120
121 121 class TestGetHooksClient(object):
122 122
123 123 def test_returns_http_client_when_protocol_matches(self):
124 124 hooks_uri = 'localhost:8000'
125 125 result = hooks._get_hooks_client({
126 126 'hooks_uri': hooks_uri,
127 127 'hooks_protocol': 'http'
128 128 })
129 129 assert isinstance(result, hooks.HooksHttpClient)
130 130 assert result.hooks_uri == hooks_uri
131 131
132 132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 133 fake_module = mock.Mock()
134 134 import_patcher = mock.patch.object(
135 135 hooks.importlib, 'import_module', return_value=fake_module)
136 136 fake_module_name = 'fake.module'
137 137 with import_patcher as import_mock:
138 138 result = hooks._get_hooks_client(
139 139 {'hooks_module': fake_module_name})
140 140
141 141 import_mock.assert_called_once_with(fake_module_name)
142 142 assert isinstance(result, hooks.HooksDummyClient)
143 143 assert result._hooks_module == fake_module
144 144
145 145
146 146 class TestHooksHttpClient(object):
147 147 def test_init_sets_hooks_uri(self):
148 148 uri = 'localhost:3000'
149 149 client = hooks.HooksHttpClient(uri)
150 150 assert client.hooks_uri == uri
151 151
152 152 def test_serialize_returns_json_string(self):
153 153 client = hooks.HooksHttpClient('localhost:3000')
154 154 hook_name = 'test'
155 155 extras = {
156 156 'first': 1,
157 157 'second': 'two'
158 158 }
159 159 result = client._serialize(hook_name, extras)
160 160 expected_result = json.dumps({
161 161 'method': hook_name,
162 162 'extras': extras
163 163 })
164 164 assert result == expected_result
165 165
166 166 def test_call_queries_http_server(self, http_mirror):
167 167 client = hooks.HooksHttpClient(http_mirror.uri)
168 168 hook_name = 'test'
169 169 extras = {
170 170 'first': 1,
171 171 'second': 'two'
172 172 }
173 173 result = client(hook_name, extras)
174 174 expected_result = {
175 175 'method': hook_name,
176 176 'extras': extras
177 177 }
178 178 assert result == expected_result
179 179
180 180
181 181 class TestHooksDummyClient(object):
182 182 def test_init_imports_hooks_module(self):
183 183 hooks_module_name = 'rhodecode.fake.module'
184 184 hooks_module = mock.MagicMock()
185 185
186 186 import_patcher = mock.patch.object(
187 187 hooks.importlib, 'import_module', return_value=hooks_module)
188 188 with import_patcher as import_mock:
189 189 client = hooks.HooksDummyClient(hooks_module_name)
190 190 import_mock.assert_called_once_with(hooks_module_name)
191 191 assert client._hooks_module == hooks_module
192 192
193 193 def test_call_returns_hook_result(self):
194 194 hooks_module_name = 'rhodecode.fake.module'
195 195 hooks_module = mock.MagicMock()
196 196 import_patcher = mock.patch.object(
197 197 hooks.importlib, 'import_module', return_value=hooks_module)
198 198 with import_patcher:
199 199 client = hooks.HooksDummyClient(hooks_module_name)
200 200
201 201 result = client('post_push', {})
202 202 hooks_module.Hooks.assert_called_once_with()
203 203 assert result == hooks_module.Hooks().__enter__().post_push()
204 204
205 205
206 206 @pytest.fixture
207 207 def http_mirror(request):
208 208 server = MirrorHttpServer()
209 209 request.addfinalizer(server.stop)
210 210 return server
211 211
212 212
213 213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 214 def do_POST(self):
215 215 length = int(self.headers['Content-Length'])
216 216 body = self.rfile.read(length).decode('utf-8')
217 217 self.send_response(200)
218 218 self.end_headers()
219 219 self.wfile.write(body)
220 220
221 221
222 222 class MirrorHttpServer(object):
223 223 ip_address = '127.0.0.1'
224 224 port = 0
225 225
226 226 def __init__(self):
227 227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 228 _, self.port = self._daemon.server_address
229 229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 230 self._thread.daemon = True
231 231 self._thread.start()
232 232
233 233 def stop(self):
234 234 self._daemon.shutdown()
235 235 self._thread.join()
236 236 self._daemon = None
237 237 self._thread = None
238 238
239 239 @property
240 240 def uri(self):
241 241 return '{}:{}'.format(self.ip_address, self.port)
@@ -1,206 +1,206 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import stat
21 21 import pytest
22 22 import vcsserver
23 23 import tempfile
24 24 from vcsserver import hook_utils
25 25 from vcsserver.tests.fixture import no_newline_id_generator
26 26 from vcsserver.utils import AttributeDict
27 27
28 28
29 29 class TestCheckRhodecodeHook(object):
30 30
31 31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 33 with open(hook, 'wb') as f:
34 34 f.write('dummy test')
35 35 result = hook_utils.check_rhodecode_hook(hook)
36 36 assert result is False
37 37
38 38 def test_returns_true_when_no_hook_file_found(self, tmpdir):
39 39 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
40 40 result = hook_utils.check_rhodecode_hook(hook)
41 41 assert result
42 42
43 43 @pytest.mark.parametrize("file_content, expected_result", [
44 44 ("RC_HOOK_VER = '3.3.3'\n", True),
45 45 ("RC_HOOK = '3.3.3'\n", False),
46 46 ], ids=no_newline_id_generator)
47 47 def test_signatures(self, file_content, expected_result, tmpdir):
48 48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 49 with open(hook, 'wb') as f:
50 50 f.write(file_content)
51 51
52 52 result = hook_utils.check_rhodecode_hook(hook)
53 53
54 54 assert result is expected_result
55 55
56 56
57 57 class BaseInstallHooks(object):
58 58 HOOK_FILES = ()
59 59
60 60 def _check_hook_file_mode(self, file_path):
61 61 assert os.path.exists(file_path), 'path %s missing' % file_path
62 62 stat_info = os.stat(file_path)
63 63
64 64 file_mode = stat.S_IMODE(stat_info.st_mode)
65 65 expected_mode = int('755', 8)
66 66 assert expected_mode == file_mode
67 67
68 68 def _check_hook_file_content(self, file_path, executable):
69 69 executable = executable or sys.executable
70 70 with open(file_path, 'rt') as hook_file:
71 71 content = hook_file.read()
72 72
73 73 expected_env = '#!{}'.format(executable)
74 74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 75 vcsserver.__version__)
76 76 assert content.strip().startswith(expected_env)
77 77 assert expected_rc_version in content
78 78
79 79 def _create_fake_hook(self, file_path, content):
80 80 with open(file_path, 'w') as hook_file:
81 81 hook_file.write(content)
82 82
83 83 def create_dummy_repo(self, repo_type):
84 84 tmpdir = tempfile.mkdtemp()
85 85 repo = AttributeDict()
86 86 if repo_type == 'git':
87 87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 88 os.makedirs(repo.path)
89 89 os.makedirs(os.path.join(repo.path, 'hooks'))
90 90 repo.bare = True
91 91
92 92 elif repo_type == 'svn':
93 93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 94 os.makedirs(repo.path)
95 95 os.makedirs(os.path.join(repo.path, 'hooks'))
96 96
97 97 return repo
98 98
99 99 def check_hooks(self, repo_path, repo_bare=True):
100 100 for file_name in self.HOOK_FILES:
101 101 if repo_bare:
102 102 file_path = os.path.join(repo_path, 'hooks', file_name)
103 103 else:
104 104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 105 self._check_hook_file_mode(file_path)
106 106 self._check_hook_file_content(file_path, sys.executable)
107 107
108 108
109 109 class TestInstallGitHooks(BaseInstallHooks):
110 110 HOOK_FILES = ('pre-receive', 'post-receive')
111 111
112 112 def test_hooks_are_installed(self):
113 113 repo = self.create_dummy_repo('git')
114 114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 115 assert result
116 116 self.check_hooks(repo.path, repo.bare)
117 117
118 118 def test_hooks_are_replaced(self):
119 119 repo = self.create_dummy_repo('git')
120 120 hooks_path = os.path.join(repo.path, 'hooks')
121 121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 122 self._create_fake_hook(
123 123 file_path, content="RC_HOOK_VER = 'abcde'\n")
124 124
125 125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 126 assert result
127 127 self.check_hooks(repo.path, repo.bare)
128 128
129 129 def test_non_rc_hooks_are_not_replaced(self):
130 130 repo = self.create_dummy_repo('git')
131 131 hooks_path = os.path.join(repo.path, 'hooks')
132 132 non_rc_content = 'echo "non rc hook"\n'
133 133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 134 self._create_fake_hook(
135 135 file_path, content=non_rc_content)
136 136
137 137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 138 assert result
139 139
140 140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 141 with open(file_path, 'rt') as hook_file:
142 142 content = hook_file.read()
143 143 assert content == non_rc_content
144 144
145 145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 146 repo = self.create_dummy_repo('git')
147 147 hooks_path = os.path.join(repo.path, 'hooks')
148 148 non_rc_content = 'echo "non rc hook"\n'
149 149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 150 self._create_fake_hook(
151 151 file_path, content=non_rc_content)
152 152
153 153 result = hook_utils.install_git_hooks(
154 154 repo.path, repo.bare, force_create=True)
155 155 assert result
156 156 self.check_hooks(repo.path, repo.bare)
157 157
158 158
159 159 class TestInstallSvnHooks(BaseInstallHooks):
160 160 HOOK_FILES = ('pre-commit', 'post-commit')
161 161
162 162 def test_hooks_are_installed(self):
163 163 repo = self.create_dummy_repo('svn')
164 164 result = hook_utils.install_svn_hooks(repo.path)
165 165 assert result
166 166 self.check_hooks(repo.path)
167 167
168 168 def test_hooks_are_replaced(self):
169 169 repo = self.create_dummy_repo('svn')
170 170 hooks_path = os.path.join(repo.path, 'hooks')
171 171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 172 self._create_fake_hook(
173 173 file_path, content="RC_HOOK_VER = 'abcde'\n")
174 174
175 175 result = hook_utils.install_svn_hooks(repo.path)
176 176 assert result
177 177 self.check_hooks(repo.path)
178 178
179 179 def test_non_rc_hooks_are_not_replaced(self):
180 180 repo = self.create_dummy_repo('svn')
181 181 hooks_path = os.path.join(repo.path, 'hooks')
182 182 non_rc_content = 'echo "non rc hook"\n'
183 183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 184 self._create_fake_hook(
185 185 file_path, content=non_rc_content)
186 186
187 187 result = hook_utils.install_svn_hooks(repo.path)
188 188 assert result
189 189
190 190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 191 with open(file_path, 'rt') as hook_file:
192 192 content = hook_file.read()
193 193 assert content == non_rc_content
194 194
195 195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 196 repo = self.create_dummy_repo('svn')
197 197 hooks_path = os.path.join(repo.path, 'hooks')
198 198 non_rc_content = 'echo "non rc hook"\n'
199 199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 200 self._create_fake_hook(
201 201 file_path, content=non_rc_content)
202 202
203 203 result = hook_utils.install_svn_hooks(
204 204 repo.path, force_create=True)
205 205 assert result
206 206 self.check_hooks(repo.path, )
@@ -1,57 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import http_main
22 22 from vcsserver.base import obfuscate_qs
23 23
24 24
25 25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 28 http_main.main({})
29 29 patch_largefiles_capabilities.assert_called_once_with()
30 30
31 31
32 32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 34 @mock.patch(
35 35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 36 mock.Mock(side_effect=Exception("Must not be called")))
37 37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 38 http_main.main({})
39 39
40 40
41 41 @pytest.mark.parametrize('given, expected', [
42 42 ('bad', 'bad'),
43 43 ('query&foo=bar', 'query&foo=bar'),
44 44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 45 ('a;b;c;query&foo=bar&auth_token=secret',
46 46 'a&b&c&query&foo=bar&auth_token=*****'),
47 47 ('', ''),
48 48 (None, None),
49 49 ('foo=bar', 'foo=bar'),
50 50 ('auth_token=secret', 'auth_token=*****'),
51 51 ('auth_token=secret&api_key=secret2',
52 52 'auth_token=*****&api_key=*****'),
53 53 ('auth_token=secret&api_key=secret2&param=value',
54 54 'auth_token=*****&api_key=*****&param=value'),
55 55 ])
56 56 def test_obfuscate_qs(given, expected):
57 57 assert expected == obfuscate_qs(given)
@@ -1,249 +1,249 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19
20 20 import dulwich.protocol
21 21 import mock
22 22 import pytest
23 23 import webob
24 24 import webtest
25 25
26 26 from vcsserver import hooks, pygrack
27 27
28 28 # pylint: disable=redefined-outer-name,protected-access
29 29
30 30
31 31 @pytest.fixture()
32 32 def pygrack_instance(tmpdir):
33 33 """
34 34 Creates a pygrack app instance.
35 35
36 36     Right now, it is not very helpful regarding the passed directory.
37 37 It just contains the required folders to pass the signature test.
38 38 """
39 39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 40 tmpdir.mkdir(dir_name)
41 41
42 42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43 43
44 44
45 45 @pytest.fixture()
46 46 def pygrack_app(pygrack_instance):
47 47 """
48 48 Creates a pygrack app wrapped in webtest.TestApp.
49 49 """
50 50 return webtest.TestApp(pygrack_instance)
51 51
52 52
53 53 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 55 expect_errors=True)
56 56
57 57 assert response.status_int == 403
58 58
59 59
60 60 def test_invalid_endpoint_returns_403(pygrack_app):
61 61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62 62
63 63 assert response.status_int == 403
64 64
65 65
66 66 @pytest.mark.parametrize('sideband', [
67 67 'side-band-64k',
68 68 'side-band',
69 69 'side-band no-progress',
70 70 ])
71 71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 72 request = ''.join([
73 73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 74 'multi_ack %s ofs-delta\n' % sideband,
75 75 '0000',
76 76 '0009done\n',
77 77 ])
78 78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 79 return_value=hooks.HookResponse(1, 'foo')):
80 80 response = pygrack_app.post(
81 81 '/git-upload-pack', params=request,
82 82 content_type='application/x-git-upload-pack')
83 83
84 84 data = io.BytesIO(response.body)
85 85 proto = dulwich.protocol.Protocol(data.read, None)
86 86 packets = list(proto.read_pkt_seq())
87 87
88 88 expected_packets = [
89 89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
91 91 ]
92 92 assert packets == expected_packets
93 93
94 94
95 95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 96 request = ''.join([
97 97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 98 'multi_ack ofs-delta\n'
99 99 '0000',
100 100 '0009done\n',
101 101 ])
102 102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 103 return_value=hooks.HookResponse(1, 'foo')):
104 104 response = pygrack_app.post(
105 105 '/git-upload-pack', params=request,
106 106 content_type='application/x-git-upload-pack')
107 107
108 108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109 109
110 110
111 111 def test_pull_has_hook_messages(pygrack_app):
112 112 request = ''.join([
113 113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 114 'multi_ack side-band-64k ofs-delta\n'
115 115 '0000',
116 116 '0009done\n',
117 117 ])
118 118 with mock.patch('vcsserver.hooks.git_pre_pull',
119 119 return_value=hooks.HookResponse(0, 'foo')):
120 120 with mock.patch('vcsserver.hooks.git_post_pull',
121 121 return_value=hooks.HookResponse(1, 'bar')):
122 122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 123 return_value=['0008NAK\n0009subp\n0000']):
124 124 response = pygrack_app.post(
125 125 '/git-upload-pack', params=request,
126 126 content_type='application/x-git-upload-pack')
127 127
128 128 data = io.BytesIO(response.body)
129 129 proto = dulwich.protocol.Protocol(data.read, None)
130 130 packets = list(proto.read_pkt_seq())
131 131
132 132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133 133
134 134
135 135 def test_get_want_capabilities(pygrack_instance):
136 136 data = io.BytesIO(
137 137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
139 139
140 140 request = webob.Request({
141 141 'wsgi.input': data,
142 142 'REQUEST_METHOD': 'POST',
143 143 'webob.is_body_seekable': True
144 144 })
145 145
146 146 capabilities = pygrack_instance._get_want_capabilities(request)
147 147
148 148 assert capabilities == frozenset(
149 149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 150 assert data.tell() == 0
151 151
152 152
153 153 @pytest.mark.parametrize('data,capabilities,expected', [
154 154 ('foo', [], []),
155 155 ('', ['side-band-64k'], []),
156 156 ('', ['side-band'], []),
157 157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 158 ('foo', ['side-band'], ['0008\x02foo']),
159 159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
163 163 ], ids=[
164 164 'foo-empty',
165 165 'empty-64k', 'empty',
166 166 'foo-64k', 'foo',
167 167 'f-1000-64k', 'f-1000',
168 168 'f-65520-64k', 'f-65520'])
169 169 def test_get_messages(pygrack_instance, data, capabilities, expected):
170 170 messages = pygrack_instance._get_messages(data, capabilities)
171 171
172 172 assert messages == expected
173 173
174 174
175 175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 176 # Unexpected response
177 177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 178 # No sideband
179 179 ('no-sideband', [], 'foo', 'bar'),
180 180 # No messages
181 181 ('no-messages', ['side-band-64k'], '', ''),
182 182 ])
183 183 def test_inject_messages_to_response_nothing_to_do(
184 184 pygrack_instance, response, capabilities, pre_pull_messages,
185 185 post_pull_messages):
186 186 new_response = pygrack_instance._inject_messages_to_response(
187 187 response, capabilities, pre_pull_messages, post_pull_messages)
188 188
189 189 assert new_response == response
190 190
191 191
192 192 @pytest.mark.parametrize('capabilities', [
193 193 ['side-band'],
194 194 ['side-band-64k'],
195 195 ])
196 196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 197 capabilities):
198 198 response = ['0008NAK\n0009subp\n0000']
199 199 new_response = pygrack_instance._inject_messages_to_response(
200 200 response, capabilities, 'foo', 'bar')
201 201
202 202 expected_response = [
203 203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
204 204
205 205 assert new_response == expected_response
206 206
207 207
208 208 @pytest.mark.parametrize('capabilities', [
209 209 ['side-band'],
210 210 ['side-band-64k'],
211 211 ])
212 212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 213 capabilities):
214 214 response = [
215 215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 216 new_response = pygrack_instance._inject_messages_to_response(
217 217 response, capabilities, 'foo', 'bar')
218 218
219 219 expected_response = [
220 220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 221 '000asubp4\n', '0008\x02bar', '0000'
222 222 ]
223 223
224 224 assert new_response == expected_response
225 225
226 226
227 227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
228 228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
229 229
230 230 assert response == [pygrack.GitRepository.EMPTY_PACK]
231 231
232 232
233 233 @pytest.mark.parametrize('capabilities', [
234 234 ['side-band'],
235 235 ['side-band-64k'],
236 236 ['side-band-64k', 'no-progress'],
237 237 ])
238 238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 239 response = pygrack_instance._build_failed_pre_pull_response(
240 240 capabilities, 'foo')
241 241
242 242 expected_response = [
243 243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 245 pygrack.GitRepository.EMPTY_PACK),
246 246 '0000',
247 247 ]
248 248
249 249 assert response == expected_response
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19
20 20 import mercurial.hg
21 21 import mercurial.ui
22 22 import mercurial.error
23 23 import mock
24 24 import pytest
25 25 import webtest
26 26
27 27 from vcsserver import scm_app
28 28
29 29
30 30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 32 app = webtest.TestApp(scm_app.HgWeb(repo))
33 33
34 34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35 35
36 36 assert response.status_int == 400
37 37
38 38
39 39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 41 config = (
42 42 ('paths', 'default', ''),
43 43 )
44 44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 45 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 46 with pytest.raises(Exception):
47 47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48 48
49 49
50 50 def test_git_returns_not_found(tmpdir):
51 51 app = webtest.TestApp(
52 52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53 53
54 54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 55 expect_errors=True)
56 56
57 57 assert response.status_int == 404
58 58
59 59
60 60 def test_git(tmpdir):
61 61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 62 tmpdir.mkdir(dir_name)
63 63
64 64 app = webtest.TestApp(
65 65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66 66
67 67 # We set service to git-upload-packs to trigger a 403
68 68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 69 expect_errors=True)
70 70
71 71 assert response.status_int == 403
72 72
73 73
74 74 def test_git_fallbacks_to_git_folder(tmpdir):
75 75 tmpdir.mkdir('.git')
76 76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 77 tmpdir.mkdir(os.path.join('.git', dir_name))
78 78
79 79 app = webtest.TestApp(
80 80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81 81
82 82 # We set service to git-upload-packs to trigger a 403
83 83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 84 expect_errors=True)
85 85
86 86 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from vcsserver.server import VcsServer
24 24
25 25
26 26 def test_provides_the_pid(server):
27 27 pid = server.get_pid()
28 28 assert pid == os.getpid()
29 29
30 30
31 31 def test_allows_to_trigger_the_garbage_collector(server):
32 32 with mock.patch('gc.collect') as collect:
33 33 server.run_gc()
34 34 assert collect.called
35 35
36 36
37 37 @pytest.fixture
38 38 def server():
39 39 return VcsServer()
@@ -1,155 +1,155 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21
22 22 import pytest
23 23
24 24 from vcsserver import subprocessio
25 25
26 26
27 27 class KindaFilelike(object): # pragma: no cover
28 28
29 29 def __init__(self, data, size):
30 30 chunks = size / len(data)
31 31
32 32 self.stream = self._get_stream(data, chunks)
33 33
34 34 def _get_stream(self, data, chunks):
35 35 for x in xrange(chunks):
36 36 yield data
37 37
38 38 def read(self, n):
39 39
40 40 buffer_stream = ''
41 41 for chunk in self.stream:
42 42 buffer_stream += chunk
43 43 if len(buffer_stream) >= n:
44 44 break
45 45
46 46 # self.stream = self.bytes[n:]
47 47 return buffer_stream
48 48
49 49
50 50 @pytest.fixture(scope='module')
51 51 def environ():
52 52 """Delete coverage variables, as they make the tests fail."""
53 53 env = dict(os.environ)
54 54 for key in env.keys():
55 55 if key.startswith('COV_CORE_'):
56 56 del env[key]
57 57
58 58 return env
59 59
60 60
61 61 def _get_python_args(script):
62 62 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
63 63
64 64
65 65 def test_raise_exception_on_non_zero_return_code(environ):
66 66 args = _get_python_args('sys.exit(1)')
67 67 with pytest.raises(EnvironmentError):
68 68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
69 69
70 70
71 71 def test_does_not_fail_on_non_zero_return_code(environ):
72 72 args = _get_python_args('sys.exit(1)')
73 73 output = ''.join(
74 74 subprocessio.SubprocessIOChunker(
75 75 args, shell=False, fail_on_return_code=False, env=environ
76 76 )
77 77 )
78 78
79 79 assert output == ''
80 80
81 81
82 82 def test_raise_exception_on_stderr(environ):
83 83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
84 84 with pytest.raises(EnvironmentError) as excinfo:
85 85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
86 86
87 87 assert 'exited due to an error:\nX' in str(excinfo.value)
88 88
89 89
90 90 def test_does_not_fail_on_stderr(environ):
91 91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
92 92 output = ''.join(
93 93 subprocessio.SubprocessIOChunker(
94 94 args, shell=False, fail_on_stderr=False, env=environ
95 95 )
96 96 )
97 97
98 98 assert output == ''
99 99
100 100
101 101 @pytest.mark.parametrize('size', [1, 10 ** 5])
102 102 def test_output_with_no_input(size, environ):
103 103 print(type(environ))
104 104 data = 'X'
105 105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
106 106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
107 107
108 108 assert output == data * size
109 109
110 110
111 111 @pytest.mark.parametrize('size', [1, 10 ** 5])
112 112 def test_output_with_no_input_does_not_fail(size, environ):
113 113 data = 'X'
114 114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
115 115 output = ''.join(
116 116 subprocessio.SubprocessIOChunker(
117 117 args, shell=False, fail_on_return_code=False, env=environ
118 118 )
119 119 )
120 120
121 121 print("{} {}".format(len(data * size), len(output)))
122 122 assert output == data * size
123 123
124 124
125 125 @pytest.mark.parametrize('size', [1, 10 ** 5])
126 126 def test_output_with_input(size, environ):
127 127 data_len = size
128 128 inputstream = KindaFilelike('X', size)
129 129
130 130 # This acts like the cat command.
131 131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 132 output = ''.join(
133 133 subprocessio.SubprocessIOChunker(
134 134 args, shell=False, inputstream=inputstream, env=environ
135 135 )
136 136 )
137 137
138 138 assert len(output) == data_len
139 139
140 140
141 141 @pytest.mark.parametrize('size', [1, 10 ** 5])
142 142 def test_output_with_input_skipping_iterator(size, environ):
143 143 data_len = size
144 144 inputstream = KindaFilelike('X', size)
145 145
146 146 # This acts like the cat command.
147 147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
148 148
149 149 # Note: assigning the chunker makes sure that it is not deleted too early
150 150 chunker = subprocessio.SubprocessIOChunker(
151 151 args, shell=False, inputstream=inputstream, env=environ
152 152 )
153 153 output = ''.join(chunker.output)
154 154
155 155 assert len(output) == data_len
@@ -1,87 +1,87 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import mock
20 20 import pytest
21 21 import sys
22 22
23 23
24 24 class MockPopen(object):
25 25 def __init__(self, stderr):
26 26 self.stdout = io.BytesIO('')
27 27 self.stderr = io.BytesIO(stderr)
28 28 self.returncode = 1
29 29
30 30 def wait(self):
31 31 pass
32 32
33 33
34 34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 37 ])
38 38
39 39
40 40 @pytest.mark.parametrize('stderr,expected_reason', [
41 41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 42 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
43 43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 44 @pytest.mark.xfail(sys.platform == "cygwin",
45 45 reason="SVN not packaged for Cygwin")
46 46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 47 from vcsserver import svn
48 48 factory = mock.Mock()
49 49 factory.repo = mock.Mock(return_value=mock.Mock())
50 50
51 51 remote = svn.SvnRemote(factory)
52 52 remote.is_path_valid_repository = lambda wire, path: True
53 53
54 54 with mock.patch('subprocess.Popen',
55 55 return_value=MockPopen(stderr)):
56 56 with pytest.raises(Exception) as excinfo:
57 57 remote.import_remote_repository({'path': 'path'}, 'url')
58 58
59 59 expected_error_args = (
60 60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
61 61
62 62 assert excinfo.value.args == expected_error_args
63 63
64 64
65 65 def test_svn_libraries_can_be_imported():
66 66 import svn
67 67 import svn.client
68 68 assert svn.client is not None
69 69
70 70
71 71 @pytest.mark.parametrize('example_url, parts', [
72 72 ('http://server.com', (None, None, 'http://server.com')),
73 73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
74 74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 75 ('<script>', (None, None, '<script>')),
76 76 ('http://', (None, None, 'http://')),
77 77 ])
78 78 def test_username_password_extraction_from_url(example_url, parts):
79 79 from vcsserver import svn
80 80
81 81 factory = mock.Mock()
82 82 factory.repo = mock.Mock(return_value=mock.Mock())
83 83
84 84 remote = svn.SvnRemote(factory)
85 85 remote.is_path_valid_repository = lambda wire, path: True
86 86
87 87 assert remote.get_url_and_credentials(example_url) == parts
@@ -1,96 +1,96 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import wsgiref.simple_server
19 19 import wsgiref.validate
20 20
21 21 from vcsserver import wsgi_app_caller
22 22
23 23
24 24 # pylint: disable=protected-access,too-many-public-methods
25 25
26 26
27 27 @wsgiref.validate.validator
28 28 def demo_app(environ, start_response):
29 29 """WSGI app used for testing."""
30 30 data = [
31 31 'Hello World!\n',
32 32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 33 ]
34 34 for key, value in sorted(environ.items()):
35 35 data.append('%s=%s\n' % (key, value))
36 36
37 37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 38 write('Old school write method\n')
39 39 write('***********************\n')
40 40 return data
41 41
42 42
43 43 BASE_ENVIRON = {
44 44 'REQUEST_METHOD': 'GET',
45 45 'SERVER_NAME': 'localhost',
46 46 'SERVER_PORT': '80',
47 47 'SCRIPT_NAME': '',
48 48 'PATH_INFO': '/',
49 49 'QUERY_STRING': '',
50 50 'foo.var': 'bla',
51 51 }
52 52
53 53
54 54 def test_complete_environ():
55 55 environ = dict(BASE_ENVIRON)
56 56 data = "data"
57 57 wsgi_app_caller._complete_environ(environ, data)
58 58 wsgiref.validate.check_environ(environ)
59 59
60 60 assert data == environ['wsgi.input'].read()
61 61
62 62
63 63 def test_start_response():
64 64 start_response = wsgi_app_caller._StartResponse()
65 65 status = '200 OK'
66 66 headers = [('Content-Type', 'text/plain')]
67 67 start_response(status, headers)
68 68
69 69 assert status == start_response.status
70 70 assert headers == start_response.headers
71 71
72 72
73 73 def test_start_response_with_error():
74 74 start_response = wsgi_app_caller._StartResponse()
75 75 status = '500 Internal Server Error'
76 76 headers = [('Content-Type', 'text/plain')]
77 77 start_response(status, headers, (None, None, None))
78 78
79 79 assert status == start_response.status
80 80 assert headers == start_response.headers
81 81
82 82
83 83 def test_wsgi_app_caller():
84 84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 85 environ = dict(BASE_ENVIRON)
86 86 input_data = 'some text'
87 87 responses, status, headers = caller.handle(environ, input_data)
88 88 response = ''.join(responses)
89 89
90 90 assert status == '200 OK'
91 91 assert headers == [('Content-Type', 'text/plain')]
92 92 assert response.startswith(
93 93 'Old school write method\n***********************\n')
94 94 assert 'Hello World!\n' in response
95 95 assert 'foo.var=bla\n' in response
96 96 assert 'input_data=%s\n' % input_data in response
@@ -1,19 +1,19 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -1,64 +1,64 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import logging
20 20
21 21 import vcsserver
22 22 from vcsserver.utils import safe_str
23 23
24 24
25 25 log = logging.getLogger(__name__)
26 26
27 27
28 28 def get_access_path(request):
29 29 environ = request.environ
30 30 return environ.get('PATH_INFO')
31 31
32 32
33 33 def get_user_agent(environ):
34 34 return environ.get('HTTP_USER_AGENT')
35 35
36 36
37 37 class RequestWrapperTween(object):
38 38 def __init__(self, handler, registry):
39 39 self.handler = handler
40 40 self.registry = registry
41 41
42 42 # one-time configuration code goes here
43 43
44 44 def __call__(self, request):
45 45 start = time.time()
46 46 try:
47 47 response = self.handler(request)
48 48 finally:
49 49 end = time.time()
50 50 total = end - start
51 51 count = request.request_count()
52 52 _ver_ = vcsserver.__version__
53 53 log.info(
54 54 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
55 55 count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
56 56 safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
57 57
58 58 return response
59 59
60 60
61 61 def includeme(config):
62 62 config.add_tween(
63 63 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
64 64 )
@@ -1,110 +1,110 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import logging
18 18 import hashlib
19 19
20 20 log = logging.getLogger(__name__)
21 21
22 22
def safe_int(val, default=None):
    """
    Coerce *val* to ``int``; return *default* when conversion fails.

    :param val: value to convert
    :param default: fallback returned when *val* is not int-convertible
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        # Covers both malformed strings and non-numeric types (e.g. None).
        return default
38 38
39 39
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Does few trick to turn unicode_ into string

    :param unicode_: unicode to encode
    :param to_encoding: encoding (or list of encodings) to try, UTF8 default
    :rtype: str
    :returns: str object
    """
    # Anything that is not a (byte or unicode) string is just stringified.
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    # Already a byte string -- nothing to encode.
    if isinstance(unicode_, str):
        return unicode_

    encodings = to_encoding or ['utf8']
    if not isinstance(encodings, (list, tuple)):
        encodings = [encodings]

    for encoding in encodings:
        try:
            return unicode_.encode(encoding)
        except UnicodeEncodeError:
            pass

    # Last resort: replace characters the first encoding cannot represent.
    return unicode_.encode(encodings[0], 'replace')
67 67
68 68
def safe_unicode(str_, from_encoding=None):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    :param str_: string to decode
    :param from_encoding: encoding (or list of encodings) to try, UTF8 default
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    encodings = from_encoding or ['utf8']
    if not isinstance(encodings, (list, tuple)):
        encodings = [encodings]

    # Fast path: plain ASCII content decodes with the default codec.
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    for encoding in encodings:
        try:
            return unicode(str_, encoding)
        except UnicodeDecodeError:
            pass

    # Last resort: replace bytes the first encoding cannot decode.
    return unicode(str_, encodings[0], 'replace')
98 98
99 99
class AttributeDict(dict):
    """Dict whose keys are also accessible as attributes.

    Reading a missing attribute yields ``None`` instead of raising;
    attribute assignment and deletion map straight onto the dict items.
    """

    def __getattr__(self, attr):
        # Only called for names not found the normal way, so plain
        # dict machinery stays untouched.
        return self.get(attr, None)

    def __setattr__(self, key, value):
        self[key] = value

    def __delattr__(self, key):
        del self[key]
105 105
106 106
def sha1(val):
    """Return the hexadecimal SHA-1 digest of *val*."""
    hasher = hashlib.sha1(val)
    return hasher.hexdigest()
109 109
110 110
@@ -1,32 +1,32 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
class RemoteBase(object):
    """Shared plumbing for remote vcs backends: cache region and cache keys."""

    # 40 zeros: the conventional "null" commit hash.
    EMPTY_COMMIT = '0' * 40

    @property
    def region(self):
        """Cache region exposed by the backing factory."""
        return self._factory._cache_region

    def _cache_on(self, wire):
        """Derive ``(cache_on, context_uid, repo_id)`` from a *wire* dict.

        Caching is active only when a context is present and the wire
        does not explicitly disable it.
        """
        context = wire.get('context', '')
        repo_id = wire.get('repo_id', '')
        use_cache = wire.get('cache', True)
        return (context and use_cache), '{}'.format(context), repo_id
@@ -1,116 +1,116 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """Extract the responses of a WSGI app."""
19 19
20 20 __all__ = ('WSGIAppCaller',)
21 21
22 22 import io
23 23 import logging
24 24 import os
25 25
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29 DEV_NULL = open(os.devnull)
30 30
31 31
32 32 def _complete_environ(environ, input_data):
33 33 """Update the missing wsgi.* variables of a WSGI environment.
34 34
35 35 :param environ: WSGI environment to update
36 36 :type environ: dict
37 37 :param input_data: data to be read by the app
38 38 :type input_data: str
39 39 """
40 40 environ.update({
41 41 'wsgi.version': (1, 0),
42 42 'wsgi.url_scheme': 'http',
43 43 'wsgi.multithread': True,
44 44 'wsgi.multiprocess': True,
45 45 'wsgi.run_once': False,
46 46 'wsgi.input': io.BytesIO(input_data),
47 47 'wsgi.errors': DEV_NULL,
48 48 })
49 49
50 50
51 51 # pylint: disable=too-few-public-methods
52 52 class _StartResponse(object):
53 53 """Save the arguments of a start_response call."""
54 54
55 55 __slots__ = ['status', 'headers', 'content']
56 56
57 57 def __init__(self):
58 58 self.status = None
59 59 self.headers = None
60 60 self.content = []
61 61
62 62 def __call__(self, status, headers, exc_info=None):
63 63 # TODO(skreft): do something meaningful with the exc_info
64 64 exc_info = None # avoid dangling circular reference
65 65 self.status = status
66 66 self.headers = headers
67 67
68 68 return self.write
69 69
70 70 def write(self, content):
71 71 """Write method returning when calling this object.
72 72
73 73 All the data written is then available in content.
74 74 """
75 75 self.content.append(content)
76 76
77 77
class WSGIAppCaller(object):
    """Calls a WSGI app."""

    def __init__(self, app):
        """
        :param app: WSGI app to call
        """
        self.app = app

    def handle(self, environ, input_data):
        """Process a request with the WSGI app.

        The returned data of the app is fully consumed into a list.

        :param environ: WSGI environment to update
        :type environ: dict
        :param input_data: data to be read by the app
        :type input_data: str

        :returns: a tuple with the contents, status and headers
        :rtype: (list<str>, str, list<(str, str)>)
        """
        _complete_environ(environ, input_data)
        start_response = _StartResponse()

        log.debug("Calling wrapped WSGI application")
        app_iter = self.app(environ, start_response)
        body_parts = list(app_iter)

        # Per the WSGI spec, data pushed through write() must precede
        # whatever the application iterable yields.
        if start_response.content:
            log.debug(
                "Adding returned response to response written via write()")
            body_parts = start_response.content + body_parts

        if hasattr(app_iter, 'close'):
            log.debug("Closing iterator from WSGI application")
            app_iter.close()

        log.debug("Handling of WSGI request done, returning response")
        return body_parts, start_response.status, start_response.headers
General Comments 0
You need to be logged in to leave comments. Login now