Commit: code: update copyrights to 2020
Author: marcink
Revision: r850:cbc05af2 (default branch)
@@ -1,28 +1,28 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
-# Copyright (C) 2014-2019 RhodeCode GmbH
+# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import pkgutil


__version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
#
PYRAMID_SETTINGS = {}
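
For orientation (an illustration, not part of the commit): `__version__` above is read with `pkgutil.get_data` from a VERSION data file bundled inside the `vcsserver` package, so importing the package is enough to obtain the version string, assuming the package is installed together with that data file.

    import vcsserver
    # Prints whatever the bundled VERSION file contains for the installed release.
    print(vcsserver.__version__)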
@@ -1,76 +1,76 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
-# Copyright (C) 2014-2019 RhodeCode GmbH
+# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import sys
import traceback
import logging
import urlparse

from vcsserver.lib.rc_cache import region_meta
log = logging.getLogger(__name__)


class RepoFactory(object):
    """
    Utility to create instances of repository

    It provides internal caching of the `repo` object based on
    the :term:`call context`.
    """
    repo_type = None

    def __init__(self):
        self._cache_region = region_meta.dogpile_cache_regions['repo_object']

    def _create_config(self, path, config):
        config = {}
        return config

    def _create_repo(self, wire, create):
        raise NotImplementedError()

    def repo(self, wire, create=False):
        raise NotImplementedError()


def obfuscate_qs(query_string):
    if query_string is None:
        return None

    parsed = []
    for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
        if k in ['auth_token', 'api_key']:
            v = "*****"
        parsed.append((k, v))

    return '&'.join('{}{}'.format(
        k, '={}'.format(v) if v else '') for k, v in parsed)


def raise_from_original(new_type):
    """
    Raise a new exception type with original args and traceback.
    """
    exc_type, exc_value, exc_traceback = sys.exc_info()
    new_exc = new_type(*exc_value.args)
    # store the original traceback into the new exc
    new_exc._org_exc_tb = traceback.format_exc(exc_traceback)

    try:
        raise new_exc, None, exc_traceback
    finally:
        del exc_traceback
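
To make the behaviour of `obfuscate_qs` above concrete, a small illustration (not part of the commit): values of `auth_token` and `api_key` are masked before URLs are logged, and parameters with empty values are rendered without the `=` sign.

    from vcsserver.base import obfuscate_qs

    print(obfuscate_qs('auth_token=abc123&page=2'))  # auth_token=*****&page=2
    print(obfuscate_qs('sort=&page=2'))              # sort&page=2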
@@ -1,121 +1,121 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
-# Copyright (C) 2014-2019 RhodeCode GmbH
+# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""
Special exception handling over the wire.

Since we cannot assume that our client is able to import our exception classes,
this module provides a "wrapping" mechanism to raise plain exceptions
which contain an extra attribute `_vcs_kind` to allow a client to distinguish
different error conditions.
"""

from pyramid.httpexceptions import HTTPLocked, HTTPForbidden


def _make_exception(kind, org_exc, *args):
    """
    Prepares a base `Exception` instance to be sent over the wire.

    To give our caller a hint what this is about, it will attach an attribute
    `_vcs_kind` to the exception.
    """
    exc = Exception(*args)
    exc._vcs_kind = kind
    exc._org_exc = org_exc
    exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
    return exc


def AbortException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('abort', org_exc, *args)
    return _make_exception_wrapper


def ArchiveException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('archive', org_exc, *args)
    return _make_exception_wrapper


def LookupException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('lookup', org_exc, *args)
    return _make_exception_wrapper


def VcsException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('error', org_exc, *args)
    return _make_exception_wrapper


def RepositoryLockedException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('repo_locked', org_exc, *args)
    return _make_exception_wrapper


def RepositoryBranchProtectedException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('repo_branch_protected', org_exc, *args)
    return _make_exception_wrapper


def RequirementException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('requirement', org_exc, *args)
    return _make_exception_wrapper


def UnhandledException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('unhandled', org_exc, *args)
    return _make_exception_wrapper


def URLError(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('url_error', org_exc, *args)
    return _make_exception_wrapper


def SubrepoMergeException(org_exc=None):
    def _make_exception_wrapper(*args):
        return _make_exception('subrepo_merge_error', org_exc, *args)
    return _make_exception_wrapper


class HTTPRepoLocked(HTTPLocked):
    """
    Subclass of HTTPLocked response that allows to set the title and status
    code via constructor arguments.
    """
    def __init__(self, title, status_code=None, **kwargs):
        self.code = status_code or HTTPLocked.code
        self.title = title
        super(HTTPRepoLocked, self).__init__(**kwargs)


class HTTPRepoBranchProtected(HTTPForbidden):
    def __init__(self, *args, **kwargs):
        super(HTTPForbidden, self).__init__(*args, **kwargs)


class RefNotFoundException(KeyError):
    pass
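
A hedged sketch of the consumer side of this wrapping scheme (the dispatching helper below is hypothetical, not part of this commit): a client catches the plain `Exception` that comes back over the wire and branches on the `_vcs_kind` attribute instead of on an exception class it cannot import.

    def translate_remote_error(exc):
        # `_vcs_kind` is attached by _make_exception(); re-raise anything
        # that was not wrapped on the server side.
        kind = getattr(exc, '_vcs_kind', None)
        if kind is None:
            raise exc
        if kind == 'repo_locked':
            return 'repository is locked'
        if kind == 'lookup':
            return 'commit or ref not found'
        return 'vcs error: {}'.format(kind)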
@@ -1,1192 +1,1192 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from dulwich import index, objects
32 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
34 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
36 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
37 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
39 from dulwich.server import update_server_info
40
40
41 from vcsserver import exceptions, settings, subprocessio
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.hgcompat import (
44 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
47 from vcsserver.vcs_base import RemoteBase
48
48
49 DIR_STAT = stat.S_IFDIR
49 DIR_STAT = stat.S_IFDIR
50 FILE_MODE = stat.S_IFMT
50 FILE_MODE = stat.S_IFMT
51 GIT_LINK = objects.S_IFGITLINK
51 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
52 PEELED_REF_MARKER = '^{}'
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 def str_to_dulwich(value):
58 def str_to_dulwich(value):
59 """
59 """
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 """
61 """
62 return value.decode(settings.WIRE_ENCODING)
62 return value.decode(settings.WIRE_ENCODING)
63
63
64
64
65 def reraise_safe_exceptions(func):
65 def reraise_safe_exceptions(func):
66 """Converts Dulwich exceptions to something neutral."""
66 """Converts Dulwich exceptions to something neutral."""
67
67
68 @wraps(func)
68 @wraps(func)
69 def wrapper(*args, **kwargs):
69 def wrapper(*args, **kwargs):
70 try:
70 try:
71 return func(*args, **kwargs)
71 return func(*args, **kwargs)
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 exc = exceptions.LookupException(org_exc=e)
73 exc = exceptions.LookupException(org_exc=e)
74 raise exc(safe_str(e))
74 raise exc(safe_str(e))
75 except (HangupException, UnexpectedCommandError) as e:
75 except (HangupException, UnexpectedCommandError) as e:
76 exc = exceptions.VcsException(org_exc=e)
76 exc = exceptions.VcsException(org_exc=e)
77 raise exc(safe_str(e))
77 raise exc(safe_str(e))
78 except Exception as e:
78 except Exception as e:
79 # NOTE(marcink): becuase of how dulwich handles some exceptions
79 # NOTE(marcink): becuase of how dulwich handles some exceptions
80 # (KeyError on empty repos), we cannot track this and catch all
80 # (KeyError on empty repos), we cannot track this and catch all
81 # exceptions, it's an exceptions from other handlers
81 # exceptions, it's an exceptions from other handlers
82 #if not hasattr(e, '_vcs_kind'):
82 #if not hasattr(e, '_vcs_kind'):
83 #log.exception("Unhandled exception in git remote call")
83 #log.exception("Unhandled exception in git remote call")
84 #raise_from_original(exceptions.UnhandledException)
84 #raise_from_original(exceptions.UnhandledException)
85 raise
85 raise
86 return wrapper
86 return wrapper
87
87
88
88
89 class Repo(DulwichRepo):
89 class Repo(DulwichRepo):
90 """
90 """
91 A wrapper for dulwich Repo class.
91 A wrapper for dulwich Repo class.
92
92
93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 "Too many open files" error. We need to close all opened file descriptors
94 "Too many open files" error. We need to close all opened file descriptors
95 once the repo object is destroyed.
95 once the repo object is destroyed.
96 """
96 """
97 def __del__(self):
97 def __del__(self):
98 if hasattr(self, 'object_store'):
98 if hasattr(self, 'object_store'):
99 self.close()
99 self.close()
100
100
101
101
102 class Repository(LibGit2Repo):
102 class Repository(LibGit2Repo):
103
103
104 def __enter__(self):
104 def __enter__(self):
105 return self
105 return self
106
106
107 def __exit__(self, exc_type, exc_val, exc_tb):
107 def __exit__(self, exc_type, exc_val, exc_tb):
108 self.free()
108 self.free()
109
109
110
110
111 class GitFactory(RepoFactory):
111 class GitFactory(RepoFactory):
112 repo_type = 'git'
112 repo_type = 'git'
113
113
114 def _create_repo(self, wire, create, use_libgit2=False):
114 def _create_repo(self, wire, create, use_libgit2=False):
115 if use_libgit2:
115 if use_libgit2:
116 return Repository(wire['path'])
116 return Repository(wire['path'])
117 else:
117 else:
118 repo_path = str_to_dulwich(wire['path'])
118 repo_path = str_to_dulwich(wire['path'])
119 return Repo(repo_path)
119 return Repo(repo_path)
120
120
121 def repo(self, wire, create=False, use_libgit2=False):
121 def repo(self, wire, create=False, use_libgit2=False):
122 """
122 """
123 Get a repository instance for the given path.
123 Get a repository instance for the given path.
124 """
124 """
125 return self._create_repo(wire, create, use_libgit2)
125 return self._create_repo(wire, create, use_libgit2)
126
126
127 def repo_libgit2(self, wire):
127 def repo_libgit2(self, wire):
128 return self.repo(wire, use_libgit2=True)
128 return self.repo(wire, use_libgit2=True)
129
129
130
130
131 class GitRemote(RemoteBase):
131 class GitRemote(RemoteBase):
132
132
133 def __init__(self, factory):
133 def __init__(self, factory):
134 self._factory = factory
134 self._factory = factory
135 self._bulk_methods = {
135 self._bulk_methods = {
136 "date": self.date,
136 "date": self.date,
137 "author": self.author,
137 "author": self.author,
138 "branch": self.branch,
138 "branch": self.branch,
139 "message": self.message,
139 "message": self.message,
140 "parents": self.parents,
140 "parents": self.parents,
141 "_commit": self.revision,
141 "_commit": self.revision,
142 }
142 }
143
143
144 def _wire_to_config(self, wire):
144 def _wire_to_config(self, wire):
145 if 'config' in wire:
145 if 'config' in wire:
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return {}
147 return {}
148
148
149 def _remote_conf(self, config):
149 def _remote_conf(self, config):
150 params = [
150 params = [
151 '-c', 'core.askpass=""',
151 '-c', 'core.askpass=""',
152 ]
152 ]
153 ssl_cert_dir = config.get('vcs_ssl_dir')
153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 if ssl_cert_dir:
154 if ssl_cert_dir:
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 return params
156 return params
157
157
158 @reraise_safe_exceptions
158 @reraise_safe_exceptions
159 def discover_git_version(self):
159 def discover_git_version(self):
160 stdout, _ = self.run_git_command(
160 stdout, _ = self.run_git_command(
161 {}, ['--version'], _bare=True, _safe=True)
161 {}, ['--version'], _bare=True, _safe=True)
162 prefix = 'git version'
162 prefix = 'git version'
163 if stdout.startswith(prefix):
163 if stdout.startswith(prefix):
164 stdout = stdout[len(prefix):]
164 stdout = stdout[len(prefix):]
165 return stdout.strip()
165 return stdout.strip()
166
166
167 @reraise_safe_exceptions
167 @reraise_safe_exceptions
168 def is_empty(self, wire):
168 def is_empty(self, wire):
169 repo_init = self._factory.repo_libgit2(wire)
169 repo_init = self._factory.repo_libgit2(wire)
170 with repo_init as repo:
170 with repo_init as repo:
171
171
172 try:
172 try:
173 has_head = repo.head.name
173 has_head = repo.head.name
174 if has_head:
174 if has_head:
175 return False
175 return False
176
176
177 # NOTE(marcink): check again using more expensive method
177 # NOTE(marcink): check again using more expensive method
178 return repo.is_empty
178 return repo.is_empty
179 except Exception:
179 except Exception:
180 pass
180 pass
181
181
182 return True
182 return True
183
183
184 @reraise_safe_exceptions
184 @reraise_safe_exceptions
185 def assert_correct_path(self, wire):
185 def assert_correct_path(self, wire):
186 cache_on, context_uid, repo_id = self._cache_on(wire)
186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 def _assert_correct_path(_context_uid, _repo_id):
188 def _assert_correct_path(_context_uid, _repo_id):
189 try:
189 try:
190 repo_init = self._factory.repo_libgit2(wire)
190 repo_init = self._factory.repo_libgit2(wire)
191 with repo_init as repo:
191 with repo_init as repo:
192 pass
192 pass
193 except pygit2.GitError:
193 except pygit2.GitError:
194 path = wire.get('path')
194 path = wire.get('path')
195 tb = traceback.format_exc()
195 tb = traceback.format_exc()
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 return False
197 return False
198
198
199 return True
199 return True
200 return _assert_correct_path(context_uid, repo_id)
200 return _assert_correct_path(context_uid, repo_id)
201
201
202 @reraise_safe_exceptions
202 @reraise_safe_exceptions
203 def bare(self, wire):
203 def bare(self, wire):
204 repo_init = self._factory.repo_libgit2(wire)
204 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
205 with repo_init as repo:
206 return repo.is_bare
206 return repo.is_bare
207
207
208 @reraise_safe_exceptions
208 @reraise_safe_exceptions
209 def blob_as_pretty_string(self, wire, sha):
209 def blob_as_pretty_string(self, wire, sha):
210 repo_init = self._factory.repo_libgit2(wire)
210 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
211 with repo_init as repo:
212 blob_obj = repo[sha]
212 blob_obj = repo[sha]
213 blob = blob_obj.data
213 blob = blob_obj.data
214 return blob
214 return blob
215
215
216 @reraise_safe_exceptions
216 @reraise_safe_exceptions
217 def blob_raw_length(self, wire, sha):
217 def blob_raw_length(self, wire, sha):
218 cache_on, context_uid, repo_id = self._cache_on(wire)
218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 def _blob_raw_length(_repo_id, _sha):
220 def _blob_raw_length(_repo_id, _sha):
221
221
222 repo_init = self._factory.repo_libgit2(wire)
222 repo_init = self._factory.repo_libgit2(wire)
223 with repo_init as repo:
223 with repo_init as repo:
224 blob = repo[sha]
224 blob = repo[sha]
225 return blob.size
225 return blob.size
226
226
227 return _blob_raw_length(repo_id, sha)
227 return _blob_raw_length(repo_id, sha)
228
228
229 def _parse_lfs_pointer(self, raw_content):
229 def _parse_lfs_pointer(self, raw_content):
230
230
231 spec_string = 'version https://git-lfs.github.com/spec'
231 spec_string = 'version https://git-lfs.github.com/spec'
232 if raw_content and raw_content.startswith(spec_string):
232 if raw_content and raw_content.startswith(spec_string):
233 pattern = re.compile(r"""
233 pattern = re.compile(r"""
234 (?:\n)?
234 (?:\n)?
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
238 (?:\n)?
238 (?:\n)?
239 """, re.VERBOSE | re.MULTILINE)
239 """, re.VERBOSE | re.MULTILINE)
240 match = pattern.match(raw_content)
240 match = pattern.match(raw_content)
241 if match:
241 if match:
242 return match.groupdict()
242 return match.groupdict()
243
243
244 return {}
244 return {}
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def is_large_file(self, wire, commit_id):
247 def is_large_file(self, wire, commit_id):
248 cache_on, context_uid, repo_id = self._cache_on(wire)
248 cache_on, context_uid, repo_id = self._cache_on(wire)
249
249
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
251 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
252 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
253 with repo_init as repo:
254 blob = repo[commit_id]
254 blob = repo[commit_id]
255 if blob.is_binary:
255 if blob.is_binary:
256 return {}
256 return {}
257
257
258 return self._parse_lfs_pointer(blob.data)
258 return self._parse_lfs_pointer(blob.data)
259
259
260 return _is_large_file(repo_id, commit_id)
260 return _is_large_file(repo_id, commit_id)
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
263 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265
265
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
271 return blob_obj.is_binary
272
272
273 return _is_binary(repo_id, tree_id)
273 return _is_binary(repo_id, tree_id)
274
274
275 @reraise_safe_exceptions
275 @reraise_safe_exceptions
276 def in_largefiles_store(self, wire, oid):
276 def in_largefiles_store(self, wire, oid):
277 conf = self._wire_to_config(wire)
277 conf = self._wire_to_config(wire)
278 repo_init = self._factory.repo_libgit2(wire)
278 repo_init = self._factory.repo_libgit2(wire)
279 with repo_init as repo:
279 with repo_init as repo:
280 repo_name = repo.path
280 repo_name = repo.path
281
281
282 store_location = conf.get('vcs_git_lfs_store_location')
282 store_location = conf.get('vcs_git_lfs_store_location')
283 if store_location:
283 if store_location:
284
284
285 store = LFSOidStore(
285 store = LFSOidStore(
286 oid=oid, repo=repo_name, store_location=store_location)
286 oid=oid, repo=repo_name, store_location=store_location)
287 return store.has_oid()
287 return store.has_oid()
288
288
289 return False
289 return False
290
290
291 @reraise_safe_exceptions
291 @reraise_safe_exceptions
292 def store_path(self, wire, oid):
292 def store_path(self, wire, oid):
293 conf = self._wire_to_config(wire)
293 conf = self._wire_to_config(wire)
294 repo_init = self._factory.repo_libgit2(wire)
294 repo_init = self._factory.repo_libgit2(wire)
295 with repo_init as repo:
295 with repo_init as repo:
296 repo_name = repo.path
296 repo_name = repo.path
297
297
298 store_location = conf.get('vcs_git_lfs_store_location')
298 store_location = conf.get('vcs_git_lfs_store_location')
299 if store_location:
299 if store_location:
300 store = LFSOidStore(
300 store = LFSOidStore(
301 oid=oid, repo=repo_name, store_location=store_location)
301 oid=oid, repo=repo_name, store_location=store_location)
302 return store.oid_path
302 return store.oid_path
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def bulk_request(self, wire, rev, pre_load):
306 def bulk_request(self, wire, rev, pre_load):
307 cache_on, context_uid, repo_id = self._cache_on(wire)
307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 def _bulk_request(_repo_id, _rev, _pre_load):
309 def _bulk_request(_repo_id, _rev, _pre_load):
310 result = {}
310 result = {}
311 for attr in pre_load:
311 for attr in pre_load:
312 try:
312 try:
313 method = self._bulk_methods[attr]
313 method = self._bulk_methods[attr]
314 args = [wire, rev]
314 args = [wire, rev]
315 result[attr] = method(*args)
315 result[attr] = method(*args)
316 except KeyError as e:
316 except KeyError as e:
317 raise exceptions.VcsException(e)(
317 raise exceptions.VcsException(e)(
318 "Unknown bulk attribute: %s" % attr)
318 "Unknown bulk attribute: %s" % attr)
319 return result
319 return result
320
320
321 return _bulk_request(repo_id, rev, sorted(pre_load))
321 return _bulk_request(repo_id, rev, sorted(pre_load))
322
322
323 def _build_opener(self, url):
323 def _build_opener(self, url):
324 handlers = []
324 handlers = []
325 url_obj = url_parser(url)
325 url_obj = url_parser(url)
326 _, authinfo = url_obj.authinfo()
326 _, authinfo = url_obj.authinfo()
327
327
328 if authinfo:
328 if authinfo:
329 # create a password manager
329 # create a password manager
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
331 passmgr.add_password(*authinfo)
331 passmgr.add_password(*authinfo)
332
332
333 handlers.extend((httpbasicauthhandler(passmgr),
333 handlers.extend((httpbasicauthhandler(passmgr),
334 httpdigestauthhandler(passmgr)))
334 httpdigestauthhandler(passmgr)))
335
335
336 return urllib2.build_opener(*handlers)
336 return urllib2.build_opener(*handlers)
337
337
338 def _type_id_to_name(self, type_id):
338 def _type_id_to_name(self, type_id):
339 return {
339 return {
340 1: b'commit',
340 1: b'commit',
341 2: b'tree',
341 2: b'tree',
342 3: b'blob',
342 3: b'blob',
343 4: b'tag'
343 4: b'tag'
344 }[type_id]
344 }[type_id]
345
345
346 @reraise_safe_exceptions
346 @reraise_safe_exceptions
347 def check_url(self, url, config):
347 def check_url(self, url, config):
348 url_obj = url_parser(url)
348 url_obj = url_parser(url)
349 test_uri, _ = url_obj.authinfo()
349 test_uri, _ = url_obj.authinfo()
350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 url_obj.query = obfuscate_qs(url_obj.query)
351 url_obj.query = obfuscate_qs(url_obj.query)
352 cleaned_uri = str(url_obj)
352 cleaned_uri = str(url_obj)
353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354
354
355 if not test_uri.endswith('info/refs'):
355 if not test_uri.endswith('info/refs'):
356 test_uri = test_uri.rstrip('/') + '/info/refs'
356 test_uri = test_uri.rstrip('/') + '/info/refs'
357
357
358 o = self._build_opener(url)
358 o = self._build_opener(url)
359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
360
360
361 q = {"service": 'git-upload-pack'}
361 q = {"service": 'git-upload-pack'}
362 qs = '?%s' % urllib.urlencode(q)
362 qs = '?%s' % urllib.urlencode(q)
363 cu = "%s%s" % (test_uri, qs)
363 cu = "%s%s" % (test_uri, qs)
364 req = urllib2.Request(cu, None, {})
364 req = urllib2.Request(cu, None, {})
365
365
366 try:
366 try:
367 log.debug("Trying to open URL %s", cleaned_uri)
367 log.debug("Trying to open URL %s", cleaned_uri)
368 resp = o.open(req)
368 resp = o.open(req)
369 if resp.code != 200:
369 if resp.code != 200:
370 raise exceptions.URLError()('Return Code is not 200')
370 raise exceptions.URLError()('Return Code is not 200')
371 except Exception as e:
371 except Exception as e:
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 # means it cannot be cloned
373 # means it cannot be cloned
374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375
375
376 # now detect if it's proper git repo
376 # now detect if it's proper git repo
377 gitdata = resp.read()
377 gitdata = resp.read()
378 if 'service=git-upload-pack' in gitdata:
378 if 'service=git-upload-pack' in gitdata:
379 pass
379 pass
380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 # old style git can return some other format !
381 # old style git can return some other format !
382 pass
382 pass
383 else:
383 else:
384 raise exceptions.URLError()(
384 raise exceptions.URLError()(
385 "url [%s] does not look like an git" % (cleaned_uri,))
385 "url [%s] does not look like an git" % (cleaned_uri,))
386
386
387 return True
387 return True
388
388
389 @reraise_safe_exceptions
389 @reraise_safe_exceptions
390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
392 remote_refs = self.pull(wire, url, apply_refs=False)
392 remote_refs = self.pull(wire, url, apply_refs=False)
393 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
394 if isinstance(valid_refs, list):
394 if isinstance(valid_refs, list):
395 valid_refs = tuple(valid_refs)
395 valid_refs = tuple(valid_refs)
396
396
397 for k in remote_refs:
397 for k in remote_refs:
398 # only parse heads/tags and skip so called deferred tags
398 # only parse heads/tags and skip so called deferred tags
399 if k.startswith(valid_refs) and not k.endswith(deferred):
399 if k.startswith(valid_refs) and not k.endswith(deferred):
400 repo[k] = remote_refs[k]
400 repo[k] = remote_refs[k]
401
401
402 if update_after_clone:
402 if update_after_clone:
403 # we want to checkout HEAD
403 # we want to checkout HEAD
404 repo["HEAD"] = remote_refs["HEAD"]
404 repo["HEAD"] = remote_refs["HEAD"]
405 index.build_index_from_tree(repo.path, repo.index_path(),
405 index.build_index_from_tree(repo.path, repo.index_path(),
406 repo.object_store, repo["HEAD"].tree)
406 repo.object_store, repo["HEAD"].tree)
407
407
408 @reraise_safe_exceptions
408 @reraise_safe_exceptions
409 def branch(self, wire, commit_id):
409 def branch(self, wire, commit_id):
410 cache_on, context_uid, repo_id = self._cache_on(wire)
410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 def _branch(_context_uid, _repo_id, _commit_id):
412 def _branch(_context_uid, _repo_id, _commit_id):
413 regex = re.compile('^refs/heads')
413 regex = re.compile('^refs/heads')
414
414
415 def filter_with(ref):
415 def filter_with(ref):
416 return regex.match(ref[0]) and ref[1] == _commit_id
416 return regex.match(ref[0]) and ref[1] == _commit_id
417
417
418 branches = filter(filter_with, self.get_refs(wire).items())
418 branches = filter(filter_with, self.get_refs(wire).items())
419 return [x[0].split('refs/heads/')[-1] for x in branches]
419 return [x[0].split('refs/heads/')[-1] for x in branches]
420
420
421 return _branch(context_uid, repo_id, commit_id)
421 return _branch(context_uid, repo_id, commit_id)
422
422
423 @reraise_safe_exceptions
423 @reraise_safe_exceptions
424 def commit_branches(self, wire, commit_id):
424 def commit_branches(self, wire, commit_id):
425 cache_on, context_uid, repo_id = self._cache_on(wire)
425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 repo_init = self._factory.repo_libgit2(wire)
428 repo_init = self._factory.repo_libgit2(wire)
429 with repo_init as repo:
429 with repo_init as repo:
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 return branches
431 return branches
432
432
433 return _commit_branches(context_uid, repo_id, commit_id)
433 return _commit_branches(context_uid, repo_id, commit_id)
434
434
435 @reraise_safe_exceptions
435 @reraise_safe_exceptions
436 def add_object(self, wire, content):
436 def add_object(self, wire, content):
437 repo_init = self._factory.repo_libgit2(wire)
437 repo_init = self._factory.repo_libgit2(wire)
438 with repo_init as repo:
438 with repo_init as repo:
439 blob = objects.Blob()
439 blob = objects.Blob()
440 blob.set_raw_string(content)
440 blob.set_raw_string(content)
441 repo.object_store.add_object(blob)
441 repo.object_store.add_object(blob)
442 return blob.id
442 return blob.id
443
443
444 # TODO: this is quite complex, check if that can be simplified
444 # TODO: this is quite complex, check if that can be simplified
445 @reraise_safe_exceptions
445 @reraise_safe_exceptions
446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
447 repo = self._factory.repo(wire)
447 repo = self._factory.repo(wire)
448 object_store = repo.object_store
448 object_store = repo.object_store
449
449
450 # Create tree and populates it with blobs
450 # Create tree and populates it with blobs
451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
452
452
453 for node in updated:
453 for node in updated:
454 # Compute subdirs if needed
454 # Compute subdirs if needed
455 dirpath, nodename = vcspath.split(node['path'])
455 dirpath, nodename = vcspath.split(node['path'])
456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
457 parent = commit_tree
457 parent = commit_tree
458 ancestors = [('', parent)]
458 ancestors = [('', parent)]
459
459
460 # Tries to dig for the deepest existing tree
460 # Tries to dig for the deepest existing tree
461 while dirnames:
461 while dirnames:
462 curdir = dirnames.pop(0)
462 curdir = dirnames.pop(0)
463 try:
463 try:
464 dir_id = parent[curdir][1]
464 dir_id = parent[curdir][1]
465 except KeyError:
465 except KeyError:
466 # put curdir back into dirnames and stops
466 # put curdir back into dirnames and stops
467 dirnames.insert(0, curdir)
467 dirnames.insert(0, curdir)
468 break
468 break
469 else:
469 else:
470 # If found, updates parent
470 # If found, updates parent
471 parent = repo[dir_id]
471 parent = repo[dir_id]
472 ancestors.append((curdir, parent))
472 ancestors.append((curdir, parent))
473 # Now parent is deepest existing tree and we need to create
473 # Now parent is deepest existing tree and we need to create
474 # subtrees for dirnames (in reverse order)
474 # subtrees for dirnames (in reverse order)
475 # [this only applies for nodes from added]
475 # [this only applies for nodes from added]
476 new_trees = []
476 new_trees = []
477
477
478 blob = objects.Blob.from_string(node['content'])
478 blob = objects.Blob.from_string(node['content'])
479
479
480 if dirnames:
480 if dirnames:
481 # If there are trees which should be created we need to build
481 # If there are trees which should be created we need to build
482 # them now (in reverse order)
482 # them now (in reverse order)
483 reversed_dirnames = list(reversed(dirnames))
483 reversed_dirnames = list(reversed(dirnames))
484 curtree = objects.Tree()
484 curtree = objects.Tree()
485 curtree[node['node_path']] = node['mode'], blob.id
485 curtree[node['node_path']] = node['mode'], blob.id
486 new_trees.append(curtree)
486 new_trees.append(curtree)
487 for dirname in reversed_dirnames[:-1]:
487 for dirname in reversed_dirnames[:-1]:
488 newtree = objects.Tree()
488 newtree = objects.Tree()
489 newtree[dirname] = (DIR_STAT, curtree.id)
489 newtree[dirname] = (DIR_STAT, curtree.id)
490 new_trees.append(newtree)
490 new_trees.append(newtree)
491 curtree = newtree
491 curtree = newtree
492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 else:
493 else:
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495
495
496 new_trees.append(parent)
496 new_trees.append(parent)
497 # Update ancestors
497 # Update ancestors
498 reversed_ancestors = reversed(
498 reversed_ancestors = reversed(
499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 for parent, tree, path in reversed_ancestors:
500 for parent, tree, path in reversed_ancestors:
501 parent[path] = (DIR_STAT, tree.id)
501 parent[path] = (DIR_STAT, tree.id)
502 object_store.add_object(tree)
502 object_store.add_object(tree)
503
503
504 object_store.add_object(blob)
504 object_store.add_object(blob)
505 for tree in new_trees:
505 for tree in new_trees:
506 object_store.add_object(tree)
506 object_store.add_object(tree)
507
507
508 for node_path in removed:
508 for node_path in removed:
509 paths = node_path.split('/')
509 paths = node_path.split('/')
510 tree = commit_tree
510 tree = commit_tree
511 trees = [tree]
511 trees = [tree]
512 # Traverse deep into the forest...
512 # Traverse deep into the forest...
513 for path in paths:
513 for path in paths:
514 try:
514 try:
515 obj = repo[tree[path][1]]
515 obj = repo[tree[path][1]]
516 if isinstance(obj, objects.Tree):
516 if isinstance(obj, objects.Tree):
517 trees.append(obj)
517 trees.append(obj)
518 tree = obj
518 tree = obj
519 except KeyError:
519 except KeyError:
520 break
520 break
521 # Cut down the blob and all rotten trees on the way back...
521 # Cut down the blob and all rotten trees on the way back...
522 for path, tree in reversed(zip(paths, trees)):
522 for path, tree in reversed(zip(paths, trees)):
523 del tree[path]
523 del tree[path]
524 if tree:
524 if tree:
525 # This tree still has elements - don't remove it or any
525 # This tree still has elements - don't remove it or any
526 # of it's parents
526 # of it's parents
527 break
527 break
528
528
529 object_store.add_object(commit_tree)
529 object_store.add_object(commit_tree)
530
530
531 # Create commit
531 # Create commit
532 commit = objects.Commit()
532 commit = objects.Commit()
533 commit.tree = commit_tree.id
533 commit.tree = commit_tree.id
534 for k, v in commit_data.iteritems():
534 for k, v in commit_data.iteritems():
535 setattr(commit, k, v)
535 setattr(commit, k, v)
536 object_store.add_object(commit)
536 object_store.add_object(commit)
537
537
538 self.create_branch(wire, branch, commit.id)
538 self.create_branch(wire, branch, commit.id)
539
539
540 # dulwich set-ref
540 # dulwich set-ref
541 ref = 'refs/heads/%s' % branch
541 ref = 'refs/heads/%s' % branch
542 repo.refs[ref] = commit.id
542 repo.refs[ref] = commit.id
543
543
544 return commit.id
544 return commit.id
545
545
546 @reraise_safe_exceptions
546 @reraise_safe_exceptions
547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 if url != 'default' and '://' not in url:
548 if url != 'default' and '://' not in url:
549 client = LocalGitClient(url)
549 client = LocalGitClient(url)
550 else:
550 else:
551 url_obj = url_parser(url)
551 url_obj = url_parser(url)
552 o = self._build_opener(url)
552 o = self._build_opener(url)
553 url, _ = url_obj.authinfo()
553 url, _ = url_obj.authinfo()
554 client = HttpGitClient(base_url=url, opener=o)
554 client = HttpGitClient(base_url=url, opener=o)
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556
556
557 determine_wants = repo.object_store.determine_wants_all
557 determine_wants = repo.object_store.determine_wants_all
558 if refs:
558 if refs:
559 def determine_wants_requested(references):
559 def determine_wants_requested(references):
560 return [references[r] for r in references if r in refs]
560 return [references[r] for r in references if r in refs]
561 determine_wants = determine_wants_requested
561 determine_wants = determine_wants_requested
562
562
563 try:
563 try:
564 remote_refs = client.fetch(
564 remote_refs = client.fetch(
565 path=url, target=repo, determine_wants=determine_wants)
565 path=url, target=repo, determine_wants=determine_wants)
566 except NotGitRepository as e:
566 except NotGitRepository as e:
567 log.warning(
567 log.warning(
568 'Trying to fetch from "%s" failed, not a Git repository.', url)
568 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 # Exception can contain unicode which we convert
569 # Exception can contain unicode which we convert
570 raise exceptions.AbortException(e)(repr(e))
570 raise exceptions.AbortException(e)(repr(e))
571
571
572 # mikhail: client.fetch() returns all the remote refs, but fetches only
572 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 # refs filtered by `determine_wants` function. We need to filter result
573 # refs filtered by `determine_wants` function. We need to filter result
574 # as well
574 # as well
575 if refs:
575 if refs:
576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577
577
578 if apply_refs:
578 if apply_refs:
579 # TODO: johbo: Needs proper test coverage with a git repository
579 # TODO: johbo: Needs proper test coverage with a git repository
580 # that contains a tag object, so that we would end up with
580 # that contains a tag object, so that we would end up with
581 # a peeled ref at this point.
581 # a peeled ref at this point.
582 for k in remote_refs:
582 for k in remote_refs:
583 if k.endswith(PEELED_REF_MARKER):
583 if k.endswith(PEELED_REF_MARKER):
584 log.debug("Skipping peeled reference %s", k)
584 log.debug("Skipping peeled reference %s", k)
585 continue
585 continue
586 repo[k] = remote_refs[k]
586 repo[k] = remote_refs[k]
587
587
588 if refs and not update_after:
588 if refs and not update_after:
589 # mikhail: explicitly set the head to the last ref.
589 # mikhail: explicitly set the head to the last ref.
590 repo['HEAD'] = remote_refs[refs[-1]]
590 repo['HEAD'] = remote_refs[refs[-1]]
591
591
592 if update_after:
592 if update_after:
593 # we want to checkout HEAD
593 # we want to checkout HEAD
594 repo["HEAD"] = remote_refs["HEAD"]
594 repo["HEAD"] = remote_refs["HEAD"]
595 index.build_index_from_tree(repo.path, repo.index_path(),
595 index.build_index_from_tree(repo.path, repo.index_path(),
596 repo.object_store, repo["HEAD"].tree)
596 repo.object_store, repo["HEAD"].tree)
597 return remote_refs
597 return remote_refs
598
598
599 @reraise_safe_exceptions
599 @reraise_safe_exceptions
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
602 if refs and not isinstance(refs, (list, tuple)):
602 if refs and not isinstance(refs, (list, tuple)):
603 refs = [refs]
603 refs = [refs]
604
604
605 config = self._wire_to_config(wire)
605 config = self._wire_to_config(wire)
606 # get all remote refs we'll use to fetch later
606 # get all remote refs we'll use to fetch later
607 cmd = ['ls-remote']
607 cmd = ['ls-remote']
608 if not all_refs:
608 if not all_refs:
609 cmd += ['--heads', '--tags']
609 cmd += ['--heads', '--tags']
610 cmd += [url]
610 cmd += [url]
611 output, __ = self.run_git_command(
611 output, __ = self.run_git_command(
612 wire, cmd, fail_on_stderr=False,
612 wire, cmd, fail_on_stderr=False,
613 _copts=self._remote_conf(config),
613 _copts=self._remote_conf(config),
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615
615
616 remote_refs = collections.OrderedDict()
616 remote_refs = collections.OrderedDict()
617 fetch_refs = []
617 fetch_refs = []
618
618
619 for ref_line in output.splitlines():
619 for ref_line in output.splitlines():
620 sha, ref = ref_line.split('\t')
620 sha, ref = ref_line.split('\t')
621 sha = sha.strip()
621 sha = sha.strip()
622 if ref in remote_refs:
622 if ref in remote_refs:
623 # duplicate, skip
623 # duplicate, skip
624 continue
624 continue
625 if ref.endswith(PEELED_REF_MARKER):
625 if ref.endswith(PEELED_REF_MARKER):
626 log.debug("Skipping peeled reference %s", ref)
626 log.debug("Skipping peeled reference %s", ref)
627 continue
627 continue
628 # don't sync HEAD
628 # don't sync HEAD
629 if ref in ['HEAD']:
629 if ref in ['HEAD']:
630 continue
630 continue
631
631
632 remote_refs[ref] = sha
632 remote_refs[ref] = sha
633
633
634 if refs and sha in refs:
634 if refs and sha in refs:
635 # we filter fetch using our specified refs
635 # we filter fetch using our specified refs
636 fetch_refs.append('{}:{}'.format(ref, ref))
636 fetch_refs.append('{}:{}'.format(ref, ref))
637 elif not refs:
637 elif not refs:
638 fetch_refs.append('{}:{}'.format(ref, ref))
638 fetch_refs.append('{}:{}'.format(ref, ref))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640
640
641 if fetch_refs:
641 if fetch_refs:
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 fetch_refs_chunks = list(chunk)
643 fetch_refs_chunks = list(chunk)
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 _out, _err = self.run_git_command(
645 _out, _err = self.run_git_command(
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 fail_on_stderr=False,
647 fail_on_stderr=False,
648 _copts=self._remote_conf(config),
648 _copts=self._remote_conf(config),
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650
650
651 return remote_refs
651 return remote_refs
652
652
653 @reraise_safe_exceptions
653 @reraise_safe_exceptions
654 def sync_push(self, wire, url, refs=None):
654 def sync_push(self, wire, url, refs=None):
655 if not self.check_url(url, wire):
655 if not self.check_url(url, wire):
656 return
656 return
657 config = self._wire_to_config(wire)
657 config = self._wire_to_config(wire)
658 self._factory.repo(wire)
658 self._factory.repo(wire)
659 self.run_git_command(
659 self.run_git_command(
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 _copts=self._remote_conf(config),
661 _copts=self._remote_conf(config),
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663
663
664 @reraise_safe_exceptions
664 @reraise_safe_exceptions
665 def get_remote_refs(self, wire, url):
665 def get_remote_refs(self, wire, url):
666 repo = Repo(url)
666 repo = Repo(url)
667 return repo.get_refs()
667 return repo.get_refs()
668
668
669 @reraise_safe_exceptions
669 @reraise_safe_exceptions
670 def get_description(self, wire):
670 def get_description(self, wire):
671 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
672 return repo.get_description()
672 return repo.get_description()
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def get_missing_revs(self, wire, rev1, rev2, path2):
675 def get_missing_revs(self, wire, rev1, rev2, path2):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
678
678
679 wire_remote = wire.copy()
679 wire_remote = wire.copy()
680 wire_remote['path'] = path2
680 wire_remote['path'] = path2
681 repo_remote = self._factory.repo(wire_remote)
681 repo_remote = self._factory.repo(wire_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683
683
684 revs = [
684 revs = [
685 x.commit.id
685 x.commit.id
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 return revs
687 return revs
688
688
689 @reraise_safe_exceptions
689 @reraise_safe_exceptions
690 def get_object(self, wire, sha, maybe_unreachable=False):
690 def get_object(self, wire, sha, maybe_unreachable=False):
691 cache_on, context_uid, repo_id = self._cache_on(wire)
691 cache_on, context_uid, repo_id = self._cache_on(wire)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
693 def _get_object(_context_uid, _repo_id, _sha):
693 def _get_object(_context_uid, _repo_id, _sha):
694 repo_init = self._factory.repo_libgit2(wire)
694 repo_init = self._factory.repo_libgit2(wire)
695 with repo_init as repo:
695 with repo_init as repo:
696
696
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
698 try:
698 try:
699 commit = repo.revparse_single(sha)
699 commit = repo.revparse_single(sha)
700 except KeyError:
700 except KeyError:
701 # NOTE(marcink): KeyError doesn't give us any meaningful information
701 # NOTE(marcink): KeyError doesn't give us any meaningful information
702 # here, so we raise something more explicit instead
702 # here, so we raise something more explicit instead
703 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
703 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
704 raise exceptions.LookupException(e)(missing_commit_err)
704 raise exceptions.LookupException(e)(missing_commit_err)
705 except ValueError as e:
705 except ValueError as e:
706 raise exceptions.LookupException(e)(missing_commit_err)
706 raise exceptions.LookupException(e)(missing_commit_err)
707
707
708 is_tag = False
708 is_tag = False
709 if isinstance(commit, pygit2.Tag):
709 if isinstance(commit, pygit2.Tag):
710 commit = repo.get(commit.target)
710 commit = repo.get(commit.target)
711 is_tag = True
711 is_tag = True
712
712
713 check_dangling = True
713 check_dangling = True
714 if is_tag:
714 if is_tag:
715 check_dangling = False
715 check_dangling = False
716
716
717 if check_dangling and maybe_unreachable:
717 if check_dangling and maybe_unreachable:
718 check_dangling = False
718 check_dangling = False
719
719
720 # the input was a reference (not a raw sha) that resolved, so the commit is not dangling
720 # the input was a reference (not a raw sha) that resolved, so the commit is not dangling
721 if sha != commit.hex:
721 if sha != commit.hex:
722 check_dangling = False
722 check_dangling = False
723
723
724 if check_dangling:
724 if check_dangling:
725 # check for dangling commit
725 # check for dangling commit
726 for branch in repo.branches.with_commit(commit.hex):
726 for branch in repo.branches.with_commit(commit.hex):
727 if branch:
727 if branch:
728 break
728 break
729 else:
729 else:
730 # NOTE(marcink): Empty error doesn't give us any meaningful information
730 # NOTE(marcink): Empty error doesn't give us any meaningful information
731 # here, so we raise something more explicit instead
731 # here, so we raise something more explicit instead
732 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
732 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
733 raise exceptions.LookupException(e)(missing_commit_err)
733 raise exceptions.LookupException(e)(missing_commit_err)
734
734
735 commit_id = commit.hex
735 commit_id = commit.hex
736 type_id = commit.type
736 type_id = commit.type
737
737
738 return {
738 return {
739 'id': commit_id,
739 'id': commit_id,
740 'type': self._type_id_to_name(type_id),
740 'type': self._type_id_to_name(type_id),
741 'commit_id': commit_id,
741 'commit_id': commit_id,
742 'idx': 0
742 'idx': 0
743 }
743 }
744
744
745 return _get_object(context_uid, repo_id, sha)
745 return _get_object(context_uid, repo_id, sha)
746
746
747 @reraise_safe_exceptions
747 @reraise_safe_exceptions
748 def get_refs(self, wire):
748 def get_refs(self, wire):
749 cache_on, context_uid, repo_id = self._cache_on(wire)
749 cache_on, context_uid, repo_id = self._cache_on(wire)
750 @self.region.conditional_cache_on_arguments(condition=cache_on)
750 @self.region.conditional_cache_on_arguments(condition=cache_on)
751 def _get_refs(_context_uid, _repo_id):
751 def _get_refs(_context_uid, _repo_id):
752
752
753 repo_init = self._factory.repo_libgit2(wire)
753 repo_init = self._factory.repo_libgit2(wire)
754 with repo_init as repo:
754 with repo_init as repo:
755 regex = re.compile('^refs/(heads|tags)/')
755 regex = re.compile('^refs/(heads|tags)/')
756 return {x.name: x.target.hex for x in
756 return {x.name: x.target.hex for x in
757 filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}
757 filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}
758
758
759 return _get_refs(context_uid, repo_id)
759 return _get_refs(context_uid, repo_id)
760
760
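# For illustration only, the mapping returned by get_refs looks like
# (SHAs below are made up):
#
#     {'refs/heads/master': 'b5a1c3d...', 'refs/tags/v1.0.0': '9f0d2e1...'}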
761 @reraise_safe_exceptions
761 @reraise_safe_exceptions
762 def get_branch_pointers(self, wire):
762 def get_branch_pointers(self, wire):
763 cache_on, context_uid, repo_id = self._cache_on(wire)
763 cache_on, context_uid, repo_id = self._cache_on(wire)
764 @self.region.conditional_cache_on_arguments(condition=cache_on)
764 @self.region.conditional_cache_on_arguments(condition=cache_on)
765 def _get_branch_pointers(_context_uid, _repo_id):
765 def _get_branch_pointers(_context_uid, _repo_id):
766
766
767 repo_init = self._factory.repo_libgit2(wire)
767 repo_init = self._factory.repo_libgit2(wire)
768 regex = re.compile('^refs/heads')
768 regex = re.compile('^refs/heads')
769 with repo_init as repo:
769 with repo_init as repo:
770 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
770 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
771 return {x.target.hex: x.shorthand for x in branches}
771 return {x.target.hex: x.shorthand for x in branches}
772
772
773 return _get_branch_pointers(context_uid, repo_id)
773 return _get_branch_pointers(context_uid, repo_id)
774
774
775 @reraise_safe_exceptions
775 @reraise_safe_exceptions
776 def head(self, wire, show_exc=True):
776 def head(self, wire, show_exc=True):
777 cache_on, context_uid, repo_id = self._cache_on(wire)
777 cache_on, context_uid, repo_id = self._cache_on(wire)
778 @self.region.conditional_cache_on_arguments(condition=cache_on)
778 @self.region.conditional_cache_on_arguments(condition=cache_on)
779 def _head(_context_uid, _repo_id, _show_exc):
779 def _head(_context_uid, _repo_id, _show_exc):
780 repo_init = self._factory.repo_libgit2(wire)
780 repo_init = self._factory.repo_libgit2(wire)
781 with repo_init as repo:
781 with repo_init as repo:
782 try:
782 try:
783 return repo.head.peel().hex
783 return repo.head.peel().hex
784 except Exception:
784 except Exception:
785 if show_exc:
785 if show_exc:
786 raise
786 raise
787 return _head(context_uid, repo_id, show_exc)
787 return _head(context_uid, repo_id, show_exc)
788
788
789 @reraise_safe_exceptions
789 @reraise_safe_exceptions
790 def init(self, wire):
790 def init(self, wire):
791 repo_path = str_to_dulwich(wire['path'])
791 repo_path = str_to_dulwich(wire['path'])
792 self.repo = Repo.init(repo_path)
792 self.repo = Repo.init(repo_path)
793
793
794 @reraise_safe_exceptions
794 @reraise_safe_exceptions
795 def init_bare(self, wire):
795 def init_bare(self, wire):
796 repo_path = str_to_dulwich(wire['path'])
796 repo_path = str_to_dulwich(wire['path'])
797 self.repo = Repo.init_bare(repo_path)
797 self.repo = Repo.init_bare(repo_path)
798
798
799 @reraise_safe_exceptions
799 @reraise_safe_exceptions
800 def revision(self, wire, rev):
800 def revision(self, wire, rev):
801
801
802 cache_on, context_uid, repo_id = self._cache_on(wire)
802 cache_on, context_uid, repo_id = self._cache_on(wire)
803 @self.region.conditional_cache_on_arguments(condition=cache_on)
803 @self.region.conditional_cache_on_arguments(condition=cache_on)
804 def _revision(_context_uid, _repo_id, _rev):
804 def _revision(_context_uid, _repo_id, _rev):
805 repo_init = self._factory.repo_libgit2(wire)
805 repo_init = self._factory.repo_libgit2(wire)
806 with repo_init as repo:
806 with repo_init as repo:
807 commit = repo[rev]
807 commit = repo[rev]
808 obj_data = {
808 obj_data = {
809 'id': commit.id.hex,
809 'id': commit.id.hex,
810 }
810 }
811 # tree objects themselves don't have a tree_id attribute
811 # tree objects themselves don't have a tree_id attribute
812 if hasattr(commit, 'tree_id'):
812 if hasattr(commit, 'tree_id'):
813 obj_data['tree'] = commit.tree_id.hex
813 obj_data['tree'] = commit.tree_id.hex
814
814
815 return obj_data
815 return obj_data
816 return _revision(context_uid, repo_id, rev)
816 return _revision(context_uid, repo_id, rev)
817
817
818 @reraise_safe_exceptions
818 @reraise_safe_exceptions
819 def date(self, wire, commit_id):
819 def date(self, wire, commit_id):
820 cache_on, context_uid, repo_id = self._cache_on(wire)
820 cache_on, context_uid, repo_id = self._cache_on(wire)
821 @self.region.conditional_cache_on_arguments(condition=cache_on)
821 @self.region.conditional_cache_on_arguments(condition=cache_on)
822 def _date(_repo_id, _commit_id):
822 def _date(_repo_id, _commit_id):
823 repo_init = self._factory.repo_libgit2(wire)
823 repo_init = self._factory.repo_libgit2(wire)
824 with repo_init as repo:
824 with repo_init as repo:
825 commit = repo[commit_id]
825 commit = repo[commit_id]
826
826
827 if hasattr(commit, 'commit_time'):
827 if hasattr(commit, 'commit_time'):
828 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
828 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
829 else:
829 else:
830 commit = commit.get_object()
830 commit = commit.get_object()
831 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
831 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
832
832
833 # TODO(marcink): check dulwich difference of offset vs timezone
833 # TODO(marcink): check dulwich difference of offset vs timezone
834 return [commit_time, commit_time_offset]
834 return [commit_time, commit_time_offset]
835 return _date(repo_id, commit_id)
835 return _date(repo_id, commit_id)
836
836
837 @reraise_safe_exceptions
837 @reraise_safe_exceptions
838 def author(self, wire, commit_id):
838 def author(self, wire, commit_id):
839 cache_on, context_uid, repo_id = self._cache_on(wire)
839 cache_on, context_uid, repo_id = self._cache_on(wire)
840 @self.region.conditional_cache_on_arguments(condition=cache_on)
840 @self.region.conditional_cache_on_arguments(condition=cache_on)
841 def _author(_repo_id, _commit_id):
841 def _author(_repo_id, _commit_id):
842 repo_init = self._factory.repo_libgit2(wire)
842 repo_init = self._factory.repo_libgit2(wire)
843 with repo_init as repo:
843 with repo_init as repo:
844 commit = repo[commit_id]
844 commit = repo[commit_id]
845
845
846 if hasattr(commit, 'author'):
846 if hasattr(commit, 'author'):
847 author = commit.author
847 author = commit.author
848 else:
848 else:
849 author = commit.get_object().author
849 author = commit.get_object().author
850
850
851 if author.email:
851 if author.email:
852 return u"{} <{}>".format(author.name, author.email)
852 return u"{} <{}>".format(author.name, author.email)
853
853
854 try:
854 try:
855 return u"{}".format(author.name)
855 return u"{}".format(author.name)
856 except Exception:
856 except Exception:
857 return u"{}".format(safe_unicode(author.raw_name))
857 return u"{}".format(safe_unicode(author.raw_name))
858
858
859 return _author(repo_id, commit_id)
859 return _author(repo_id, commit_id)
860
860
861 @reraise_safe_exceptions
861 @reraise_safe_exceptions
862 def message(self, wire, commit_id):
862 def message(self, wire, commit_id):
863 cache_on, context_uid, repo_id = self._cache_on(wire)
863 cache_on, context_uid, repo_id = self._cache_on(wire)
864 @self.region.conditional_cache_on_arguments(condition=cache_on)
864 @self.region.conditional_cache_on_arguments(condition=cache_on)
865 def _message(_repo_id, _commit_id):
865 def _message(_repo_id, _commit_id):
866 repo_init = self._factory.repo_libgit2(wire)
866 repo_init = self._factory.repo_libgit2(wire)
867 with repo_init as repo:
867 with repo_init as repo:
868 commit = repo[commit_id]
868 commit = repo[commit_id]
869 return commit.message
869 return commit.message
870 return _message(repo_id, commit_id)
870 return _message(repo_id, commit_id)
871
871
872 @reraise_safe_exceptions
872 @reraise_safe_exceptions
873 def parents(self, wire, commit_id):
873 def parents(self, wire, commit_id):
874 cache_on, context_uid, repo_id = self._cache_on(wire)
874 cache_on, context_uid, repo_id = self._cache_on(wire)
875 @self.region.conditional_cache_on_arguments(condition=cache_on)
875 @self.region.conditional_cache_on_arguments(condition=cache_on)
876 def _parents(_repo_id, _commit_id):
876 def _parents(_repo_id, _commit_id):
877 repo_init = self._factory.repo_libgit2(wire)
877 repo_init = self._factory.repo_libgit2(wire)
878 with repo_init as repo:
878 with repo_init as repo:
879 commit = repo[commit_id]
879 commit = repo[commit_id]
880 if hasattr(commit, 'parent_ids'):
880 if hasattr(commit, 'parent_ids'):
881 parent_ids = commit.parent_ids
881 parent_ids = commit.parent_ids
882 else:
882 else:
883 parent_ids = commit.get_object().parent_ids
883 parent_ids = commit.get_object().parent_ids
884
884
885 return [x.hex for x in parent_ids]
885 return [x.hex for x in parent_ids]
886 return _parents(repo_id, commit_id)
886 return _parents(repo_id, commit_id)
887
887
888 @reraise_safe_exceptions
888 @reraise_safe_exceptions
889 def children(self, wire, commit_id):
889 def children(self, wire, commit_id):
890 cache_on, context_uid, repo_id = self._cache_on(wire)
890 cache_on, context_uid, repo_id = self._cache_on(wire)
891 @self.region.conditional_cache_on_arguments(condition=cache_on)
891 @self.region.conditional_cache_on_arguments(condition=cache_on)
892 def _children(_repo_id, _commit_id):
892 def _children(_repo_id, _commit_id):
893 output, __ = self.run_git_command(
893 output, __ = self.run_git_command(
894 wire, ['rev-list', '--all', '--children'])
894 wire, ['rev-list', '--all', '--children'])
895
895
896 child_ids = []
896 child_ids = []
897 pat = re.compile(r'^%s' % commit_id)
897 pat = re.compile(r'^%s' % commit_id)
898 for l in output.splitlines():
898 for l in output.splitlines():
899 if pat.match(l):
899 if pat.match(l):
900 found_ids = l.split(' ')[1:]
900 found_ids = l.split(' ')[1:]
901 child_ids.extend(found_ids)
901 child_ids.extend(found_ids)
902
902
903 return child_ids
903 return child_ids
904 return _children(repo_id, commit_id)
904 return _children(repo_id, commit_id)
905
905
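# Sketch of the output the parsing above relies on (SHAs are made up):
# `rev-list --all --children` prints one line per commit of the form
#
#     <commit_sha> <child_sha_1> <child_sha_2> ...
#
# so the line starting with commit_id, split on spaces and dropping the
# first field, yields the direct children.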
906 @reraise_safe_exceptions
906 @reraise_safe_exceptions
907 def set_refs(self, wire, key, value):
907 def set_refs(self, wire, key, value):
908 repo_init = self._factory.repo_libgit2(wire)
908 repo_init = self._factory.repo_libgit2(wire)
909 with repo_init as repo:
909 with repo_init as repo:
910 repo.references.create(key, value, force=True)
910 repo.references.create(key, value, force=True)
911
911
912 @reraise_safe_exceptions
912 @reraise_safe_exceptions
913 def create_branch(self, wire, branch_name, commit_id, force=False):
913 def create_branch(self, wire, branch_name, commit_id, force=False):
914 repo_init = self._factory.repo_libgit2(wire)
914 repo_init = self._factory.repo_libgit2(wire)
915 with repo_init as repo:
915 with repo_init as repo:
916 commit = repo[commit_id]
916 commit = repo[commit_id]
917
917
918 if force:
918 if force:
919 repo.branches.local.create(branch_name, commit, force=force)
919 repo.branches.local.create(branch_name, commit, force=force)
920 elif not repo.branches.get(branch_name):
920 elif not repo.branches.get(branch_name):
921 # create only if that branch doesn't exist yet
921 # create only if that branch doesn't exist yet
922 repo.branches.local.create(branch_name, commit, force=force)
922 repo.branches.local.create(branch_name, commit, force=force)
923
923
924 @reraise_safe_exceptions
924 @reraise_safe_exceptions
925 def remove_ref(self, wire, key):
925 def remove_ref(self, wire, key):
926 repo_init = self._factory.repo_libgit2(wire)
926 repo_init = self._factory.repo_libgit2(wire)
927 with repo_init as repo:
927 with repo_init as repo:
928 repo.references.delete(key)
928 repo.references.delete(key)
929
929
930 @reraise_safe_exceptions
930 @reraise_safe_exceptions
931 def tag_remove(self, wire, tag_name):
931 def tag_remove(self, wire, tag_name):
932 repo_init = self._factory.repo_libgit2(wire)
932 repo_init = self._factory.repo_libgit2(wire)
933 with repo_init as repo:
933 with repo_init as repo:
934 key = 'refs/tags/{}'.format(tag_name)
934 key = 'refs/tags/{}'.format(tag_name)
935 repo.references.delete(key)
935 repo.references.delete(key)
936
936
937 @reraise_safe_exceptions
937 @reraise_safe_exceptions
938 def tree_changes(self, wire, source_id, target_id):
938 def tree_changes(self, wire, source_id, target_id):
939 # TODO(marcink): remove this, it seems to be used only by tests
939 # TODO(marcink): remove this, it seems to be used only by tests
940 repo = self._factory.repo(wire)
940 repo = self._factory.repo(wire)
941 source = repo[source_id].tree if source_id else None
941 source = repo[source_id].tree if source_id else None
942 target = repo[target_id].tree
942 target = repo[target_id].tree
943 result = repo.object_store.tree_changes(source, target)
943 result = repo.object_store.tree_changes(source, target)
944 return list(result)
944 return list(result)
945
945
946 @reraise_safe_exceptions
946 @reraise_safe_exceptions
947 def tree_and_type_for_path(self, wire, commit_id, path):
947 def tree_and_type_for_path(self, wire, commit_id, path):
948
948
949 cache_on, context_uid, repo_id = self._cache_on(wire)
949 cache_on, context_uid, repo_id = self._cache_on(wire)
950 @self.region.conditional_cache_on_arguments(condition=cache_on)
950 @self.region.conditional_cache_on_arguments(condition=cache_on)
951 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
951 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
952 repo_init = self._factory.repo_libgit2(wire)
952 repo_init = self._factory.repo_libgit2(wire)
953
953
954 with repo_init as repo:
954 with repo_init as repo:
955 commit = repo[commit_id]
955 commit = repo[commit_id]
956 try:
956 try:
957 tree = commit.tree[path]
957 tree = commit.tree[path]
958 except KeyError:
958 except KeyError:
959 return None, None, None
959 return None, None, None
960
960
961 return tree.id.hex, tree.type, tree.filemode
961 return tree.id.hex, tree.type, tree.filemode
962 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
962 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
963
963
964 @reraise_safe_exceptions
964 @reraise_safe_exceptions
965 def tree_items(self, wire, tree_id):
965 def tree_items(self, wire, tree_id):
966 cache_on, context_uid, repo_id = self._cache_on(wire)
966 cache_on, context_uid, repo_id = self._cache_on(wire)
967 @self.region.conditional_cache_on_arguments(condition=cache_on)
967 @self.region.conditional_cache_on_arguments(condition=cache_on)
968 def _tree_items(_repo_id, _tree_id):
968 def _tree_items(_repo_id, _tree_id):
969
969
970 repo_init = self._factory.repo_libgit2(wire)
970 repo_init = self._factory.repo_libgit2(wire)
971 with repo_init as repo:
971 with repo_init as repo:
972 try:
972 try:
973 tree = repo[tree_id]
973 tree = repo[tree_id]
974 except KeyError:
974 except KeyError:
975 raise ObjectMissing('No tree with id: {}'.format(tree_id))
975 raise ObjectMissing('No tree with id: {}'.format(tree_id))
976
976
977 result = []
977 result = []
978 for item in tree:
978 for item in tree:
979 item_sha = item.hex
979 item_sha = item.hex
980 item_mode = item.filemode
980 item_mode = item.filemode
981 item_type = item.type
981 item_type = item.type
982
982
983 if item_type == 'commit':
983 if item_type == 'commit':
984 # NOTE(marcink): we translate submodules to 'link' for backward compat
984 # NOTE(marcink): we translate submodules to 'link' for backward compat
985 item_type = 'link'
985 item_type = 'link'
986
986
987 result.append((item.name, item_mode, item_sha, item_type))
987 result.append((item.name, item_mode, item_sha, item_type))
988 return result
988 return result
989 return _tree_items(repo_id, tree_id)
989 return _tree_items(repo_id, tree_id)
990
990
991 @reraise_safe_exceptions
991 @reraise_safe_exceptions
992 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
992 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
993 """
993 """
994 Old version that uses subprocess to call diff
994 Old version that uses subprocess to call diff
995 """
995 """
996
996
997 flags = [
997 flags = [
998 '-U%s' % context, '--patch',
998 '-U%s' % context, '--patch',
999 '--binary',
999 '--binary',
1000 '--find-renames',
1000 '--find-renames',
1001 '--no-indent-heuristic',
1001 '--no-indent-heuristic',
1002 # '--indent-heuristic',
1002 # '--indent-heuristic',
1003 #'--full-index',
1003 #'--full-index',
1004 #'--abbrev=40'
1004 #'--abbrev=40'
1005 ]
1005 ]
1006
1006
1007 if opt_ignorews:
1007 if opt_ignorews:
1008 flags.append('--ignore-all-space')
1008 flags.append('--ignore-all-space')
1009
1009
1010 if commit_id_1 == self.EMPTY_COMMIT:
1010 if commit_id_1 == self.EMPTY_COMMIT:
1011 cmd = ['show'] + flags + [commit_id_2]
1011 cmd = ['show'] + flags + [commit_id_2]
1012 else:
1012 else:
1013 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1013 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1014
1014
1015 if file_filter:
1015 if file_filter:
1016 cmd.extend(['--', file_filter])
1016 cmd.extend(['--', file_filter])
1017
1017
1018 diff, __ = self.run_git_command(wire, cmd)
1018 diff, __ = self.run_git_command(wire, cmd)
1019 # If we used the 'show' command, strip the first few lines
1019 # If we used the 'show' command, strip the first few lines
1020 # (until the actual diff starts)
1020 # (until the actual diff starts)
1021 if commit_id_1 == self.EMPTY_COMMIT:
1021 if commit_id_1 == self.EMPTY_COMMIT:
1022 lines = diff.splitlines()
1022 lines = diff.splitlines()
1023 x = 0
1023 x = 0
1024 for line in lines:
1024 for line in lines:
1025 if line.startswith('diff'):
1025 if line.startswith('diff'):
1026 break
1026 break
1027 x += 1
1027 x += 1
1028 # Append a trailing newline, just like the 'diff' command does
1028 # Append a trailing newline, just like the 'diff' command does
1029 diff = '\n'.join(lines[x:]) + '\n'
1029 diff = '\n'.join(lines[x:]) + '\n'
1030 return diff
1030 return diff
1031
1031
1032 @reraise_safe_exceptions
1032 @reraise_safe_exceptions
1033 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1033 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1034 repo_init = self._factory.repo_libgit2(wire)
1034 repo_init = self._factory.repo_libgit2(wire)
1035 with repo_init as repo:
1035 with repo_init as repo:
1036 swap = True
1036 swap = True
1037 flags = 0
1037 flags = 0
1038 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1038 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1039
1039
1040 if opt_ignorews:
1040 if opt_ignorews:
1041 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1041 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1042
1042
1043 if commit_id_1 == self.EMPTY_COMMIT:
1043 if commit_id_1 == self.EMPTY_COMMIT:
1044 comm1 = repo[commit_id_2]
1044 comm1 = repo[commit_id_2]
1045 diff_obj = comm1.tree.diff_to_tree(
1045 diff_obj = comm1.tree.diff_to_tree(
1046 flags=flags, context_lines=context, swap=swap)
1046 flags=flags, context_lines=context, swap=swap)
1047
1047
1048 else:
1048 else:
1049 comm1 = repo[commit_id_2]
1049 comm1 = repo[commit_id_2]
1050 comm2 = repo[commit_id_1]
1050 comm2 = repo[commit_id_1]
1051 diff_obj = comm1.tree.diff_to_tree(
1051 diff_obj = comm1.tree.diff_to_tree(
1052 comm2.tree, flags=flags, context_lines=context, swap=swap)
1052 comm2.tree, flags=flags, context_lines=context, swap=swap)
1053 similar_flags = 0
1053 similar_flags = 0
1054 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1054 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1055 diff_obj.find_similar(flags=similar_flags)
1055 diff_obj.find_similar(flags=similar_flags)
1056
1056
1057 if file_filter:
1057 if file_filter:
1058 for p in diff_obj:
1058 for p in diff_obj:
1059 if p.delta.old_file.path == file_filter:
1059 if p.delta.old_file.path == file_filter:
1060 return p.patch or ''
1060 return p.patch or ''
1061 # no matching path == no diff
1061 # no matching path == no diff
1062 return ''
1062 return ''
1063 return diff_obj.patch or ''
1063 return diff_obj.patch or ''
1064
1064
1065 @reraise_safe_exceptions
1065 @reraise_safe_exceptions
1066 def node_history(self, wire, commit_id, path, limit):
1066 def node_history(self, wire, commit_id, path, limit):
1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1070 # optimize for n==1, rev-list is much faster for that use-case
1070 # optimize for n==1, rev-list is much faster for that use-case
1071 if limit == 1:
1071 if limit == 1:
1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1073 else:
1073 else:
1074 cmd = ['log']
1074 cmd = ['log']
1075 if limit:
1075 if limit:
1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1078
1078
1079 output, __ = self.run_git_command(wire, cmd)
1079 output, __ = self.run_git_command(wire, cmd)
1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1081
1081
1082 return commit_ids
1082 return commit_ids
1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1084
1084
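# Illustrative command lines built by _node_history above (arguments are
# example values, not fixed API):
#
#     git rev-list -1 <commit_id> -- <path>                          # limit == 1
#     git log -n 5 '--pretty=format: %H' -s <commit_id> -- <path>    # limit == 5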
1085 @reraise_safe_exceptions
1085 @reraise_safe_exceptions
1086 def node_annotate(self, wire, commit_id, path):
1086 def node_annotate(self, wire, commit_id, path):
1087
1087
1088 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1088 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1089 # -l ==> outputs long shas (and we need all 40 characters)
1089 # -l ==> outputs long shas (and we need all 40 characters)
1090 # --root ==> doesn't put '^' character for boundaries
1090 # --root ==> doesn't put '^' character for boundaries
1091 # -r commit_id ==> blames for the given commit
1091 # -r commit_id ==> blames for the given commit
1092 output, __ = self.run_git_command(wire, cmd)
1092 output, __ = self.run_git_command(wire, cmd)
1093
1093
1094 result = []
1094 result = []
1095 for i, blame_line in enumerate(output.split('\n')[:-1]):
1095 for i, blame_line in enumerate(output.split('\n')[:-1]):
1096 line_no = i + 1
1096 line_no = i + 1
1097 commit_id, line = re.split(r' ', blame_line, 1)
1097 commit_id, line = re.split(r' ', blame_line, 1)
1098 result.append((line_no, commit_id, line))
1098 result.append((line_no, commit_id, line))
1099 return result
1099 return result
1100
1100
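# Example of the blame output shape the loop above assumes (values made up):
# each line starts with a full 40-char sha thanks to `-l`, and the first
# space separates it from the rest of the blame line, e.g.
#
#     'b5a1c3d...40-char-sha... (John Doe 2020-01-01 1) some source line'
#
# which re.split(r' ', blame_line, 1) turns into the (commit_id, line) pair.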
1101 @reraise_safe_exceptions
1101 @reraise_safe_exceptions
1102 def update_server_info(self, wire):
1102 def update_server_info(self, wire):
1103 repo = self._factory.repo(wire)
1103 repo = self._factory.repo(wire)
1104 update_server_info(repo)
1104 update_server_info(repo)
1105
1105
1106 @reraise_safe_exceptions
1106 @reraise_safe_exceptions
1107 def get_all_commit_ids(self, wire):
1107 def get_all_commit_ids(self, wire):
1108
1108
1109 cache_on, context_uid, repo_id = self._cache_on(wire)
1109 cache_on, context_uid, repo_id = self._cache_on(wire)
1110 @self.region.conditional_cache_on_arguments(condition=cache_on)
1110 @self.region.conditional_cache_on_arguments(condition=cache_on)
1111 def _get_all_commit_ids(_context_uid, _repo_id):
1111 def _get_all_commit_ids(_context_uid, _repo_id):
1112
1112
1113 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1113 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1114 try:
1114 try:
1115 output, __ = self.run_git_command(wire, cmd)
1115 output, __ = self.run_git_command(wire, cmd)
1116 return output.splitlines()
1116 return output.splitlines()
1117 except Exception:
1117 except Exception:
1118 # Can be raised for empty repositories
1118 # Can be raised for empty repositories
1119 return []
1119 return []
1120 return _get_all_commit_ids(context_uid, repo_id)
1120 return _get_all_commit_ids(context_uid, repo_id)
1121
1121
1122 @reraise_safe_exceptions
1122 @reraise_safe_exceptions
1123 def run_git_command(self, wire, cmd, **opts):
1123 def run_git_command(self, wire, cmd, **opts):
1124 path = wire.get('path', None)
1124 path = wire.get('path', None)
1125
1125
1126 if path and os.path.isdir(path):
1126 if path and os.path.isdir(path):
1127 opts['cwd'] = path
1127 opts['cwd'] = path
1128
1128
1129 if '_bare' in opts:
1129 if '_bare' in opts:
1130 _copts = []
1130 _copts = []
1131 del opts['_bare']
1131 del opts['_bare']
1132 else:
1132 else:
1133 _copts = ['-c', 'core.quotepath=false', ]
1133 _copts = ['-c', 'core.quotepath=false', ]
1134 safe_call = False
1134 safe_call = False
1135 if '_safe' in opts:
1135 if '_safe' in opts:
1136 # no exc on failure
1136 # no exc on failure
1137 del opts['_safe']
1137 del opts['_safe']
1138 safe_call = True
1138 safe_call = True
1139
1139
1140 if '_copts' in opts:
1140 if '_copts' in opts:
1141 _copts.extend(opts['_copts'] or [])
1141 _copts.extend(opts['_copts'] or [])
1142 del opts['_copts']
1142 del opts['_copts']
1143
1143
1144 gitenv = os.environ.copy()
1144 gitenv = os.environ.copy()
1145 gitenv.update(opts.pop('extra_env', {}))
1145 gitenv.update(opts.pop('extra_env', {}))
1146 # we need to clean up GIT_DIR from the environment!
1146 # we need to clean up GIT_DIR from the environment!
1147 if 'GIT_DIR' in gitenv:
1147 if 'GIT_DIR' in gitenv:
1148 del gitenv['GIT_DIR']
1148 del gitenv['GIT_DIR']
1149 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1149 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1150 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1150 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1151
1151
1152 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1152 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1153 _opts = {'env': gitenv, 'shell': False}
1153 _opts = {'env': gitenv, 'shell': False}
1154
1154
1155 proc = None
1155 proc = None
1156 try:
1156 try:
1157 _opts.update(opts)
1157 _opts.update(opts)
1158 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1158 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1159
1159
1160 return ''.join(proc), ''.join(proc.error)
1160 return ''.join(proc), ''.join(proc.error)
1161 except (EnvironmentError, OSError) as err:
1161 except (EnvironmentError, OSError) as err:
1162 cmd = ' '.join(cmd) # human friendly CMD
1162 cmd = ' '.join(cmd) # human friendly CMD
1163 tb_err = ("Couldn't run git command (%s).\n"
1163 tb_err = ("Couldn't run git command (%s).\n"
1164 "Original error was:%s\n"
1164 "Original error was:%s\n"
1165 "Call options:%s\n"
1165 "Call options:%s\n"
1166 % (cmd, err, _opts))
1166 % (cmd, err, _opts))
1167 log.exception(tb_err)
1167 log.exception(tb_err)
1168 if safe_call:
1168 if safe_call:
1169 return '', err
1169 return '', err
1170 else:
1170 else:
1171 raise exceptions.VcsException()(tb_err)
1171 raise exceptions.VcsException()(tb_err)
1172 finally:
1172 finally:
1173 if proc:
1173 if proc:
1174 proc.close()
1174 proc.close()
1175
1175
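# Minimal usage sketch, mirroring the call sites in this class (the command
# and options below are illustrative, not new API):
#
#     output, stderr = self.run_git_command(
#         wire, ['ls-remote', '--heads', url],
#         fail_on_stderr=False, _copts=self._remote_conf(config),
#         extra_env={'GIT_TERMINAL_PROMPT': '0'})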
1176 @reraise_safe_exceptions
1176 @reraise_safe_exceptions
1177 def install_hooks(self, wire, force=False):
1177 def install_hooks(self, wire, force=False):
1178 from vcsserver.hook_utils import install_git_hooks
1178 from vcsserver.hook_utils import install_git_hooks
1179 bare = self.bare(wire)
1179 bare = self.bare(wire)
1180 path = wire['path']
1180 path = wire['path']
1181 return install_git_hooks(path, bare, force_create=force)
1181 return install_git_hooks(path, bare, force_create=force)
1182
1182
1183 @reraise_safe_exceptions
1183 @reraise_safe_exceptions
1184 def get_hooks_info(self, wire):
1184 def get_hooks_info(self, wire):
1185 from vcsserver.hook_utils import (
1185 from vcsserver.hook_utils import (
1186 get_git_pre_hook_version, get_git_post_hook_version)
1186 get_git_pre_hook_version, get_git_post_hook_version)
1187 bare = self.bare(wire)
1187 bare = self.bare(wire)
1188 path = wire['path']
1188 path = wire['path']
1189 return {
1189 return {
1190 'pre_version': get_git_pre_hook_version(path, bare),
1190 'pre_version': get_git_pre_hook_version(path, bare),
1191 'post_version': get_git_post_hook_version(path, bare),
1191 'post_version': get_git_post_hook_version(path, bare),
1192 }
1192 }
@@ -1,19 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from app import create_app
19 from app import create_app
@@ -1,292 +1,292 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import logging
19 import logging
20 from wsgiref.util import FileWrapper
20 from wsgiref.util import FileWrapper
21
21
22 import simplejson as json
22 import simplejson as json
23 from pyramid.config import Configurator
23 from pyramid.config import Configurator
24 from pyramid.response import Response, FileIter
24 from pyramid.response import Response, FileIter
25 from pyramid.httpexceptions import (
25 from pyramid.httpexceptions import (
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 HTTPUnprocessableEntity)
27 HTTPUnprocessableEntity)
28
28
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.utils import safe_int
31 from vcsserver.utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
38
39
39
40 def write_response_error(http_exception, text=None):
40 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
42 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
43 _exception.content_type = content_type
44 if text:
44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
46 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
47 http_exception, text)
48 return _exception
48 return _exception
49
49
50
50
51 class AuthHeaderRequired(object):
51 class AuthHeaderRequired(object):
52 """
52 """
53 Decorator to check if the request has a proper auth header
53 Decorator to check if the request has a proper auth header
54 """
54 """
55
55
56 def __call__(self, func):
56 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
57 return get_cython_compat_decorator(self.__wrapper, func)
58
58
59 def __wrapper(self, func, *fargs, **fkwargs):
59 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
60 request = fargs[1]
61 auth = request.authorization
61 auth = request.authorization
62 if not auth:
62 if not auth:
63 return write_response_error(HTTPForbidden)
63 return write_response_error(HTTPForbidden)
64 return func(*fargs[1:], **fkwargs)
64 return func(*fargs[1:], **fkwargs)
65
65
66
66
67 # views
67 # views
68
68
69 def lfs_objects(request):
69 def lfs_objects(request):
70 # indicate that the v1 API is not supported
70 # indicate that the v1 API is not supported
71 log.warning('LFS: v1 api not supported, reporting it back to client')
71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73
73
74
74
75 @AuthHeaderRequired()
75 @AuthHeaderRequired()
76 def lfs_objects_batch(request):
76 def lfs_objects_batch(request):
77 """
77 """
78 The client sends the following information to the Batch endpoint to transfer some objects:
78 The client sends the following information to the Batch endpoint to transfer some objects:
79
79
80 operation - Should be download or upload.
80 operation - Should be download or upload.
81 transfers - An optional Array of String identifiers for transfer
81 transfers - An optional Array of String identifiers for transfer
82 adapters that the client has configured. If omitted, the basic
82 adapters that the client has configured. If omitted, the basic
83 transfer adapter MUST be assumed by the server.
83 transfer adapter MUST be assumed by the server.
84 objects - An Array of objects to download.
84 objects - An Array of objects to download.
85 oid - String OID of the LFS object.
85 oid - String OID of the LFS object.
86 size - Integer byte size of the LFS object. Must be at least zero.
86 size - Integer byte size of the LFS object. Must be at least zero.
87 """
87 """
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 auth = request.authorization
89 auth = request.authorization
90 repo = request.matchdict.get('repo')
90 repo = request.matchdict.get('repo')
91 data = request.json
91 data = request.json
92 operation = data.get('operation')
92 operation = data.get('operation')
93 http_scheme = request.registry.git_lfs_http_scheme
93 http_scheme = request.registry.git_lfs_http_scheme
94
94
95 if operation not in ('download', 'upload'):
95 if operation not in ('download', 'upload'):
96 log.debug('LFS: unsupported operation:%s', operation)
96 log.debug('LFS: unsupported operation:%s', operation)
97 return write_response_error(
97 return write_response_error(
98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
99
99
100 if 'objects' not in data:
100 if 'objects' not in data:
101 log.debug('LFS: missing objects data')
101 log.debug('LFS: missing objects data')
102 return write_response_error(
102 return write_response_error(
103 HTTPBadRequest, 'missing objects data')
103 HTTPBadRequest, 'missing objects data')
104
104
105 log.debug('LFS: handling operation of type: %s', operation)
105 log.debug('LFS: handling operation of type: %s', operation)
106
106
107 objects = []
107 objects = []
108 for o in data['objects']:
108 for o in data['objects']:
109 try:
109 try:
110 oid = o['oid']
110 oid = o['oid']
111 obj_size = o['size']
111 obj_size = o['size']
112 except KeyError:
112 except KeyError:
113 log.exception('LFS, failed to extract data')
113 log.exception('LFS, failed to extract data')
114 return write_response_error(
114 return write_response_error(
115 HTTPBadRequest, 'unsupported data in objects')
115 HTTPBadRequest, 'unsupported data in objects')
116
116
117 obj_data = {'oid': oid}
117 obj_data = {'oid': oid}
118
118
119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
120 _scheme=http_scheme)
120 _scheme=http_scheme)
121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
122 _scheme=http_scheme)
122 _scheme=http_scheme)
123 store = LFSOidStore(
123 store = LFSOidStore(
124 oid, repo, store_location=request.registry.git_lfs_store_path)
124 oid, repo, store_location=request.registry.git_lfs_store_path)
125 handler = OidHandler(
125 handler = OidHandler(
126 store, repo, auth, oid, obj_size, obj_data,
126 store, repo, auth, oid, obj_size, obj_data,
127 obj_href, obj_verify_href)
127 obj_href, obj_verify_href)
128
128
129 # this also verifies the OIDs
129 # this also verifies the OIDs
130 actions, errors = handler.exec_operation(operation)
130 actions, errors = handler.exec_operation(operation)
131 if errors:
131 if errors:
132 log.warning('LFS: got following errors: %s', errors)
132 log.warning('LFS: got following errors: %s', errors)
133 obj_data['errors'] = errors
133 obj_data['errors'] = errors
134
134
135 if actions:
135 if actions:
136 obj_data['actions'] = actions
136 obj_data['actions'] = actions
137
137
138 obj_data['size'] = obj_size
138 obj_data['size'] = obj_size
139 obj_data['authenticated'] = True
139 obj_data['authenticated'] = True
140 objects.append(obj_data)
140 objects.append(obj_data)
141
141
142 result = {'objects': objects, 'transfer': 'basic'}
142 result = {'objects': objects, 'transfer': 'basic'}
143 log.debug('LFS Response %s', safe_result(result))
143 log.debug('LFS Response %s', safe_result(result))
144
144
145 return result
145 return result
146
146
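# Hedged sketch of the response assembled above for a successful download
# request (href and values are made up):
#
#     {"objects": [{"oid": "1111111111...", "size": 123, "authenticated": true,
#                   "actions": {"download": {"href": "http://.../info/lfs/objects/1111111111...",
#                                            "header": {"Authorization": "Basic ..."}}}}],
#      "transfer": "basic"}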
147
147
148 def lfs_objects_oid_upload(request):
148 def lfs_objects_oid_upload(request):
149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
150 repo = request.matchdict.get('repo')
150 repo = request.matchdict.get('repo')
151 oid = request.matchdict.get('oid')
151 oid = request.matchdict.get('oid')
152 store = LFSOidStore(
152 store = LFSOidStore(
153 oid, repo, store_location=request.registry.git_lfs_store_path)
153 oid, repo, store_location=request.registry.git_lfs_store_path)
154 engine = store.get_engine(mode='wb')
154 engine = store.get_engine(mode='wb')
155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
156
156
157 body = request.environ['wsgi.input']
157 body = request.environ['wsgi.input']
158
158
159 with engine as f:
159 with engine as f:
160 blksize = 64 * 1024 # 64kb
160 blksize = 64 * 1024 # 64kb
161 while True:
161 while True:
162 # read in chunks as the stream comes in from Gunicorn;
162 # read in chunks as the stream comes in from Gunicorn;
163 # this relies on Gunicorn-specific streaming support and
163 # this relies on Gunicorn-specific streaming support and
164 # might work differently on waitress
164 # might work differently on waitress
165 chunk = body.read(blksize)
165 chunk = body.read(blksize)
166 if not chunk:
166 if not chunk:
167 break
167 break
168 f.write(chunk)
168 f.write(chunk)
169
169
170 return {'upload': 'ok'}
170 return {'upload': 'ok'}
171
171
172
172
173 def lfs_objects_oid_download(request):
173 def lfs_objects_oid_download(request):
174 repo = request.matchdict.get('repo')
174 repo = request.matchdict.get('repo')
175 oid = request.matchdict.get('oid')
175 oid = request.matchdict.get('oid')
176
176
177 store = LFSOidStore(
177 store = LFSOidStore(
178 oid, repo, store_location=request.registry.git_lfs_store_path)
178 oid, repo, store_location=request.registry.git_lfs_store_path)
179 if not store.has_oid():
179 if not store.has_oid():
180 log.debug('LFS: oid %s does not exist in store', oid)
180 log.debug('LFS: oid %s does not exist in store', oid)
181 return write_response_error(
181 return write_response_error(
182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
183
183
184 # TODO(marcink): support range header ?
184 # TODO(marcink): support range header ?
185 # Range: bytes=0-, `bytes=(\d+)\-.*`
185 # Range: bytes=0-, `bytes=(\d+)\-.*`
186
186
187 f = open(store.oid_path, 'rb')
187 f = open(store.oid_path, 'rb')
188 response = Response(
188 response = Response(
189 content_type='application/octet-stream', app_iter=FileIter(f))
189 content_type='application/octet-stream', app_iter=FileIter(f))
190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
191 return response
191 return response
192
192
193
193
194 def lfs_objects_verify(request):
194 def lfs_objects_verify(request):
195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
196 repo = request.matchdict.get('repo')
196 repo = request.matchdict.get('repo')
197
197
198 data = request.json
198 data = request.json
199 oid = data.get('oid')
199 oid = data.get('oid')
200 size = safe_int(data.get('size'))
200 size = safe_int(data.get('size'))
201
201
202 if not (oid and size):
202 if not (oid and size):
203 return write_response_error(
203 return write_response_error(
204 HTTPBadRequest, 'missing oid and size in request data')
204 HTTPBadRequest, 'missing oid and size in request data')
205
205
206 store = LFSOidStore(
206 store = LFSOidStore(
207 oid, repo, store_location=request.registry.git_lfs_store_path)
207 oid, repo, store_location=request.registry.git_lfs_store_path)
208 if not store.has_oid():
208 if not store.has_oid():
209 log.debug('LFS: oid %s does not exist in store', oid)
209 log.debug('LFS: oid %s does not exist in store', oid)
210 return write_response_error(
210 return write_response_error(
211 HTTPNotFound, 'oid `%s` does not exist in store' % oid)
211 HTTPNotFound, 'oid `%s` does not exist in store' % oid)
212
212
213 store_size = store.size_oid()
213 store_size = store.size_oid()
214 if store_size != size:
214 if store_size != size:
215 msg = 'requested file size mismatch, store size:%s requested:%s' % (
215 msg = 'requested file size mismatch, store size:%s requested:%s' % (
216 store_size, size)
216 store_size, size)
217 return write_response_error(
217 return write_response_error(
218 HTTPUnprocessableEntity, msg)
218 HTTPUnprocessableEntity, msg)
219
219
220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
221
221
222
222
223 def lfs_objects_lock(request):
223 def lfs_objects_lock(request):
224 return write_response_error(
224 return write_response_error(
225 HTTPNotImplemented, 'GIT LFS locking api not supported')
225 HTTPNotImplemented, 'GIT LFS locking api not supported')
226
226
227
227
228 def not_found(request):
228 def not_found(request):
229 return write_response_error(
229 return write_response_error(
230 HTTPNotFound, 'request path not found')
230 HTTPNotFound, 'request path not found')
231
231
232
232
233 def lfs_disabled(request):
233 def lfs_disabled(request):
234 return write_response_error(
234 return write_response_error(
235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
236
236
237
237
238 def git_lfs_app(config):
238 def git_lfs_app(config):
239
239
240 # v1 API deprecation endpoint
240 # v1 API deprecation endpoint
241 config.add_route('lfs_objects',
241 config.add_route('lfs_objects',
242 '/{repo:.*?[^/]}/info/lfs/objects')
242 '/{repo:.*?[^/]}/info/lfs/objects')
243 config.add_view(lfs_objects, route_name='lfs_objects',
243 config.add_view(lfs_objects, route_name='lfs_objects',
244 request_method='POST', renderer='json')
244 request_method='POST', renderer='json')
245
245
246 # locking API
246 # locking API
247 config.add_route('lfs_objects_lock',
247 config.add_route('lfs_objects_lock',
248 '/{repo:.*?[^/]}/info/lfs/locks')
248 '/{repo:.*?[^/]}/info/lfs/locks')
249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
250 request_method=('POST', 'GET'), renderer='json')
250 request_method=('POST', 'GET'), renderer='json')
251
251
252 config.add_route('lfs_objects_lock_verify',
252 config.add_route('lfs_objects_lock_verify',
253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
255 request_method=('POST', 'GET'), renderer='json')
255 request_method=('POST', 'GET'), renderer='json')
256
256
257 # batch API
257 # batch API
258 config.add_route('lfs_objects_batch',
258 config.add_route('lfs_objects_batch',
259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
261 request_method='POST', renderer='json')
261 request_method='POST', renderer='json')
262
262
263 # oid upload/download API
263 # oid upload/download API
264 config.add_route('lfs_objects_oid',
264 config.add_route('lfs_objects_oid',
265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
267 request_method='PUT', renderer='json')
267 request_method='PUT', renderer='json')
268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
269 request_method='GET', renderer='json')
269 request_method='GET', renderer='json')
270
270
271 # verification API
271 # verification API
272 config.add_route('lfs_objects_verify',
272 config.add_route('lfs_objects_verify',
273 '/{repo:.*?[^/]}/info/lfs/verify')
273 '/{repo:.*?[^/]}/info/lfs/verify')
274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
275 request_method='POST', renderer='json')
275 request_method='POST', renderer='json')
276
276
277 # not found handler for API
277 # not found handler for API
278 config.add_notfound_view(not_found, renderer='json')
278 config.add_notfound_view(not_found, renderer='json')
279
279
280
280
281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
282 config = Configurator()
282 config = Configurator()
283 if git_lfs_enabled:
283 if git_lfs_enabled:
284 config.include(git_lfs_app)
284 config.include(git_lfs_app)
285 config.registry.git_lfs_store_path = git_lfs_store_path
285 config.registry.git_lfs_store_path = git_lfs_store_path
286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
287 else:
287 else:
288 # not found handler for API, reporting disabled LFS support
288 # not found handler for API, reporting disabled LFS support
289 config.add_notfound_view(lfs_disabled, renderer='json')
289 config.add_notfound_view(lfs_disabled, renderer='json')
290
290
291 app = config.make_wsgi_app()
291 app = config.make_wsgi_app()
292 return app
292 return app
@@ -1,175 +1,175 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import logging
20 import logging
21 from collections import OrderedDict
21 from collections import OrderedDict
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 class OidHandler(object):
26 class OidHandler(object):
27
27
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 obj_verify_href=None):
29 obj_verify_href=None):
30 self.current_store = store
30 self.current_store = store
31 self.repo_name = repo_name
31 self.repo_name = repo_name
32 self.auth = auth
32 self.auth = auth
33 self.oid = oid
33 self.oid = oid
34 self.obj_size = obj_size
34 self.obj_size = obj_size
35 self.obj_data = obj_data
35 self.obj_data = obj_data
36 self.obj_href = obj_href
36 self.obj_href = obj_href
37 self.obj_verify_href = obj_verify_href
37 self.obj_verify_href = obj_verify_href
38
38
39 def get_store(self, mode=None):
39 def get_store(self, mode=None):
40 return self.current_store
40 return self.current_store
41
41
42 def get_auth(self):
42 def get_auth(self):
43 """returns auth header for re-use in upload/download"""
43 """returns auth header for re-use in upload/download"""
44 return " ".join(self.auth)
44 return " ".join(self.auth)
45
45
46 def download(self):
46 def download(self):
47
47
48 store = self.get_store()
48 store = self.get_store()
49 response = None
49 response = None
50 has_errors = None
50 has_errors = None
51
51
52 if not store.has_oid():
52 if not store.has_oid():
53 # error reply back to client that something is wrong with dl
53 # error reply back to client that something is wrong with dl
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
55 has_errors = OrderedDict(
55 has_errors = OrderedDict(
56 error=OrderedDict(
56 error=OrderedDict(
57 code=404,
57 code=404,
58 message=err_msg
58 message=err_msg
59 )
59 )
60 )
60 )
61
61
62 download_action = OrderedDict(
62 download_action = OrderedDict(
63 href=self.obj_href,
63 href=self.obj_href,
64 header=OrderedDict([("Authorization", self.get_auth())])
64 header=OrderedDict([("Authorization", self.get_auth())])
65 )
65 )
66 if not has_errors:
66 if not has_errors:
67 response = OrderedDict(download=download_action)
67 response = OrderedDict(download=download_action)
68 return response, has_errors
68 return response, has_errors
69
69
70 def upload(self, skip_existing=True):
70 def upload(self, skip_existing=True):
71 """
71 """
72 Write upload action for git-lfs server
72 Write upload action for git-lfs server
73 """
73 """
74
74
75 store = self.get_store()
75 store = self.get_store()
76 response = None
76 response = None
77 has_errors = None
77 has_errors = None
78
78
79 # check if we already have the OID; if so, reply with an empty response
79 # check if we already have the OID; if so, reply with an empty response
80 if store.has_oid():
80 if store.has_oid():
81 log.debug('LFS: store already has oid %s', store.oid)
81 log.debug('LFS: store already has oid %s', store.oid)
82
82
83 # validate size
83 # validate size
84 store_size = store.size_oid()
84 store_size = store.size_oid()
85 size_match = store_size == self.obj_size
85 size_match = store_size == self.obj_size
86 if not size_match:
86 if not size_match:
87 log.warning(
87 log.warning(
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 self.oid, store_size, self.obj_size)
89 self.oid, store_size, self.obj_size)
90 elif skip_existing:
90 elif skip_existing:
91 log.debug('LFS: skipping further action as oid already exists')
91 log.debug('LFS: skipping further action as oid already exists')
92 return response, has_errors
92 return response, has_errors
93
93
94 chunked = ("Transfer-Encoding", "chunked")
94 chunked = ("Transfer-Encoding", "chunked")
95 upload_action = OrderedDict(
95 upload_action = OrderedDict(
96 href=self.obj_href,
96 href=self.obj_href,
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 )
98 )
99 if not has_errors:
99 if not has_errors:
100 response = OrderedDict(upload=upload_action)
100 response = OrderedDict(upload=upload_action)
101 # if specified in handler, return the verification endpoint
101 # if specified in handler, return the verification endpoint
102 if self.obj_verify_href:
102 if self.obj_verify_href:
103 verify_action = OrderedDict(
103 verify_action = OrderedDict(
104 href=self.obj_verify_href,
104 href=self.obj_verify_href,
105 header=OrderedDict([("Authorization", self.get_auth())])
105 header=OrderedDict([("Authorization", self.get_auth())])
106 )
106 )
107 response['verify'] = verify_action
107 response['verify'] = verify_action
108 return response, has_errors
108 return response, has_errors
109
109
110 def exec_operation(self, operation, *args, **kwargs):
110 def exec_operation(self, operation, *args, **kwargs):
111 handler = getattr(self, operation)
111 handler = getattr(self, operation)
112 log.debug('LFS: handling request using %s handler', handler)
112 log.debug('LFS: handling request using %s handler', handler)
113 return handler(*args, **kwargs)
113 return handler(*args, **kwargs)
114
114
115
115
116 class LFSOidStore(object):
116 class LFSOidStore(object):
117
117
118 def __init__(self, oid, repo, store_location=None):
118 def __init__(self, oid, repo, store_location=None):
119 self.oid = oid
119 self.oid = oid
120 self.repo = repo
120 self.repo = repo
121 self.store_path = store_location or self.get_default_store()
121 self.store_path = store_location or self.get_default_store()
122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
123 self.oid_path = os.path.join(self.store_path, oid)
123 self.oid_path = os.path.join(self.store_path, oid)
124 self.fd = None
124 self.fd = None
125
125
126 def get_engine(self, mode):
126 def get_engine(self, mode):
127 """
127 """
128 engine = store.get_engine(mode='wb')
128 engine = store.get_engine(mode='wb')
129 with engine as f:
129 with engine as f:
130 f.write('...')
130 f.write('...')
131 """
131 """
132
132
133 class StoreEngine(object):
133 class StoreEngine(object):
134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
135 self.mode = mode
135 self.mode = mode
136 self.store_path = store_path
136 self.store_path = store_path
137 self.oid_path = oid_path
137 self.oid_path = oid_path
138 self.tmp_oid_path = tmp_oid_path
138 self.tmp_oid_path = tmp_oid_path
139
139
140 def __enter__(self):
140 def __enter__(self):
141 if not os.path.isdir(self.store_path):
141 if not os.path.isdir(self.store_path):
142 os.makedirs(self.store_path)
142 os.makedirs(self.store_path)
143
143
144 # TODO(marcink): maybe write metadata here with size/oid ?
144 # TODO(marcink): maybe write metadata here with size/oid ?
145 fd = open(self.tmp_oid_path, self.mode)
145 fd = open(self.tmp_oid_path, self.mode)
146 self.fd = fd
146 self.fd = fd
147 return fd
147 return fd
148
148
149 def __exit__(self, exc_type, exc_value, traceback):
149 def __exit__(self, exc_type, exc_value, traceback):
150 # close tmp file, and rename to final destination
150 # close tmp file, and rename to final destination
151 self.fd.close()
151 self.fd.close()
152 shutil.move(self.tmp_oid_path, self.oid_path)
152 shutil.move(self.tmp_oid_path, self.oid_path)
153
153
154 return StoreEngine(
154 return StoreEngine(
155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
156
156
157 def get_default_store(self):
157 def get_default_store(self):
158 """
158 """
159 Default store, consistent with defaults of Mercurial large files store
159 Default store, consistent with defaults of Mercurial large files store
160 which is /home/username/.cache/largefiles
160 which is /home/username/.cache/largefiles
161 """
161 """
162 user_home = os.path.expanduser("~")
162 user_home = os.path.expanduser("~")
163 return os.path.join(user_home, '.cache', 'lfs-store')
163 return os.path.join(user_home, '.cache', 'lfs-store')
164
164
165 def has_oid(self):
165 def has_oid(self):
166 return os.path.exists(os.path.join(self.store_path, self.oid))
166 return os.path.exists(os.path.join(self.store_path, self.oid))
167
167
168 def size_oid(self):
168 def size_oid(self):
169 size = -1
169 size = -1
170
170
171 if self.has_oid():
171 if self.has_oid():
172 oid = os.path.join(self.store_path, self.oid)
172 oid = os.path.join(self.store_path, self.oid)
173 size = os.stat(oid).st_size
173 size = os.stat(oid).st_size
174
174
175 return size
175 return size
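
To make the store contract above concrete, a minimal usage sketch (assumptions: a writable temporary directory; the oid and content values are illustrative, not taken from this changeset):

    from vcsserver.git_lfs.lib import LFSOidStore

    store = LFSOidStore(oid='deadbeef', repo='test-repo', store_location='/tmp/lfs-store')

    # get_engine() writes to <oid>.tmp and renames it to <oid> on context exit
    with store.get_engine(mode='wb') as f:
        f.write('OID_CONTENT')

    assert store.has_oid() is True
    assert store.size_oid() == len('OID_CONTENT')
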
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,272 +1,272 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21 import simplejson as json
21 import simplejson as json
22
22
23 from vcsserver.git_lfs.app import create_app
23 from vcsserver.git_lfs.app import create_app
24
24
25
25
26 @pytest.fixture(scope='function')
26 @pytest.fixture(scope='function')
27 def git_lfs_app(tmpdir):
27 def git_lfs_app(tmpdir):
28 custom_app = WebObTestApp(create_app(
28 custom_app = WebObTestApp(create_app(
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
30 git_lfs_http_scheme='http'))
30 git_lfs_http_scheme='http'))
31 custom_app._store = str(tmpdir)
31 custom_app._store = str(tmpdir)
32 return custom_app
32 return custom_app
33
33
34
34
35 @pytest.fixture(scope='function')
35 @pytest.fixture(scope='function')
36 def git_lfs_https_app(tmpdir):
36 def git_lfs_https_app(tmpdir):
37 custom_app = WebObTestApp(create_app(
37 custom_app = WebObTestApp(create_app(
38 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
38 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
39 git_lfs_http_scheme='https'))
39 git_lfs_http_scheme='https'))
40 custom_app._store = str(tmpdir)
40 custom_app._store = str(tmpdir)
41 return custom_app
41 return custom_app
42
42
43
43
44 @pytest.fixture()
44 @pytest.fixture()
45 def http_auth():
45 def http_auth():
46 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
46 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
47
47
48
48
49 class TestLFSApplication(object):
49 class TestLFSApplication(object):
50
50
51 def test_app_wrong_path(self, git_lfs_app):
51 def test_app_wrong_path(self, git_lfs_app):
52 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
52 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
53
53
54 def test_app_deprecated_endpoint(self, git_lfs_app):
54 def test_app_deprecated_endpoint(self, git_lfs_app):
55 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
55 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
56 assert response.status_code == 501
56 assert response.status_code == 501
57 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
57 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
58
58
59 def test_app_lock_verify_api_not_available(self, git_lfs_app):
59 def test_app_lock_verify_api_not_available(self, git_lfs_app):
60 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
60 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
61 assert response.status_code == 501
61 assert response.status_code == 501
62 assert json.loads(response.text) == {
62 assert json.loads(response.text) == {
63 u'message': u'GIT LFS locking api not supported'}
63 u'message': u'GIT LFS locking api not supported'}
64
64
65 def test_app_lock_api_not_available(self, git_lfs_app):
65 def test_app_lock_api_not_available(self, git_lfs_app):
66 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
66 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
67 assert response.status_code == 501
67 assert response.status_code == 501
68 assert json.loads(response.text) == {
68 assert json.loads(response.text) == {
69 u'message': u'GIT LFS locking api not supported'}
69 u'message': u'GIT LFS locking api not supported'}
70
70
71 def test_app_batch_api_missing_auth(self, git_lfs_app):
71 def test_app_batch_api_missing_auth(self, git_lfs_app):
72 git_lfs_app.post_json(
72 git_lfs_app.post_json(
73 '/repo/info/lfs/objects/batch', params={}, status=403)
73 '/repo/info/lfs/objects/batch', params={}, status=403)
74
74
75 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
75 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
76 response = git_lfs_app.post_json(
76 response = git_lfs_app.post_json(
77 '/repo/info/lfs/objects/batch', params={}, status=400,
77 '/repo/info/lfs/objects/batch', params={}, status=400,
78 extra_environ=http_auth)
78 extra_environ=http_auth)
79 assert json.loads(response.text) == {
79 assert json.loads(response.text) == {
80 u'message': u'unsupported operation mode: `None`'}
80 u'message': u'unsupported operation mode: `None`'}
81
81
82 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
82 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
83 response = git_lfs_app.post_json(
83 response = git_lfs_app.post_json(
84 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
84 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
85 status=400, extra_environ=http_auth)
85 status=400, extra_environ=http_auth)
86 assert json.loads(response.text) == {
86 assert json.loads(response.text) == {
87 u'message': u'missing objects data'}
87 u'message': u'missing objects data'}
88
88
89 def test_app_batch_api_unsupported_data_in_objects(
89 def test_app_batch_api_unsupported_data_in_objects(
90 self, git_lfs_app, http_auth):
90 self, git_lfs_app, http_auth):
91 params = {'operation': 'download',
91 params = {'operation': 'download',
92 'objects': [{}]}
92 'objects': [{}]}
93 response = git_lfs_app.post_json(
93 response = git_lfs_app.post_json(
94 '/repo/info/lfs/objects/batch', params=params, status=400,
94 '/repo/info/lfs/objects/batch', params=params, status=400,
95 extra_environ=http_auth)
95 extra_environ=http_auth)
96 assert json.loads(response.text) == {
96 assert json.loads(response.text) == {
97 u'message': u'unsupported data in objects'}
97 u'message': u'unsupported data in objects'}
98
98
99 def test_app_batch_api_download_missing_object(
99 def test_app_batch_api_download_missing_object(
100 self, git_lfs_app, http_auth):
100 self, git_lfs_app, http_auth):
101 params = {'operation': 'download',
101 params = {'operation': 'download',
102 'objects': [{'oid': '123', 'size': '1024'}]}
102 'objects': [{'oid': '123', 'size': '1024'}]}
103 response = git_lfs_app.post_json(
103 response = git_lfs_app.post_json(
104 '/repo/info/lfs/objects/batch', params=params,
104 '/repo/info/lfs/objects/batch', params=params,
105 extra_environ=http_auth)
105 extra_environ=http_auth)
106
106
107 expected_objects = [
107 expected_objects = [
108 {u'authenticated': True,
108 {u'authenticated': True,
109 u'errors': {u'error': {
109 u'errors': {u'error': {
110 u'code': 404,
110 u'code': 404,
111 u'message': u'object: 123 does not exist in store'}},
111 u'message': u'object: 123 does not exist in store'}},
112 u'oid': u'123',
112 u'oid': u'123',
113 u'size': u'1024'}
113 u'size': u'1024'}
114 ]
114 ]
115 assert json.loads(response.text) == {
115 assert json.loads(response.text) == {
116 'objects': expected_objects, 'transfer': 'basic'}
116 'objects': expected_objects, 'transfer': 'basic'}
117
117
118 def test_app_batch_api_download(self, git_lfs_app, http_auth):
118 def test_app_batch_api_download(self, git_lfs_app, http_auth):
119 oid = '456'
119 oid = '456'
120 oid_path = os.path.join(git_lfs_app._store, oid)
120 oid_path = os.path.join(git_lfs_app._store, oid)
121 if not os.path.isdir(os.path.dirname(oid_path)):
121 if not os.path.isdir(os.path.dirname(oid_path)):
122 os.makedirs(os.path.dirname(oid_path))
122 os.makedirs(os.path.dirname(oid_path))
123 with open(oid_path, 'wb') as f:
123 with open(oid_path, 'wb') as f:
124 f.write('OID_CONTENT')
124 f.write('OID_CONTENT')
125
125
126 params = {'operation': 'download',
126 params = {'operation': 'download',
127 'objects': [{'oid': oid, 'size': '1024'}]}
127 'objects': [{'oid': oid, 'size': '1024'}]}
128 response = git_lfs_app.post_json(
128 response = git_lfs_app.post_json(
129 '/repo/info/lfs/objects/batch', params=params,
129 '/repo/info/lfs/objects/batch', params=params,
130 extra_environ=http_auth)
130 extra_environ=http_auth)
131
131
132 expected_objects = [
132 expected_objects = [
133 {u'authenticated': True,
133 {u'authenticated': True,
134 u'actions': {
134 u'actions': {
135 u'download': {
135 u'download': {
136 u'header': {u'Authorization': u'Basic XXXXX'},
136 u'header': {u'Authorization': u'Basic XXXXX'},
137 u'href': u'http://localhost/repo/info/lfs/objects/456'},
137 u'href': u'http://localhost/repo/info/lfs/objects/456'},
138 },
138 },
139 u'oid': u'456',
139 u'oid': u'456',
140 u'size': u'1024'}
140 u'size': u'1024'}
141 ]
141 ]
142 assert json.loads(response.text) == {
142 assert json.loads(response.text) == {
143 'objects': expected_objects, 'transfer': 'basic'}
143 'objects': expected_objects, 'transfer': 'basic'}
144
144
145 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
145 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
146 params = {'operation': 'upload',
146 params = {'operation': 'upload',
147 'objects': [{'oid': '123', 'size': '1024'}]}
147 'objects': [{'oid': '123', 'size': '1024'}]}
148 response = git_lfs_app.post_json(
148 response = git_lfs_app.post_json(
149 '/repo/info/lfs/objects/batch', params=params,
149 '/repo/info/lfs/objects/batch', params=params,
150 extra_environ=http_auth)
150 extra_environ=http_auth)
151 expected_objects = [
151 expected_objects = [
152 {u'authenticated': True,
152 {u'authenticated': True,
153 u'actions': {
153 u'actions': {
154 u'upload': {
154 u'upload': {
155 u'header': {u'Authorization': u'Basic XXXXX',
155 u'header': {u'Authorization': u'Basic XXXXX',
156 u'Transfer-Encoding': u'chunked'},
156 u'Transfer-Encoding': u'chunked'},
157 u'href': u'http://localhost/repo/info/lfs/objects/123'},
157 u'href': u'http://localhost/repo/info/lfs/objects/123'},
158 u'verify': {
158 u'verify': {
159 u'header': {u'Authorization': u'Basic XXXXX'},
159 u'header': {u'Authorization': u'Basic XXXXX'},
160 u'href': u'http://localhost/repo/info/lfs/verify'}
160 u'href': u'http://localhost/repo/info/lfs/verify'}
161 },
161 },
162 u'oid': u'123',
162 u'oid': u'123',
163 u'size': u'1024'}
163 u'size': u'1024'}
164 ]
164 ]
165 assert json.loads(response.text) == {
165 assert json.loads(response.text) == {
166 'objects': expected_objects, 'transfer': 'basic'}
166 'objects': expected_objects, 'transfer': 'basic'}
167
167
168 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
168 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
169 params = {'operation': 'upload',
169 params = {'operation': 'upload',
170 'objects': [{'oid': '123', 'size': '1024'}]}
170 'objects': [{'oid': '123', 'size': '1024'}]}
171 response = git_lfs_https_app.post_json(
171 response = git_lfs_https_app.post_json(
172 '/repo/info/lfs/objects/batch', params=params,
172 '/repo/info/lfs/objects/batch', params=params,
173 extra_environ=http_auth)
173 extra_environ=http_auth)
174 expected_objects = [
174 expected_objects = [
175 {u'authenticated': True,
175 {u'authenticated': True,
176 u'actions': {
176 u'actions': {
177 u'upload': {
177 u'upload': {
178 u'header': {u'Authorization': u'Basic XXXXX',
178 u'header': {u'Authorization': u'Basic XXXXX',
179 u'Transfer-Encoding': u'chunked'},
179 u'Transfer-Encoding': u'chunked'},
180 u'href': u'https://localhost/repo/info/lfs/objects/123'},
180 u'href': u'https://localhost/repo/info/lfs/objects/123'},
181 u'verify': {
181 u'verify': {
182 u'header': {u'Authorization': u'Basic XXXXX'},
182 u'header': {u'Authorization': u'Basic XXXXX'},
183 u'href': u'https://localhost/repo/info/lfs/verify'}
183 u'href': u'https://localhost/repo/info/lfs/verify'}
184 },
184 },
185 u'oid': u'123',
185 u'oid': u'123',
186 u'size': u'1024'}
186 u'size': u'1024'}
187 ]
187 ]
188 assert json.loads(response.text) == {
188 assert json.loads(response.text) == {
189 'objects': expected_objects, 'transfer': 'basic'}
189 'objects': expected_objects, 'transfer': 'basic'}
190
190
191 def test_app_verify_api_missing_data(self, git_lfs_app):
191 def test_app_verify_api_missing_data(self, git_lfs_app):
192 params = {'oid': 'missing'}
192 params = {'oid': 'missing'}
193 response = git_lfs_app.post_json(
193 response = git_lfs_app.post_json(
194 '/repo/info/lfs/verify', params=params,
194 '/repo/info/lfs/verify', params=params,
195 status=400)
195 status=400)
196
196
197 assert json.loads(response.text) == {
197 assert json.loads(response.text) == {
198 u'message': u'missing oid and size in request data'}
198 u'message': u'missing oid and size in request data'}
199
199
200 def test_app_verify_api_missing_obj(self, git_lfs_app):
200 def test_app_verify_api_missing_obj(self, git_lfs_app):
201 params = {'oid': 'missing', 'size': '1024'}
201 params = {'oid': 'missing', 'size': '1024'}
202 response = git_lfs_app.post_json(
202 response = git_lfs_app.post_json(
203 '/repo/info/lfs/verify', params=params,
203 '/repo/info/lfs/verify', params=params,
204 status=404)
204 status=404)
205
205
206 assert json.loads(response.text) == {
206 assert json.loads(response.text) == {
207 u'message': u'oid `missing` does not exists in store'}
207 u'message': u'oid `missing` does not exists in store'}
208
208
209 def test_app_verify_api_size_mismatch(self, git_lfs_app):
209 def test_app_verify_api_size_mismatch(self, git_lfs_app):
210 oid = 'existing'
210 oid = 'existing'
211 oid_path = os.path.join(git_lfs_app._store, oid)
211 oid_path = os.path.join(git_lfs_app._store, oid)
212 if not os.path.isdir(os.path.dirname(oid_path)):
212 if not os.path.isdir(os.path.dirname(oid_path)):
213 os.makedirs(os.path.dirname(oid_path))
213 os.makedirs(os.path.dirname(oid_path))
214 with open(oid_path, 'wb') as f:
214 with open(oid_path, 'wb') as f:
215 f.write('OID_CONTENT')
215 f.write('OID_CONTENT')
216
216
217 params = {'oid': oid, 'size': '1024'}
217 params = {'oid': oid, 'size': '1024'}
218 response = git_lfs_app.post_json(
218 response = git_lfs_app.post_json(
219 '/repo/info/lfs/verify', params=params, status=422)
219 '/repo/info/lfs/verify', params=params, status=422)
220
220
221 assert json.loads(response.text) == {
221 assert json.loads(response.text) == {
222 u'message': u'requested file size mismatch '
222 u'message': u'requested file size mismatch '
223 u'store size:11 requested:1024'}
223 u'store size:11 requested:1024'}
224
224
225 def test_app_verify_api(self, git_lfs_app):
225 def test_app_verify_api(self, git_lfs_app):
226 oid = 'existing'
226 oid = 'existing'
227 oid_path = os.path.join(git_lfs_app._store, oid)
227 oid_path = os.path.join(git_lfs_app._store, oid)
228 if not os.path.isdir(os.path.dirname(oid_path)):
228 if not os.path.isdir(os.path.dirname(oid_path)):
229 os.makedirs(os.path.dirname(oid_path))
229 os.makedirs(os.path.dirname(oid_path))
230 with open(oid_path, 'wb') as f:
230 with open(oid_path, 'wb') as f:
231 f.write('OID_CONTENT')
231 f.write('OID_CONTENT')
232
232
233 params = {'oid': oid, 'size': 11}
233 params = {'oid': oid, 'size': 11}
234 response = git_lfs_app.post_json(
234 response = git_lfs_app.post_json(
235 '/repo/info/lfs/verify', params=params)
235 '/repo/info/lfs/verify', params=params)
236
236
237 assert json.loads(response.text) == {
237 assert json.loads(response.text) == {
238 u'message': {u'size': u'ok', u'in_store': u'ok'}}
238 u'message': {u'size': u'ok', u'in_store': u'ok'}}
239
239
240 def test_app_download_api_oid_not_existing(self, git_lfs_app):
240 def test_app_download_api_oid_not_existing(self, git_lfs_app):
241 oid = 'missing'
241 oid = 'missing'
242
242
243 response = git_lfs_app.get(
243 response = git_lfs_app.get(
244 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
244 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
245
245
246 assert json.loads(response.text) == {
246 assert json.loads(response.text) == {
247 u'message': u'requested file with oid `missing` not found in store'}
247 u'message': u'requested file with oid `missing` not found in store'}
248
248
249 def test_app_download_api(self, git_lfs_app):
249 def test_app_download_api(self, git_lfs_app):
250 oid = 'existing'
250 oid = 'existing'
251 oid_path = os.path.join(git_lfs_app._store, oid)
251 oid_path = os.path.join(git_lfs_app._store, oid)
252 if not os.path.isdir(os.path.dirname(oid_path)):
252 if not os.path.isdir(os.path.dirname(oid_path)):
253 os.makedirs(os.path.dirname(oid_path))
253 os.makedirs(os.path.dirname(oid_path))
254 with open(oid_path, 'wb') as f:
254 with open(oid_path, 'wb') as f:
255 f.write('OID_CONTENT')
255 f.write('OID_CONTENT')
256
256
257 response = git_lfs_app.get(
257 response = git_lfs_app.get(
258 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
258 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
259 assert response
259 assert response
260
260
261 def test_app_upload(self, git_lfs_app):
261 def test_app_upload(self, git_lfs_app):
262 oid = 'uploaded'
262 oid = 'uploaded'
263
263
264 response = git_lfs_app.put(
264 response = git_lfs_app.put(
265 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
265 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
266
266
267 assert json.loads(response.text) == {u'upload': u'ok'}
267 assert json.loads(response.text) == {u'upload': u'ok'}
268
268
269 # verify that we actually wrote that OID
269 # verify that we actually wrote that OID
270 oid_path = os.path.join(git_lfs_app._store, oid)
270 oid_path = os.path.join(git_lfs_app._store, oid)
271 assert os.path.isfile(oid_path)
271 assert os.path.isfile(oid_path)
272 assert 'CONTENT' == open(oid_path).read()
272 assert 'CONTENT' == open(oid_path).read()
@@ -1,141 +1,141 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21
21
22
22
23 @pytest.fixture()
23 @pytest.fixture()
24 def lfs_store(tmpdir):
24 def lfs_store(tmpdir):
25 repo = 'test'
25 repo = 'test'
26 oid = '123456789'
26 oid = '123456789'
27 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
27 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 return store
28 return store
29
29
30
30
31 @pytest.fixture()
31 @pytest.fixture()
32 def oid_handler(lfs_store):
32 def oid_handler(lfs_store):
33 store = lfs_store
33 store = lfs_store
34 repo = store.repo
34 repo = store.repo
35 oid = store.oid
35 oid = store.oid
36
36
37 oid_handler = OidHandler(
37 oid_handler = OidHandler(
38 store=store, repo_name=repo, auth=('basic', 'xxxx'),
38 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 oid=oid,
39 oid=oid,
40 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
40 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 obj_verify_href='http://localhost/verify')
41 obj_verify_href='http://localhost/verify')
42 return oid_handler
42 return oid_handler
43
43
44
44
45 class TestOidHandler(object):
45 class TestOidHandler(object):
46
46
47 @pytest.mark.parametrize('exec_action', [
47 @pytest.mark.parametrize('exec_action', [
48 'download',
48 'download',
49 'upload',
49 'upload',
50 ])
50 ])
51 def test_exec_action(self, exec_action, oid_handler):
51 def test_exec_action(self, exec_action, oid_handler):
52 handler = oid_handler.exec_operation(exec_action)
52 handler = oid_handler.exec_operation(exec_action)
53 assert handler
53 assert handler
54
54
55 def test_exec_action_undefined(self, oid_handler):
55 def test_exec_action_undefined(self, oid_handler):
56 with pytest.raises(AttributeError):
56 with pytest.raises(AttributeError):
57 oid_handler.exec_operation('wrong')
57 oid_handler.exec_operation('wrong')
58
58
59 def test_download_oid_not_existing(self, oid_handler):
59 def test_download_oid_not_existing(self, oid_handler):
60 response, has_errors = oid_handler.exec_operation('download')
60 response, has_errors = oid_handler.exec_operation('download')
61
61
62 assert response is None
62 assert response is None
63 assert has_errors['error'] == {
63 assert has_errors['error'] == {
64 'code': 404,
64 'code': 404,
65 'message': 'object: 123456789 does not exist in store'}
65 'message': 'object: 123456789 does not exist in store'}
66
66
67 def test_download_oid(self, oid_handler):
67 def test_download_oid(self, oid_handler):
68 store = oid_handler.get_store()
68 store = oid_handler.get_store()
69 if not os.path.isdir(os.path.dirname(store.oid_path)):
69 if not os.path.isdir(os.path.dirname(store.oid_path)):
70 os.makedirs(os.path.dirname(store.oid_path))
70 os.makedirs(os.path.dirname(store.oid_path))
71
71
72 with open(store.oid_path, 'wb') as f:
72 with open(store.oid_path, 'wb') as f:
73 f.write('CONTENT')
73 f.write('CONTENT')
74
74
75 response, has_errors = oid_handler.exec_operation('download')
75 response, has_errors = oid_handler.exec_operation('download')
76
76
77 assert has_errors is None
77 assert has_errors is None
78 assert response['download'] == {
78 assert response['download'] == {
79 'header': {'Authorization': 'basic xxxx'},
79 'header': {'Authorization': 'basic xxxx'},
80 'href': 'http://localhost/handle_oid'
80 'href': 'http://localhost/handle_oid'
81 }
81 }
82
82
83 def test_upload_oid_that_exists(self, oid_handler):
83 def test_upload_oid_that_exists(self, oid_handler):
84 store = oid_handler.get_store()
84 store = oid_handler.get_store()
85 if not os.path.isdir(os.path.dirname(store.oid_path)):
85 if not os.path.isdir(os.path.dirname(store.oid_path)):
86 os.makedirs(os.path.dirname(store.oid_path))
86 os.makedirs(os.path.dirname(store.oid_path))
87
87
88 with open(store.oid_path, 'wb') as f:
88 with open(store.oid_path, 'wb') as f:
89 f.write('CONTENT')
89 f.write('CONTENT')
90 oid_handler.obj_size = 7
90 oid_handler.obj_size = 7
91 response, has_errors = oid_handler.exec_operation('upload')
91 response, has_errors = oid_handler.exec_operation('upload')
92 assert has_errors is None
92 assert has_errors is None
93 assert response is None
93 assert response is None
94
94
95 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
95 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
96 store = oid_handler.get_store()
96 store = oid_handler.get_store()
97 if not os.path.isdir(os.path.dirname(store.oid_path)):
97 if not os.path.isdir(os.path.dirname(store.oid_path)):
98 os.makedirs(os.path.dirname(store.oid_path))
98 os.makedirs(os.path.dirname(store.oid_path))
99
99
100 with open(store.oid_path, 'wb') as f:
100 with open(store.oid_path, 'wb') as f:
101 f.write('CONTENT')
101 f.write('CONTENT')
102
102
103 oid_handler.obj_size = 10240
103 oid_handler.obj_size = 10240
104 response, has_errors = oid_handler.exec_operation('upload')
104 response, has_errors = oid_handler.exec_operation('upload')
105 assert has_errors is None
105 assert has_errors is None
106 assert response['upload'] == {
106 assert response['upload'] == {
107 'header': {'Authorization': 'basic xxxx',
107 'header': {'Authorization': 'basic xxxx',
108 'Transfer-Encoding': 'chunked'},
108 'Transfer-Encoding': 'chunked'},
109 'href': 'http://localhost/handle_oid',
109 'href': 'http://localhost/handle_oid',
110 }
110 }
111
111
112 def test_upload_oid(self, oid_handler):
112 def test_upload_oid(self, oid_handler):
113 response, has_errors = oid_handler.exec_operation('upload')
113 response, has_errors = oid_handler.exec_operation('upload')
114 assert has_errors is None
114 assert has_errors is None
115 assert response['upload'] == {
115 assert response['upload'] == {
116 'header': {'Authorization': 'basic xxxx',
116 'header': {'Authorization': 'basic xxxx',
117 'Transfer-Encoding': 'chunked'},
117 'Transfer-Encoding': 'chunked'},
118 'href': 'http://localhost/handle_oid'
118 'href': 'http://localhost/handle_oid'
119 }
119 }
120
120
121
121
122 class TestLFSStore(object):
122 class TestLFSStore(object):
123 def test_write_oid(self, lfs_store):
123 def test_write_oid(self, lfs_store):
124 oid_location = lfs_store.oid_path
124 oid_location = lfs_store.oid_path
125
125
126 assert not os.path.isfile(oid_location)
126 assert not os.path.isfile(oid_location)
127
127
128 engine = lfs_store.get_engine(mode='wb')
128 engine = lfs_store.get_engine(mode='wb')
129 with engine as f:
129 with engine as f:
130 f.write('CONTENT')
130 f.write('CONTENT')
131
131
132 assert os.path.isfile(oid_location)
132 assert os.path.isfile(oid_location)
133
133
134 def test_detect_has_oid(self, lfs_store):
134 def test_detect_has_oid(self, lfs_store):
135
135
136 assert lfs_store.has_oid() is False
136 assert lfs_store.has_oid() is False
137 engine = lfs_store.get_engine(mode='wb')
137 engine = lfs_store.get_engine(mode='wb')
138 with engine as f:
138 with engine as f:
139 f.write('CONTENT')
139 f.write('CONTENT')
140
140
141 assert lfs_store.has_oid() is True
\ No newline at end of file
141 assert lfs_store.has_oid() is True
@@ -1,50 +1,50 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import copy
17 import copy
18 from functools import wraps
18 from functools import wraps
19
19
20
20
21 def get_cython_compat_decorator(wrapper, func):
21 def get_cython_compat_decorator(wrapper, func):
22 """
22 """
23 Creates a cython compatible decorator. The previously used
23 Creates a cython compatible decorator. The previously used
24 decorator.decorator() function seems to be incompatible with cython.
24 decorator.decorator() function seems to be incompatible with cython.
25
25
26 :param wrapper: __wrapper method of the decorator class
26 :param wrapper: __wrapper method of the decorator class
27 :param func: decorated function
27 :param func: decorated function
28 """
28 """
29 @wraps(func)
29 @wraps(func)
30 def local_wrapper(*args, **kwds):
30 def local_wrapper(*args, **kwds):
31 return wrapper(func, *args, **kwds)
31 return wrapper(func, *args, **kwds)
32 local_wrapper.__wrapped__ = func
32 local_wrapper.__wrapped__ = func
33 return local_wrapper
33 return local_wrapper
34
34
35
35
36 def safe_result(result):
36 def safe_result(result):
37 """clean result for better representation in logs"""
37 """clean result for better representation in logs"""
38 clean_copy = copy.deepcopy(result)
38 clean_copy = copy.deepcopy(result)
39
39
40 try:
40 try:
41 if 'objects' in clean_copy:
41 if 'objects' in clean_copy:
42 for oid_data in clean_copy['objects']:
42 for oid_data in clean_copy['objects']:
43 if 'actions' in oid_data:
43 if 'actions' in oid_data:
44 for action_name, data in oid_data['actions'].items():
44 for action_name, data in oid_data['actions'].items():
45 if 'header' in data:
45 if 'header' in data:
46 data['header'] = {'Authorization': '*****'}
46 data['header'] = {'Authorization': '*****'}
47 except Exception:
47 except Exception:
48 return result
48 return result
49
49
50 return clean_copy
50 return clean_copy
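
As an illustration of the helper above, safe_result() deep-copies a batch response and masks Authorization headers before they reach the logs; the payload below is a hypothetical example shaped like the LFS batch responses shown earlier.

    result = {
        'transfer': 'basic',
        'objects': [{
            'oid': '123',
            'actions': {
                'upload': {
                    'href': 'http://localhost/repo/info/lfs/objects/123',
                    'header': {'Authorization': 'Basic XXXXX'},
                },
            },
        }],
    }

    cleaned = safe_result(result)
    assert cleaned['objects'][0]['actions']['upload']['header'] == {'Authorization': '*****'}
    # the original payload is left untouched thanks to the deepcopy
    assert result['objects'][0]['actions']['upload']['header']['Authorization'] == 'Basic XXXXX'
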
@@ -1,1009 +1,1009 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase, purge
25 from hgext import largefiles, rebase, purge
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30 from mercurial import repair
30 from mercurial import repair
31
31
32 import vcsserver
32 import vcsserver
33 from vcsserver import exceptions
33 from vcsserver import exceptions
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 from vcsserver.hgcompat import (
35 from vcsserver.hgcompat import (
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 RepoLookupError, InterventionRequired, RequirementError)
40 RepoLookupError, InterventionRequired, RequirementError)
41 from vcsserver.vcs_base import RemoteBase
41 from vcsserver.vcs_base import RemoteBase
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def make_ui_from_config(repo_config):
46 def make_ui_from_config(repo_config):
47
47
48 class LoggingUI(ui.ui):
48 class LoggingUI(ui.ui):
49 def status(self, *msg, **opts):
49 def status(self, *msg, **opts):
50 log.info(' '.join(msg).rstrip('\n'))
50 log.info(' '.join(msg).rstrip('\n'))
51 super(LoggingUI, self).status(*msg, **opts)
51 super(LoggingUI, self).status(*msg, **opts)
52
52
53 def warn(self, *msg, **opts):
53 def warn(self, *msg, **opts):
54 log.warn(' '.join(msg).rstrip('\n'))
54 log.warn(' '.join(msg).rstrip('\n'))
55 super(LoggingUI, self).warn(*msg, **opts)
55 super(LoggingUI, self).warn(*msg, **opts)
56
56
57 def error(self, *msg, **opts):
57 def error(self, *msg, **opts):
58 log.error(' '.join(msg).rstrip('\n'))
58 log.error(' '.join(msg).rstrip('\n'))
59 super(LoggingUI, self).error(*msg, **opts)
59 super(LoggingUI, self).error(*msg, **opts)
60
60
61 def note(self, *msg, **opts):
61 def note(self, *msg, **opts):
62 log.info(' '.join(msg).rstrip('\n'))
62 log.info(' '.join(msg).rstrip('\n'))
63 super(LoggingUI, self).note(*msg, **opts)
63 super(LoggingUI, self).note(*msg, **opts)
64
64
65 def debug(self, *msg, **opts):
65 def debug(self, *msg, **opts):
66 log.debug(' '.join(msg).rstrip('\n'))
66 log.debug(' '.join(msg).rstrip('\n'))
67 super(LoggingUI, self).debug(*msg, **opts)
67 super(LoggingUI, self).debug(*msg, **opts)
68
68
69 baseui = LoggingUI()
69 baseui = LoggingUI()
70
70
71 # clean the baseui object
71 # clean the baseui object
72 baseui._ocfg = hgconfig.config()
72 baseui._ocfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
75
75
76 for section, option, value in repo_config:
76 for section, option, value in repo_config:
77 baseui.setconfig(section, option, value)
77 baseui.setconfig(section, option, value)
78
78
79 # make our hgweb quiet so it doesn't print output
79 # make our hgweb quiet so it doesn't print output
80 baseui.setconfig('ui', 'quiet', 'true')
80 baseui.setconfig('ui', 'quiet', 'true')
81
81
82 baseui.setconfig('ui', 'paginate', 'never')
82 baseui.setconfig('ui', 'paginate', 'never')
83 # for better Error reporting of Mercurial
83 # for better Error reporting of Mercurial
84 baseui.setconfig('ui', 'message-output', 'stderr')
84 baseui.setconfig('ui', 'message-output', 'stderr')
85
85
86 # force mercurial to only use 1 thread, otherwise it may try to set a
86 # force mercurial to only use 1 thread, otherwise it may try to set a
87 # signal in a non-main thread, thus generating a ValueError.
87 # signal in a non-main thread, thus generating a ValueError.
88 baseui.setconfig('worker', 'numcpus', 1)
88 baseui.setconfig('worker', 'numcpus', 1)
89
89
90 # If there is no config for the largefiles extension, we explicitly disable
90 # If there is no config for the largefiles extension, we explicitly disable
91 # it here. This overrides settings from the repository's hgrc file. Recent
91 # it here. This overrides settings from the repository's hgrc file. Recent
92 # mercurial versions enable largefiles in hgrc on clone from largefile
92 # mercurial versions enable largefiles in hgrc on clone from largefile
93 # repo.
93 # repo.
94 if not baseui.hasconfig('extensions', 'largefiles'):
94 if not baseui.hasconfig('extensions', 'largefiles'):
95 log.debug('Explicitly disable largefiles extension for repo.')
95 log.debug('Explicitly disable largefiles extension for repo.')
96 baseui.setconfig('extensions', 'largefiles', '!')
96 baseui.setconfig('extensions', 'largefiles', '!')
97
97
98 return baseui
98 return baseui
99
99
100
100
101 def reraise_safe_exceptions(func):
101 def reraise_safe_exceptions(func):
102 """Decorator for converting mercurial exceptions to something neutral."""
102 """Decorator for converting mercurial exceptions to something neutral."""
103
103
104 def wrapper(*args, **kwargs):
104 def wrapper(*args, **kwargs):
105 try:
105 try:
106 return func(*args, **kwargs)
106 return func(*args, **kwargs)
107 except (Abort, InterventionRequired) as e:
107 except (Abort, InterventionRequired) as e:
108 raise_from_original(exceptions.AbortException(e))
108 raise_from_original(exceptions.AbortException(e))
109 except RepoLookupError as e:
109 except RepoLookupError as e:
110 raise_from_original(exceptions.LookupException(e))
110 raise_from_original(exceptions.LookupException(e))
111 except RequirementError as e:
111 except RequirementError as e:
112 raise_from_original(exceptions.RequirementException(e))
112 raise_from_original(exceptions.RequirementException(e))
113 except RepoError as e:
113 except RepoError as e:
114 raise_from_original(exceptions.VcsException(e))
114 raise_from_original(exceptions.VcsException(e))
115 except LookupError as e:
115 except LookupError as e:
116 raise_from_original(exceptions.LookupException(e))
116 raise_from_original(exceptions.LookupException(e))
117 except Exception as e:
117 except Exception as e:
118 if not hasattr(e, '_vcs_kind'):
118 if not hasattr(e, '_vcs_kind'):
119 log.exception("Unhandled exception in hg remote call")
119 log.exception("Unhandled exception in hg remote call")
120 raise_from_original(exceptions.UnhandledException(e))
120 raise_from_original(exceptions.UnhandledException(e))
121
121
122 raise
122 raise
123 return wrapper
123 return wrapper
124
124
125
125
126 class MercurialFactory(RepoFactory):
126 class MercurialFactory(RepoFactory):
127 repo_type = 'hg'
127 repo_type = 'hg'
128
128
129 def _create_config(self, config, hooks=True):
129 def _create_config(self, config, hooks=True):
130 if not hooks:
130 if not hooks:
131 hooks_to_clean = frozenset((
131 hooks_to_clean = frozenset((
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
134 new_config = []
134 new_config = []
135 for section, option, value in config:
135 for section, option, value in config:
136 if section == 'hooks' and option in hooks_to_clean:
136 if section == 'hooks' and option in hooks_to_clean:
137 continue
137 continue
138 new_config.append((section, option, value))
138 new_config.append((section, option, value))
139 config = new_config
139 config = new_config
140
140
141 baseui = make_ui_from_config(config)
141 baseui = make_ui_from_config(config)
142 return baseui
142 return baseui
143
143
144 def _create_repo(self, wire, create):
144 def _create_repo(self, wire, create):
145 baseui = self._create_config(wire["config"])
145 baseui = self._create_config(wire["config"])
146 return instance(baseui, wire["path"], create)
146 return instance(baseui, wire["path"], create)
147
147
148 def repo(self, wire, create=False):
148 def repo(self, wire, create=False):
149 """
149 """
150 Get a repository instance for the given path.
150 Get a repository instance for the given path.
151 """
151 """
152 return self._create_repo(wire, create)
152 return self._create_repo(wire, create)
153
153
154
154
155 def patch_ui_message_output(baseui):
155 def patch_ui_message_output(baseui):
156 baseui.setconfig('ui', 'quiet', 'false')
156 baseui.setconfig('ui', 'quiet', 'false')
157 output = io.BytesIO()
157 output = io.BytesIO()
158
158
159 def write(data, **unused_kwargs):
159 def write(data, **unused_kwargs):
160 output.write(data)
160 output.write(data)
161
161
162 baseui.status = write
162 baseui.status = write
163 baseui.write = write
163 baseui.write = write
164 baseui.warn = write
164 baseui.warn = write
165 baseui.debug = write
165 baseui.debug = write
166
166
167 return baseui, output
167 return baseui, output
168
168
169
169
170 class HgRemote(RemoteBase):
170 class HgRemote(RemoteBase):
171
171
172 def __init__(self, factory):
172 def __init__(self, factory):
173 self._factory = factory
173 self._factory = factory
174 self._bulk_methods = {
174 self._bulk_methods = {
175 "affected_files": self.ctx_files,
175 "affected_files": self.ctx_files,
176 "author": self.ctx_user,
176 "author": self.ctx_user,
177 "branch": self.ctx_branch,
177 "branch": self.ctx_branch,
178 "children": self.ctx_children,
178 "children": self.ctx_children,
179 "date": self.ctx_date,
179 "date": self.ctx_date,
180 "message": self.ctx_description,
180 "message": self.ctx_description,
181 "parents": self.ctx_parents,
181 "parents": self.ctx_parents,
182 "status": self.ctx_status,
182 "status": self.ctx_status,
183 "obsolete": self.ctx_obsolete,
183 "obsolete": self.ctx_obsolete,
184 "phase": self.ctx_phase,
184 "phase": self.ctx_phase,
185 "hidden": self.ctx_hidden,
185 "hidden": self.ctx_hidden,
186 "_file_paths": self.ctx_list,
186 "_file_paths": self.ctx_list,
187 }
187 }
188
188
189 def _get_ctx(self, repo, ref):
189 def _get_ctx(self, repo, ref):
190 return get_ctx(repo, ref)
190 return get_ctx(repo, ref)
191
191
192 @reraise_safe_exceptions
192 @reraise_safe_exceptions
193 def discover_hg_version(self):
193 def discover_hg_version(self):
194 from mercurial import util
194 from mercurial import util
195 return util.version()
195 return util.version()
196
196
197 @reraise_safe_exceptions
197 @reraise_safe_exceptions
198 def is_empty(self, wire):
198 def is_empty(self, wire):
199 repo = self._factory.repo(wire)
199 repo = self._factory.repo(wire)
200
200
201 try:
201 try:
202 return len(repo) == 0
202 return len(repo) == 0
203 except Exception:
203 except Exception:
204 log.exception("failed to read object_store")
204 log.exception("failed to read object_store")
205 return False
205 return False
206
206
207 @reraise_safe_exceptions
207 @reraise_safe_exceptions
208 def archive_repo(self, archive_path, mtime, file_info, kind):
208 def archive_repo(self, archive_path, mtime, file_info, kind):
209 if kind == "tgz":
209 if kind == "tgz":
210 archiver = archival.tarit(archive_path, mtime, "gz")
210 archiver = archival.tarit(archive_path, mtime, "gz")
211 elif kind == "tbz2":
211 elif kind == "tbz2":
212 archiver = archival.tarit(archive_path, mtime, "bz2")
212 archiver = archival.tarit(archive_path, mtime, "bz2")
213 elif kind == 'zip':
213 elif kind == 'zip':
214 archiver = archival.zipit(archive_path, mtime)
214 archiver = archival.zipit(archive_path, mtime)
215 else:
215 else:
216 raise exceptions.ArchiveException()(
216 raise exceptions.ArchiveException()(
217 'Remote does not support: "%s".' % kind)
217 'Remote does not support: "%s".' % kind)
218
218
219 for f_path, f_mode, f_is_link, f_content in file_info:
219 for f_path, f_mode, f_is_link, f_content in file_info:
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
221 archiver.done()
221 archiver.done()
222
222
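# --- editor's note: illustrative sketch, not part of the original file ---
# archive_repo() above expects `kind` to be one of 'tgz', 'tbz2' or 'zip' and
# `file_info` to be an iterable of (path, mode, is_link, content) tuples that
# are streamed into the chosen mercurial archiver. A hypothetical call shape
# (the instance name `remote`, the target path and the file data are all
# placeholders):
#
#     file_info = [
#         ('README.rst', 0o644, False, b'hello\n'),
#         ('bin/run.sh', 0o755, False, b'#!/bin/sh\n'),
#     ]
#     remote.archive_repo('/tmp/repo.tar.gz', mtime=0,
#                         file_info=file_info, kind='tgz')
# --- end editor's note ---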
223 @reraise_safe_exceptions
223 @reraise_safe_exceptions
224 def bookmarks(self, wire):
224 def bookmarks(self, wire):
225 cache_on, context_uid, repo_id = self._cache_on(wire)
225 cache_on, context_uid, repo_id = self._cache_on(wire)
226 @self.region.conditional_cache_on_arguments(condition=cache_on)
226 @self.region.conditional_cache_on_arguments(condition=cache_on)
227 def _bookmarks(_context_uid, _repo_id):
227 def _bookmarks(_context_uid, _repo_id):
228 repo = self._factory.repo(wire)
228 repo = self._factory.repo(wire)
229 return dict(repo._bookmarks)
229 return dict(repo._bookmarks)
230
230
231 return _bookmarks(context_uid, repo_id)
231 return _bookmarks(context_uid, repo_id)
232
232
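# --- editor's note: illustrative sketch, not part of the original file ---
# The caching pattern above is repeated throughout HgRemote: the cache key is
# built only from the explicit (_context_uid, _repo_id, ...) arguments, while
# `wire` itself is taken from the enclosing closure, so volatile parts of the
# wire dict never end up in the key. A self-contained sketch of the same idea,
# using functools.lru_cache (Python 3) as a stand-in for the region decorator
# (the real region cache is shared across calls; lru_cache here only
# illustrates how the key is constructed):
#
#     import functools
#
#     def bookmarks(wire):
#         @functools.lru_cache(maxsize=None)
#         def _bookmarks(_repo_id):               # cache key: repo_id only
#             return dict(wire['bookmarks'])      # `wire` comes from the closure
#         return _bookmarks(wire['repo_id'])
#
#     wire = {'repo_id': 'r1', 'bookmarks': {'main': 'abc123'}}
#     assert bookmarks(wire) == {'main': 'abc123'}
# --- end editor's note ---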
233 @reraise_safe_exceptions
233 @reraise_safe_exceptions
234 def branches(self, wire, normal, closed):
234 def branches(self, wire, normal, closed):
235 cache_on, context_uid, repo_id = self._cache_on(wire)
235 cache_on, context_uid, repo_id = self._cache_on(wire)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 def _branches(_context_uid, _repo_id, _normal, _closed):
237 def _branches(_context_uid, _repo_id, _normal, _closed):
238 repo = self._factory.repo(wire)
238 repo = self._factory.repo(wire)
239 iter_branches = repo.branchmap().iterbranches()
239 iter_branches = repo.branchmap().iterbranches()
240 bt = {}
240 bt = {}
241 for branch_name, _heads, tip, is_closed in iter_branches:
241 for branch_name, _heads, tip, is_closed in iter_branches:
242 if normal and not is_closed:
242 if normal and not is_closed:
243 bt[branch_name] = tip
243 bt[branch_name] = tip
244 if closed and is_closed:
244 if closed and is_closed:
245 bt[branch_name] = tip
245 bt[branch_name] = tip
246
246
247 return bt
247 return bt
248
248
249 return _branches(context_uid, repo_id, normal, closed)
249 return _branches(context_uid, repo_id, normal, closed)
250
250
251 @reraise_safe_exceptions
251 @reraise_safe_exceptions
252 def bulk_request(self, wire, commit_id, pre_load):
252 def bulk_request(self, wire, commit_id, pre_load):
253 cache_on, context_uid, repo_id = self._cache_on(wire)
253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 @self.region.conditional_cache_on_arguments(condition=cache_on)
254 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 def _bulk_request(_repo_id, _commit_id, _pre_load):
255 def _bulk_request(_repo_id, _commit_id, _pre_load):
256 result = {}
256 result = {}
257 for attr in pre_load:
257 for attr in pre_load:
258 try:
258 try:
259 method = self._bulk_methods[attr]
259 method = self._bulk_methods[attr]
260 result[attr] = method(wire, commit_id)
260 result[attr] = method(wire, commit_id)
261 except KeyError as e:
261 except KeyError as e:
262 raise exceptions.VcsException(e)(
262 raise exceptions.VcsException(e)(
263 'Unknown bulk attribute: "%s"' % attr)
263 'Unknown bulk attribute: "%s"' % attr)
264 return result
264 return result
265
265
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
267
267
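# --- editor's note: illustrative sketch, not part of the original file ---
# bulk_request() above resolves each requested attribute through the
# self._bulk_methods dispatch table and collects the results into one dict;
# pre_load is sorted so the cache key stays stable regardless of argument
# order. A tiny self-contained sketch of the same dispatch idea (handler
# functions and values are made up):
#
#     handlers = {
#         'author': lambda commit_id: 'alice',
#         'branch': lambda commit_id: 'default',
#     }
#     pre_load = ['branch', 'author']
#     result = {attr: handlers[attr]('abc123') for attr in sorted(pre_load)}
#     assert result == {'author': 'alice', 'branch': 'default'}
# --- end editor's note ---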
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_branch(self, wire, commit_id):
269 def ctx_branch(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 def _ctx_branch(_repo_id, _commit_id):
272 def _ctx_branch(_repo_id, _commit_id):
273 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
274 ctx = self._get_ctx(repo, commit_id)
274 ctx = self._get_ctx(repo, commit_id)
275 return ctx.branch()
275 return ctx.branch()
276 return _ctx_branch(repo_id, commit_id)
276 return _ctx_branch(repo_id, commit_id)
277
277
278 @reraise_safe_exceptions
278 @reraise_safe_exceptions
279 def ctx_date(self, wire, commit_id):
279 def ctx_date(self, wire, commit_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 def _ctx_date(_repo_id, _commit_id):
282 def _ctx_date(_repo_id, _commit_id):
283 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
284 ctx = self._get_ctx(repo, commit_id)
284 ctx = self._get_ctx(repo, commit_id)
285 return ctx.date()
285 return ctx.date()
286 return _ctx_date(repo_id, commit_id)
286 return _ctx_date(repo_id, commit_id)
287
287
288 @reraise_safe_exceptions
288 @reraise_safe_exceptions
289 def ctx_description(self, wire, revision):
289 def ctx_description(self, wire, revision):
290 repo = self._factory.repo(wire)
290 repo = self._factory.repo(wire)
291 ctx = self._get_ctx(repo, revision)
291 ctx = self._get_ctx(repo, revision)
292 return ctx.description()
292 return ctx.description()
293
293
294 @reraise_safe_exceptions
294 @reraise_safe_exceptions
295 def ctx_files(self, wire, commit_id):
295 def ctx_files(self, wire, commit_id):
296 cache_on, context_uid, repo_id = self._cache_on(wire)
296 cache_on, context_uid, repo_id = self._cache_on(wire)
297 @self.region.conditional_cache_on_arguments(condition=cache_on)
297 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 def _ctx_files(_repo_id, _commit_id):
298 def _ctx_files(_repo_id, _commit_id):
299 repo = self._factory.repo(wire)
299 repo = self._factory.repo(wire)
300 ctx = self._get_ctx(repo, commit_id)
300 ctx = self._get_ctx(repo, commit_id)
301 return ctx.files()
301 return ctx.files()
302
302
303 return _ctx_files(repo_id, commit_id)
303 return _ctx_files(repo_id, commit_id)
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def ctx_list(self, path, revision):
306 def ctx_list(self, path, revision):
307 repo = self._factory.repo(path)
307 repo = self._factory.repo(path)
308 ctx = self._get_ctx(repo, revision)
308 ctx = self._get_ctx(repo, revision)
309 return list(ctx)
309 return list(ctx)
310
310
311 @reraise_safe_exceptions
311 @reraise_safe_exceptions
312 def ctx_parents(self, wire, commit_id):
312 def ctx_parents(self, wire, commit_id):
313 cache_on, context_uid, repo_id = self._cache_on(wire)
313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 def _ctx_parents(_repo_id, _commit_id):
315 def _ctx_parents(_repo_id, _commit_id):
316 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
317 ctx = self._get_ctx(repo, commit_id)
317 ctx = self._get_ctx(repo, commit_id)
318 return [parent.hex() for parent in ctx.parents()
318 return [parent.hex() for parent in ctx.parents()
319 if not (parent.hidden() or parent.obsolete())]
319 if not (parent.hidden() or parent.obsolete())]
320
320
321 return _ctx_parents(repo_id, commit_id)
321 return _ctx_parents(repo_id, commit_id)
322
322
323 @reraise_safe_exceptions
323 @reraise_safe_exceptions
324 def ctx_children(self, wire, commit_id):
324 def ctx_children(self, wire, commit_id):
325 cache_on, context_uid, repo_id = self._cache_on(wire)
325 cache_on, context_uid, repo_id = self._cache_on(wire)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 def _ctx_children(_repo_id, _commit_id):
327 def _ctx_children(_repo_id, _commit_id):
328 repo = self._factory.repo(wire)
328 repo = self._factory.repo(wire)
329 ctx = self._get_ctx(repo, commit_id)
329 ctx = self._get_ctx(repo, commit_id)
330 return [child.hex() for child in ctx.children()
330 return [child.hex() for child in ctx.children()
331 if not (child.hidden() or child.obsolete())]
331 if not (child.hidden() or child.obsolete())]
332
332
333 return _ctx_children(repo_id, commit_id)
333 return _ctx_children(repo_id, commit_id)
334
334
335 @reraise_safe_exceptions
335 @reraise_safe_exceptions
336 def ctx_phase(self, wire, commit_id):
336 def ctx_phase(self, wire, commit_id):
337 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 @self.region.conditional_cache_on_arguments(condition=cache_on)
338 @self.region.conditional_cache_on_arguments(condition=cache_on)
339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
340 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
341 ctx = self._get_ctx(repo, commit_id)
341 ctx = self._get_ctx(repo, commit_id)
342 # public=0, draft=1, secret=3
342 # public=0, draft=1, secret=3
343 return ctx.phase()
343 return ctx.phase()
344 return _ctx_phase(context_uid, repo_id, commit_id)
344 return _ctx_phase(context_uid, repo_id, commit_id)
345
345
346 @reraise_safe_exceptions
346 @reraise_safe_exceptions
347 def ctx_obsolete(self, wire, commit_id):
347 def ctx_obsolete(self, wire, commit_id):
348 cache_on, context_uid, repo_id = self._cache_on(wire)
348 cache_on, context_uid, repo_id = self._cache_on(wire)
349 @self.region.conditional_cache_on_arguments(condition=cache_on)
349 @self.region.conditional_cache_on_arguments(condition=cache_on)
350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
351 repo = self._factory.repo(wire)
351 repo = self._factory.repo(wire)
352 ctx = self._get_ctx(repo, commit_id)
352 ctx = self._get_ctx(repo, commit_id)
353 return ctx.obsolete()
353 return ctx.obsolete()
354 return _ctx_obsolete(context_uid, repo_id, commit_id)
354 return _ctx_obsolete(context_uid, repo_id, commit_id)
355
355
356 @reraise_safe_exceptions
356 @reraise_safe_exceptions
357 def ctx_hidden(self, wire, commit_id):
357 def ctx_hidden(self, wire, commit_id):
358 cache_on, context_uid, repo_id = self._cache_on(wire)
358 cache_on, context_uid, repo_id = self._cache_on(wire)
359 @self.region.conditional_cache_on_arguments(condition=cache_on)
359 @self.region.conditional_cache_on_arguments(condition=cache_on)
360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
361 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, commit_id)
362 ctx = self._get_ctx(repo, commit_id)
363 return ctx.hidden()
363 return ctx.hidden()
364 return _ctx_hidden(context_uid, repo_id, commit_id)
364 return _ctx_hidden(context_uid, repo_id, commit_id)
365
365
366 @reraise_safe_exceptions
366 @reraise_safe_exceptions
367 def ctx_substate(self, wire, revision):
367 def ctx_substate(self, wire, revision):
368 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
369 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
370 return ctx.substate
370 return ctx.substate
371
371
372 @reraise_safe_exceptions
372 @reraise_safe_exceptions
373 def ctx_status(self, wire, revision):
373 def ctx_status(self, wire, revision):
374 repo = self._factory.repo(wire)
374 repo = self._factory.repo(wire)
375 ctx = self._get_ctx(repo, revision)
375 ctx = self._get_ctx(repo, revision)
376 status = repo[ctx.p1().node()].status(other=ctx.node())
376 status = repo[ctx.p1().node()].status(other=ctx.node())
377 # the status object (an odd, custom named tuple in mercurial) is not
377 # the status object (an odd, custom named tuple in mercurial) is not
378 # correctly serializable; we make it a list, as the underlying
378 # correctly serializable; we make it a list, as the underlying
379 # API expects a list
379 # API expects a list
380 return list(status)
380 return list(status)
381
381
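# --- editor's note: illustrative sketch, not part of the original file ---
# ctx_status() above flattens Mercurial's status object into a plain list so
# it survives serialization over the wire. The real object carries more fields
# (modified, added, removed, deleted, unknown, ignored, clean); the namedtuple
# below is only a stand-in to show why list() makes it transport-friendly:
#
#     from collections import namedtuple
#
#     Status = namedtuple('Status', ['modified', 'added', 'removed'])
#     st = Status(modified=['a.txt'], added=[], removed=['b.txt'])
#     assert list(st) == [['a.txt'], [], ['b.txt']]  # plain nested lists
# --- end editor's note ---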
382 @reraise_safe_exceptions
382 @reraise_safe_exceptions
383 def ctx_user(self, wire, revision):
383 def ctx_user(self, wire, revision):
384 repo = self._factory.repo(wire)
384 repo = self._factory.repo(wire)
385 ctx = self._get_ctx(repo, revision)
385 ctx = self._get_ctx(repo, revision)
386 return ctx.user()
386 return ctx.user()
387
387
388 @reraise_safe_exceptions
388 @reraise_safe_exceptions
389 def check_url(self, url, config):
389 def check_url(self, url, config):
390 _proto = None
390 _proto = None
391 if '+' in url[:url.find('://')]:
391 if '+' in url[:url.find('://')]:
392 _proto = url[0:url.find('+')]
392 _proto = url[0:url.find('+')]
393 url = url[url.find('+') + 1:]
393 url = url[url.find('+') + 1:]
394 handlers = []
394 handlers = []
395 url_obj = url_parser(url)
395 url_obj = url_parser(url)
396 test_uri, authinfo = url_obj.authinfo()
396 test_uri, authinfo = url_obj.authinfo()
397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
398 url_obj.query = obfuscate_qs(url_obj.query)
398 url_obj.query = obfuscate_qs(url_obj.query)
399
399
400 cleaned_uri = str(url_obj)
400 cleaned_uri = str(url_obj)
401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
402
402
403 if authinfo:
403 if authinfo:
404 # create a password manager
404 # create a password manager
405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
406 passmgr.add_password(*authinfo)
406 passmgr.add_password(*authinfo)
407
407
408 handlers.extend((httpbasicauthhandler(passmgr),
408 handlers.extend((httpbasicauthhandler(passmgr),
409 httpdigestauthhandler(passmgr)))
409 httpdigestauthhandler(passmgr)))
410
410
411 o = urllib2.build_opener(*handlers)
411 o = urllib2.build_opener(*handlers)
412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
413 ('Accept', 'application/mercurial-0.1')]
413 ('Accept', 'application/mercurial-0.1')]
414
414
415 q = {"cmd": 'between'}
415 q = {"cmd": 'between'}
416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
417 qs = '?%s' % urllib.urlencode(q)
417 qs = '?%s' % urllib.urlencode(q)
418 cu = "%s%s" % (test_uri, qs)
418 cu = "%s%s" % (test_uri, qs)
419 req = urllib2.Request(cu, None, {})
419 req = urllib2.Request(cu, None, {})
420
420
421 try:
421 try:
422 log.debug("Trying to open URL %s", cleaned_uri)
422 log.debug("Trying to open URL %s", cleaned_uri)
423 resp = o.open(req)
423 resp = o.open(req)
424 if resp.code != 200:
424 if resp.code != 200:
425 raise exceptions.URLError()('Return Code is not 200')
425 raise exceptions.URLError()('Return Code is not 200')
426 except Exception as e:
426 except Exception as e:
427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
428 # means it cannot be cloned
428 # means it cannot be cloned
429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
430
430
431 # now check if it's a proper hg repo, but don't do it for svn
431 # now check if it's a proper hg repo, but don't do it for svn
432 try:
432 try:
433 if _proto == 'svn':
433 if _proto == 'svn':
434 pass
434 pass
435 else:
435 else:
436 # check for pure hg repos
436 # check for pure hg repos
437 log.debug(
437 log.debug(
438 "Verifying if URL is a Mercurial repository: %s",
438 "Verifying if URL is a Mercurial repository: %s",
439 cleaned_uri)
439 cleaned_uri)
440 ui = make_ui_from_config(config)
440 ui = make_ui_from_config(config)
441 peer_checker = makepeer(ui, url)
441 peer_checker = makepeer(ui, url)
442 peer_checker.lookup('tip')
442 peer_checker.lookup('tip')
443 except Exception as e:
443 except Exception as e:
444 log.warning("URL is not a valid Mercurial repository: %s",
444 log.warning("URL is not a valid Mercurial repository: %s",
445 cleaned_uri)
445 cleaned_uri)
446 raise exceptions.URLError(e)(
446 raise exceptions.URLError(e)(
447 "url [%s] does not look like an hg repo org_exc: %s"
447 "url [%s] does not look like an hg repo org_exc: %s"
448 % (cleaned_uri, e))
448 % (cleaned_uri, e))
449
449
450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
451 return True
451 return True
452
452
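# --- editor's note: illustrative sketch, not part of the original file ---
# check_url() above strips an optional scheme prefix such as 'hg+' or 'svn+'
# before probing the remote. The exact slicing logic, run on a made-up URL:
#
#     url = 'hg+http://code.example.com/repo'
#     _proto = None
#     if '+' in url[:url.find('://')]:
#         _proto = url[0:url.find('+')]       # -> 'hg'
#         url = url[url.find('+') + 1:]       # -> 'http://code.example.com/repo'
#     assert (_proto, url) == ('hg', 'http://code.example.com/repo')
# --- end editor's note ---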
453 @reraise_safe_exceptions
453 @reraise_safe_exceptions
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
455 repo = self._factory.repo(wire)
455 repo = self._factory.repo(wire)
456
456
457 if file_filter:
457 if file_filter:
458 match_filter = match(file_filter[0], '', [file_filter[1]])
458 match_filter = match(file_filter[0], '', [file_filter[1]])
459 else:
459 else:
460 match_filter = file_filter
460 match_filter = file_filter
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
462
462
463 try:
463 try:
464 return "".join(patch.diff(
464 return "".join(patch.diff(
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
466 except RepoLookupError as e:
466 except RepoLookupError as e:
467 raise exceptions.LookupException(e)()
467 raise exceptions.LookupException(e)()
468
468
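# --- editor's note: illustrative sketch, not part of the original file ---
# diff() above accepts `file_filter` as a (root, pattern) pair which is turned
# into a mercurial matcher via match(root, '', [pattern]); passing a falsy
# file_filter diffs the whole changeset. Hypothetical argument shape (root
# path and pattern are placeholders):
#
#     file_filter = ('/srv/repos/myrepo', 'path:docs/readme.rst')
#     # -> match('/srv/repos/myrepo', '', ['path:docs/readme.rst'])
# --- end editor's note ---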
469 @reraise_safe_exceptions
469 @reraise_safe_exceptions
470 def node_history(self, wire, revision, path, limit):
470 def node_history(self, wire, revision, path, limit):
471 cache_on, context_uid, repo_id = self._cache_on(wire)
471 cache_on, context_uid, repo_id = self._cache_on(wire)
472 @self.region.conditional_cache_on_arguments(condition=cache_on)
472 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
474 repo = self._factory.repo(wire)
474 repo = self._factory.repo(wire)
475
475
476 ctx = self._get_ctx(repo, revision)
476 ctx = self._get_ctx(repo, revision)
477 fctx = ctx.filectx(path)
477 fctx = ctx.filectx(path)
478
478
479 def history_iter():
479 def history_iter():
480 limit_rev = fctx.rev()
480 limit_rev = fctx.rev()
481 for obj in reversed(list(fctx.filelog())):
481 for obj in reversed(list(fctx.filelog())):
482 obj = fctx.filectx(obj)
482 obj = fctx.filectx(obj)
483 ctx = obj.changectx()
483 ctx = obj.changectx()
484 if ctx.hidden() or ctx.obsolete():
484 if ctx.hidden() or ctx.obsolete():
485 continue
485 continue
486
486
487 if limit_rev >= obj.rev():
487 if limit_rev >= obj.rev():
488 yield obj
488 yield obj
489
489
490 history = []
490 history = []
491 for cnt, obj in enumerate(history_iter()):
491 for cnt, obj in enumerate(history_iter()):
492 if limit and cnt >= limit:
492 if limit and cnt >= limit:
493 break
493 break
494 history.append(hex(obj.node()))
494 history.append(hex(obj.node()))
495
495
496 return history
496 return history
497 return _node_history(context_uid, repo_id, revision, path, limit)
497 return _node_history(context_uid, repo_id, revision, path, limit)
498
498
499 @reraise_safe_exceptions
499 @reraise_safe_exceptions
500 def node_history_untill(self, wire, revision, path, limit):
500 def node_history_untill(self, wire, revision, path, limit):
501 cache_on, context_uid, repo_id = self._cache_on(wire)
501 cache_on, context_uid, repo_id = self._cache_on(wire)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
503 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
503 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
504 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
505 ctx = self._get_ctx(repo, revision)
505 ctx = self._get_ctx(repo, revision)
506 fctx = ctx.filectx(path)
506 fctx = ctx.filectx(path)
507
507
508 file_log = list(fctx.filelog())
508 file_log = list(fctx.filelog())
509 if limit:
509 if limit:
510 # Limit to the last n items
510 # Limit to the last n items
511 file_log = file_log[-limit:]
511 file_log = file_log[-limit:]
512
512
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
515
515
516 @reraise_safe_exceptions
516 @reraise_safe_exceptions
517 def fctx_annotate(self, wire, revision, path):
517 def fctx_annotate(self, wire, revision, path):
518 repo = self._factory.repo(wire)
518 repo = self._factory.repo(wire)
519 ctx = self._get_ctx(repo, revision)
519 ctx = self._get_ctx(repo, revision)
520 fctx = ctx.filectx(path)
520 fctx = ctx.filectx(path)
521
521
522 result = []
522 result = []
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
524 ln_no = i
524 ln_no = i
525 sha = hex(annotate_obj.fctx.node())
525 sha = hex(annotate_obj.fctx.node())
526 content = annotate_obj.text
526 content = annotate_obj.text
527 result.append((ln_no, sha, content))
527 result.append((ln_no, sha, content))
528 return result
528 return result
529
529
530 @reraise_safe_exceptions
530 @reraise_safe_exceptions
531 def fctx_node_data(self, wire, revision, path):
531 def fctx_node_data(self, wire, revision, path):
532 repo = self._factory.repo(wire)
532 repo = self._factory.repo(wire)
533 ctx = self._get_ctx(repo, revision)
533 ctx = self._get_ctx(repo, revision)
534 fctx = ctx.filectx(path)
534 fctx = ctx.filectx(path)
535 return fctx.data()
535 return fctx.data()
536
536
537 @reraise_safe_exceptions
537 @reraise_safe_exceptions
538 def fctx_flags(self, wire, commit_id, path):
538 def fctx_flags(self, wire, commit_id, path):
539 cache_on, context_uid, repo_id = self._cache_on(wire)
539 cache_on, context_uid, repo_id = self._cache_on(wire)
540 @self.region.conditional_cache_on_arguments(condition=cache_on)
540 @self.region.conditional_cache_on_arguments(condition=cache_on)
541 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
542 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
544 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
545 return fctx.flags()
545 return fctx.flags()
546
546
547 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
548
548
549 @reraise_safe_exceptions
549 @reraise_safe_exceptions
550 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
551 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 @self.region.conditional_cache_on_arguments(condition=cache_on)
552 @self.region.conditional_cache_on_arguments(condition=cache_on)
553 def _fctx_size(_repo_id, _revision, _path):
553 def _fctx_size(_repo_id, _revision, _path):
554 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
555 ctx = self._get_ctx(repo, commit_id)
555 ctx = self._get_ctx(repo, commit_id)
556 fctx = ctx.filectx(path)
556 fctx = ctx.filectx(path)
557 return fctx.size()
557 return fctx.size()
558 return _fctx_size(repo_id, commit_id, path)
558 return _fctx_size(repo_id, commit_id, path)
559
559
560 @reraise_safe_exceptions
560 @reraise_safe_exceptions
561 def get_all_commit_ids(self, wire, name):
561 def get_all_commit_ids(self, wire, name):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
565 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
566 repo = repo.filtered(name)
566 repo = repo.filtered(name)
567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
568 return revs
568 return revs
569 return _get_all_commit_ids(context_uid, repo_id, name)
569 return _get_all_commit_ids(context_uid, repo_id, name)
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def get_config_value(self, wire, section, name, untrusted=False):
572 def get_config_value(self, wire, section, name, untrusted=False):
573 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
574 return repo.ui.config(section, name, untrusted=untrusted)
574 return repo.ui.config(section, name, untrusted=untrusted)
575
575
576 @reraise_safe_exceptions
576 @reraise_safe_exceptions
577 def is_large_file(self, wire, commit_id, path):
577 def is_large_file(self, wire, commit_id, path):
578 cache_on, context_uid, repo_id = self._cache_on(wire)
578 cache_on, context_uid, repo_id = self._cache_on(wire)
579 @self.region.conditional_cache_on_arguments(condition=cache_on)
579 @self.region.conditional_cache_on_arguments(condition=cache_on)
580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
581 return largefiles.lfutil.isstandin(path)
581 return largefiles.lfutil.isstandin(path)
582
582
583 return _is_large_file(context_uid, repo_id, commit_id, path)
583 return _is_large_file(context_uid, repo_id, commit_id, path)
584
584
585 @reraise_safe_exceptions
585 @reraise_safe_exceptions
586 def is_binary(self, wire, revision, path):
586 def is_binary(self, wire, revision, path):
587 cache_on, context_uid, repo_id = self._cache_on(wire)
587 cache_on, context_uid, repo_id = self._cache_on(wire)
588
588
589 @self.region.conditional_cache_on_arguments(condition=cache_on)
589 @self.region.conditional_cache_on_arguments(condition=cache_on)
590 def _is_binary(_repo_id, _sha, _path):
590 def _is_binary(_repo_id, _sha, _path):
591 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
592 ctx = self._get_ctx(repo, revision)
592 ctx = self._get_ctx(repo, revision)
593 fctx = ctx.filectx(path)
593 fctx = ctx.filectx(path)
594 return fctx.isbinary()
594 return fctx.isbinary()
595
595
596 return _is_binary(repo_id, revision, path)
596 return _is_binary(repo_id, revision, path)
597
597
598 @reraise_safe_exceptions
598 @reraise_safe_exceptions
599 def in_largefiles_store(self, wire, sha):
599 def in_largefiles_store(self, wire, sha):
600 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
601 return largefiles.lfutil.instore(repo, sha)
601 return largefiles.lfutil.instore(repo, sha)
602
602
603 @reraise_safe_exceptions
603 @reraise_safe_exceptions
604 def in_user_cache(self, wire, sha):
604 def in_user_cache(self, wire, sha):
605 repo = self._factory.repo(wire)
605 repo = self._factory.repo(wire)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
607
607
608 @reraise_safe_exceptions
608 @reraise_safe_exceptions
609 def store_path(self, wire, sha):
609 def store_path(self, wire, sha):
610 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
611 return largefiles.lfutil.storepath(repo, sha)
611 return largefiles.lfutil.storepath(repo, sha)
612
612
613 @reraise_safe_exceptions
613 @reraise_safe_exceptions
614 def link(self, wire, sha, path):
614 def link(self, wire, sha, path):
615 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
616 largefiles.lfutil.link(
616 largefiles.lfutil.link(
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
618
618
619 @reraise_safe_exceptions
619 @reraise_safe_exceptions
620 def localrepository(self, wire, create=False):
620 def localrepository(self, wire, create=False):
621 self._factory.repo(wire, create=create)
621 self._factory.repo(wire, create=create)
622
622
623 @reraise_safe_exceptions
623 @reraise_safe_exceptions
624 def lookup(self, wire, revision, both):
624 def lookup(self, wire, revision, both):
625 cache_on, context_uid, repo_id = self._cache_on(wire)
625 cache_on, context_uid, repo_id = self._cache_on(wire)
626 @self.region.conditional_cache_on_arguments(condition=cache_on)
626 @self.region.conditional_cache_on_arguments(condition=cache_on)
627 def _lookup(_context_uid, _repo_id, _revision, _both):
627 def _lookup(_context_uid, _repo_id, _revision, _both):
628
628
629 repo = self._factory.repo(wire)
629 repo = self._factory.repo(wire)
630 rev = _revision
630 rev = _revision
631 if isinstance(rev, int):
631 if isinstance(rev, int):
632 # NOTE(marcink):
632 # NOTE(marcink):
633 # since Mercurial doesn't support negative indexes properly
633 # since Mercurial doesn't support negative indexes properly
634 # we need to shift by one to get the proper index, e.g.
634 # we need to shift by one to get the proper index, e.g.
635 # repo[-1] => repo[-2]
635 # repo[-1] => repo[-2]
636 # repo[0] => repo[-1]
636 # repo[0] => repo[-1]
637 if rev <= 0:
637 if rev <= 0:
638 rev -= 1
638 rev -= 1
639 try:
639 try:
640 ctx = self._get_ctx(repo, rev)
640 ctx = self._get_ctx(repo, rev)
641 except (TypeError, RepoLookupError) as e:
641 except (TypeError, RepoLookupError) as e:
642 e._org_exc_tb = traceback.format_exc()
642 e._org_exc_tb = traceback.format_exc()
643 raise exceptions.LookupException(e)(rev)
643 raise exceptions.LookupException(e)(rev)
644 except LookupError as e:
644 except LookupError as e:
645 e._org_exc_tb = traceback.format_exc()
645 e._org_exc_tb = traceback.format_exc()
646 raise exceptions.LookupException(e)(e.name)
646 raise exceptions.LookupException(e)(e.name)
647
647
648 if not both:
648 if not both:
649 return ctx.hex()
649 return ctx.hex()
650
650
651 ctx = repo[ctx.hex()]
651 ctx = repo[ctx.hex()]
652 return ctx.hex(), ctx.rev()
652 return ctx.hex(), ctx.rev()
653
653
654 return _lookup(context_uid, repo_id, revision, both)
654 return _lookup(context_uid, repo_id, revision, both)
655
655
656 @reraise_safe_exceptions
656 @reraise_safe_exceptions
657 def sync_push(self, wire, url):
657 def sync_push(self, wire, url):
658 if not self.check_url(url, wire['config']):
658 if not self.check_url(url, wire['config']):
659 return
659 return
660
660
661 repo = self._factory.repo(wire)
661 repo = self._factory.repo(wire)
662
662
663 # Disable any prompts for this repo
663 # Disable any prompts for this repo
664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
665
665
666 bookmarks = dict(repo._bookmarks).keys()
666 bookmarks = dict(repo._bookmarks).keys()
667 remote = peer(repo, {}, url)
667 remote = peer(repo, {}, url)
668 # Disable any prompts for this remote
668 # Disable any prompts for this remote
669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
670
670
671 return exchange.push(
671 return exchange.push(
672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def revision(self, wire, rev):
675 def revision(self, wire, rev):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 ctx = self._get_ctx(repo, rev)
677 ctx = self._get_ctx(repo, rev)
678 return ctx.rev()
678 return ctx.rev()
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def rev_range(self, wire, commit_filter):
681 def rev_range(self, wire, commit_filter):
682 cache_on, context_uid, repo_id = self._cache_on(wire)
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683
683
684 @self.region.conditional_cache_on_arguments(condition=cache_on)
684 @self.region.conditional_cache_on_arguments(condition=cache_on)
685 def _rev_range(_context_uid, _repo_id, _filter):
685 def _rev_range(_context_uid, _repo_id, _filter):
686 repo = self._factory.repo(wire)
686 repo = self._factory.repo(wire)
687 revisions = [rev for rev in revrange(repo, commit_filter)]
687 revisions = [rev for rev in revrange(repo, commit_filter)]
688 return revisions
688 return revisions
689
689
690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
691
691
692 @reraise_safe_exceptions
692 @reraise_safe_exceptions
693 def rev_range_hash(self, wire, node):
693 def rev_range_hash(self, wire, node):
694 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
695
695
696 def get_revs(repo, rev_opt):
696 def get_revs(repo, rev_opt):
697 if rev_opt:
697 if rev_opt:
698 revs = revrange(repo, rev_opt)
698 revs = revrange(repo, rev_opt)
699 if len(revs) == 0:
699 if len(revs) == 0:
700 return (nullrev, nullrev)
700 return (nullrev, nullrev)
701 return max(revs), min(revs)
701 return max(revs), min(revs)
702 else:
702 else:
703 return len(repo) - 1, 0
703 return len(repo) - 1, 0
704
704
705 stop, start = get_revs(repo, [node + ':'])
705 stop, start = get_revs(repo, [node + ':'])
706 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
706 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
707 return revs
707 return revs
708
708
709 @reraise_safe_exceptions
709 @reraise_safe_exceptions
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
711 other_path = kwargs.pop('other_path', None)
711 other_path = kwargs.pop('other_path', None)
712
712
713 # case when we want to compare two independent repositories
713 # case when we want to compare two independent repositories
714 if other_path and other_path != wire["path"]:
714 if other_path and other_path != wire["path"]:
715 baseui = self._factory._create_config(wire["config"])
715 baseui = self._factory._create_config(wire["config"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
717 else:
717 else:
718 repo = self._factory.repo(wire)
718 repo = self._factory.repo(wire)
719 return list(repo.revs(rev_spec, *args))
719 return list(repo.revs(rev_spec, *args))
720
720
721 @reraise_safe_exceptions
721 @reraise_safe_exceptions
722 def verify(self, wire,):
722 def verify(self, wire,):
723 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
724 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
725
725
726 baseui, output = patch_ui_message_output(baseui)
726 baseui, output = patch_ui_message_output(baseui)
727
727
728 repo.ui = baseui
728 repo.ui = baseui
729 verify.verify(repo)
729 verify.verify(repo)
730 return output.getvalue()
730 return output.getvalue()
731
731
732 @reraise_safe_exceptions
732 @reraise_safe_exceptions
733 def hg_update_cache(self, wire,):
733 def hg_update_cache(self, wire,):
734 repo = self._factory.repo(wire)
734 repo = self._factory.repo(wire)
735 baseui = self._factory._create_config(wire['config'])
735 baseui = self._factory._create_config(wire['config'])
736 baseui, output = patch_ui_message_output(baseui)
736 baseui, output = patch_ui_message_output(baseui)
737
737
738 repo.ui = baseui
738 repo.ui = baseui
739 with repo.wlock(), repo.lock():
739 with repo.wlock(), repo.lock():
740 repo.updatecaches(full=True)
740 repo.updatecaches(full=True)
741
741
742 return output.getvalue()
742 return output.getvalue()
743
743
744 @reraise_safe_exceptions
744 @reraise_safe_exceptions
745 def hg_rebuild_fn_cache(self, wire,):
745 def hg_rebuild_fn_cache(self, wire,):
746 repo = self._factory.repo(wire)
746 repo = self._factory.repo(wire)
747 baseui = self._factory._create_config(wire['config'])
747 baseui = self._factory._create_config(wire['config'])
748 baseui, output = patch_ui_message_output(baseui)
748 baseui, output = patch_ui_message_output(baseui)
749
749
750 repo.ui = baseui
750 repo.ui = baseui
751
751
752 repair.rebuildfncache(baseui, repo)
752 repair.rebuildfncache(baseui, repo)
753
753
754 return output.getvalue()
754 return output.getvalue()
755
755
756 @reraise_safe_exceptions
756 @reraise_safe_exceptions
757 def tags(self, wire):
757 def tags(self, wire):
758 cache_on, context_uid, repo_id = self._cache_on(wire)
758 cache_on, context_uid, repo_id = self._cache_on(wire)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
760 def _tags(_context_uid, _repo_id):
760 def _tags(_context_uid, _repo_id):
761 repo = self._factory.repo(wire)
761 repo = self._factory.repo(wire)
762 return repo.tags()
762 return repo.tags()
763
763
764 return _tags(context_uid, repo_id)
764 return _tags(context_uid, repo_id)
765
765
766 @reraise_safe_exceptions
766 @reraise_safe_exceptions
767 def update(self, wire, node=None, clean=False):
767 def update(self, wire, node=None, clean=False):
768 repo = self._factory.repo(wire)
768 repo = self._factory.repo(wire)
769 baseui = self._factory._create_config(wire['config'])
769 baseui = self._factory._create_config(wire['config'])
770 commands.update(baseui, repo, node=node, clean=clean)
770 commands.update(baseui, repo, node=node, clean=clean)
771
771
772 @reraise_safe_exceptions
772 @reraise_safe_exceptions
773 def identify(self, wire):
773 def identify(self, wire):
774 repo = self._factory.repo(wire)
774 repo = self._factory.repo(wire)
775 baseui = self._factory._create_config(wire['config'])
775 baseui = self._factory._create_config(wire['config'])
776 output = io.BytesIO()
776 output = io.BytesIO()
777 baseui.write = output.write
777 baseui.write = output.write
778 # This is required to get a full node id
778 # This is required to get a full node id
779 baseui.debugflag = True
779 baseui.debugflag = True
780 commands.identify(baseui, repo, id=True)
780 commands.identify(baseui, repo, id=True)
781
781
782 return output.getvalue()
782 return output.getvalue()
783
783
784 @reraise_safe_exceptions
784 @reraise_safe_exceptions
785 def heads(self, wire, branch=None):
785 def heads(self, wire, branch=None):
786 repo = self._factory.repo(wire)
786 repo = self._factory.repo(wire)
787 baseui = self._factory._create_config(wire['config'])
787 baseui = self._factory._create_config(wire['config'])
788 output = io.BytesIO()
788 output = io.BytesIO()
789
789
790 def write(data, **unused_kwargs):
790 def write(data, **unused_kwargs):
791 output.write(data)
791 output.write(data)
792
792
793 baseui.write = write
793 baseui.write = write
794 if branch:
794 if branch:
795 args = [branch]
795 args = [branch]
796 else:
796 else:
797 args = []
797 args = []
798 commands.heads(baseui, repo, template='{node} ', *args)
798 commands.heads(baseui, repo, template='{node} ', *args)
799
799
800 return output.getvalue()
800 return output.getvalue()
801
801
802 @reraise_safe_exceptions
802 @reraise_safe_exceptions
803 def ancestor(self, wire, revision1, revision2):
803 def ancestor(self, wire, revision1, revision2):
804 repo = self._factory.repo(wire)
804 repo = self._factory.repo(wire)
805 changelog = repo.changelog
805 changelog = repo.changelog
806 lookup = repo.lookup
806 lookup = repo.lookup
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
808 return hex(a)
808 return hex(a)
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
814
814
815 @reraise_safe_exceptions
815 @reraise_safe_exceptions
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
817
817
818 repo = self._factory.repo(wire)
818 repo = self._factory.repo(wire)
819 baseui = self._factory._create_config(wire['config'])
819 baseui = self._factory._create_config(wire['config'])
820 publishing = baseui.configbool('phases', 'publish')
820 publishing = baseui.configbool('phases', 'publish')
821 if publishing:
821 if publishing:
822 new_commit = 'public'
822 new_commit = 'public'
823 else:
823 else:
824 new_commit = 'draft'
824 new_commit = 'draft'
825
825
826 def _filectxfn(_repo, ctx, path):
826 def _filectxfn(_repo, ctx, path):
827 """
827 """
828 Marks the given path as added/changed/removed in the given _repo. This is
828 Marks the given path as added/changed/removed in the given _repo. This is
829 used by Mercurial's internal commit function.
829 used by Mercurial's internal commit function.
830 """
830 """
831
831
832 # check if this path is removed
832 # check if this path is removed
833 if path in removed:
833 if path in removed:
834 # returning None is a way to mark node for removal
834 # returning None is a way to mark node for removal
835 return None
835 return None
836
836
837 # check if this path is added or changed
837 # check if this path is added or changed
838 for node in updated:
838 for node in updated:
839 if node['path'] == path:
839 if node['path'] == path:
840 return memfilectx(
840 return memfilectx(
841 _repo,
841 _repo,
842 changectx=ctx,
842 changectx=ctx,
843 path=node['path'],
843 path=node['path'],
844 data=node['content'],
844 data=node['content'],
845 islink=False,
845 islink=False,
846 isexec=bool(node['mode'] & stat.S_IXUSR),
846 isexec=bool(node['mode'] & stat.S_IXUSR),
847 copysource=False)
847 copysource=False)
848
848
849 raise exceptions.AbortException()(
849 raise exceptions.AbortException()(
850 "Given path haven't been marked as added, "
850 "Given path haven't been marked as added, "
851 "changed or removed (%s)" % path)
851 "changed or removed (%s)" % path)
852
852
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
854
854
855 commit_ctx = memctx(
855 commit_ctx = memctx(
856 repo=repo,
856 repo=repo,
857 parents=parents,
857 parents=parents,
858 text=message,
858 text=message,
859 files=files,
859 files=files,
860 filectxfn=_filectxfn,
860 filectxfn=_filectxfn,
861 user=user,
861 user=user,
862 date=(commit_time, commit_timezone),
862 date=(commit_time, commit_timezone),
863 extra=extra)
863 extra=extra)
864
864
865 n = repo.commitctx(commit_ctx)
865 n = repo.commitctx(commit_ctx)
866 new_id = hex(n)
866 new_id = hex(n)
867
867
868 return new_id
868 return new_id
869
869
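# --- editor's note: illustrative sketch, not part of the original file ---
# commitctx() above builds an in-memory commit: every path in `files` is routed
# through _filectxfn, which returns None for paths listed in `removed` (marking
# deletion) and a memfilectx for paths found in `updated`. Hypothetical payload
# shape for one changed and one removed file (all values are placeholders):
#
#     updated = [{'path': 'docs/readme.rst', 'content': b'new text\n', 'mode': 0o100644}]
#     removed = ['old_file.txt']
#     files = ['docs/readme.rst', 'old_file.txt']
#     # isexec is derived from mode & stat.S_IXUSR, so 0o100644 -> False
#     # and 0o100755 -> True.
# --- end editor's note ---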
870 @reraise_safe_exceptions
870 @reraise_safe_exceptions
871 def pull(self, wire, url, commit_ids=None):
871 def pull(self, wire, url, commit_ids=None):
872 repo = self._factory.repo(wire)
872 repo = self._factory.repo(wire)
873 # Disable any prompts for this repo
873 # Disable any prompts for this repo
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
875
875
876 remote = peer(repo, {}, url)
876 remote = peer(repo, {}, url)
877 # Disable any prompts for this remote
877 # Disable any prompts for this remote
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
879
879
880 if commit_ids:
880 if commit_ids:
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
882
882
883 return exchange.pull(
883 return exchange.pull(
884 repo, remote, heads=commit_ids, force=None).cgresult
884 repo, remote, heads=commit_ids, force=None).cgresult
885
885
886 @reraise_safe_exceptions
886 @reraise_safe_exceptions
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
888 repo = self._factory.repo(wire)
888 repo = self._factory.repo(wire)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
890
890
891 # Mercurial internally has a lot of logic that checks ONLY whether an
891 # Mercurial internally has a lot of logic that checks ONLY whether an
892 # option is defined, so we only pass the options that are actually set
892 # option is defined, so we only pass the options that are actually set
893 opts = {}
893 opts = {}
894 if bookmark:
894 if bookmark:
895 opts['bookmark'] = bookmark
895 opts['bookmark'] = bookmark
896 if branch:
896 if branch:
897 opts['branch'] = branch
897 opts['branch'] = branch
898 if revision:
898 if revision:
899 opts['rev'] = revision
899 opts['rev'] = revision
900
900
901 commands.pull(baseui, repo, source, **opts)
901 commands.pull(baseui, repo, source, **opts)
902
902
903 @reraise_safe_exceptions
903 @reraise_safe_exceptions
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
905 repo = self._factory.repo(wire)
905 repo = self._factory.repo(wire)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
908 new_branch=push_branches)
908 new_branch=push_branches)
909
909
910 @reraise_safe_exceptions
910 @reraise_safe_exceptions
911 def strip(self, wire, revision, update, backup):
911 def strip(self, wire, revision, update, backup):
912 repo = self._factory.repo(wire)
912 repo = self._factory.repo(wire)
913 ctx = self._get_ctx(repo, revision)
913 ctx = self._get_ctx(repo, revision)
914 hgext_strip(
914 hgext_strip(
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
916
916
917 @reraise_safe_exceptions
917 @reraise_safe_exceptions
918 def get_unresolved_files(self, wire):
918 def get_unresolved_files(self, wire):
919 repo = self._factory.repo(wire)
919 repo = self._factory.repo(wire)
920
920
921 log.debug('Calculating unresolved files for repo: %s', repo)
921 log.debug('Calculating unresolved files for repo: %s', repo)
922 output = io.BytesIO()
922 output = io.BytesIO()
923
923
924 def write(data, **unused_kwargs):
924 def write(data, **unused_kwargs):
925 output.write(data)
925 output.write(data)
926
926
927 baseui = self._factory._create_config(wire['config'])
927 baseui = self._factory._create_config(wire['config'])
928 baseui.write = write
928 baseui.write = write
929
929
930 commands.resolve(baseui, repo, list=True)
930 commands.resolve(baseui, repo, list=True)
931 unresolved = output.getvalue().splitlines(0)
931 unresolved = output.getvalue().splitlines(0)
932 return unresolved
932 return unresolved
933
933
934 @reraise_safe_exceptions
934 @reraise_safe_exceptions
935 def merge(self, wire, revision):
935 def merge(self, wire, revision):
936 repo = self._factory.repo(wire)
936 repo = self._factory.repo(wire)
937 baseui = self._factory._create_config(wire['config'])
937 baseui = self._factory._create_config(wire['config'])
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939
939
940 # If sub-repositories are used, Mercurial prompts the user in case of
940 # If sub-repositories are used, Mercurial prompts the user in case of
941 # merge conflicts or differing sub-repository sources. By setting the
941 # merge conflicts or differing sub-repository sources. By setting the
942 # interactive flag to `False`, Mercurial doesn't prompt the user but
942 # interactive flag to `False`, Mercurial doesn't prompt the user but
943 # instead uses a default value.
943 # instead uses a default value.
944 repo.ui.setconfig('ui', 'interactive', False)
944 repo.ui.setconfig('ui', 'interactive', False)
945 commands.merge(baseui, repo, rev=revision)
945 commands.merge(baseui, repo, rev=revision)
946
946
947 @reraise_safe_exceptions
947 @reraise_safe_exceptions
948 def merge_state(self, wire):
948 def merge_state(self, wire):
949 repo = self._factory.repo(wire)
949 repo = self._factory.repo(wire)
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
951
951
952 # If sub-repositories are used, Mercurial prompts the user in case of
952 # If sub-repositories are used, Mercurial prompts the user in case of
953 # merge conflicts or differing sub-repository sources. By setting the
953 # merge conflicts or differing sub-repository sources. By setting the
954 # interactive flag to `False`, Mercurial doesn't prompt the user but
954 # interactive flag to `False`, Mercurial doesn't prompt the user but
955 # instead uses a default value.
955 # instead uses a default value.
956 repo.ui.setconfig('ui', 'interactive', False)
956 repo.ui.setconfig('ui', 'interactive', False)
957 ms = hg_merge.mergestate(repo)
957 ms = hg_merge.mergestate(repo)
958 return [x for x in ms.unresolved()]
958 return [x for x in ms.unresolved()]
959
959
960 @reraise_safe_exceptions
960 @reraise_safe_exceptions
961 def commit(self, wire, message, username, close_branch=False):
961 def commit(self, wire, message, username, close_branch=False):
962 repo = self._factory.repo(wire)
962 repo = self._factory.repo(wire)
963 baseui = self._factory._create_config(wire['config'])
963 baseui = self._factory._create_config(wire['config'])
964 repo.ui.setconfig('ui', 'username', username)
964 repo.ui.setconfig('ui', 'username', username)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
966
966
967 @reraise_safe_exceptions
967 @reraise_safe_exceptions
968 def rebase(self, wire, source=None, dest=None, abort=False):
968 def rebase(self, wire, source=None, dest=None, abort=False):
969 repo = self._factory.repo(wire)
969 repo = self._factory.repo(wire)
970 baseui = self._factory._create_config(wire['config'])
970 baseui = self._factory._create_config(wire['config'])
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
972 # If sub-repositories are used, Mercurial prompts the user in case of
972 # If sub-repositories are used, Mercurial prompts the user in case of
973 # merge conflicts or differing sub-repository sources. By setting the
973 # merge conflicts or differing sub-repository sources. By setting the
974 # interactive flag to `False`, Mercurial doesn't prompt the user but
974 # interactive flag to `False`, Mercurial doesn't prompt the user but
975 # instead uses a default value.
975 # instead uses a default value.
976 repo.ui.setconfig('ui', 'interactive', False)
976 repo.ui.setconfig('ui', 'interactive', False)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
978
978
979 @reraise_safe_exceptions
979 @reraise_safe_exceptions
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
981 repo = self._factory.repo(wire)
981 repo = self._factory.repo(wire)
982 ctx = self._get_ctx(repo, revision)
982 ctx = self._get_ctx(repo, revision)
983 node = ctx.node()
983 node = ctx.node()
984
984
985 date = (tag_time, tag_timezone)
985 date = (tag_time, tag_timezone)
986 try:
986 try:
987 hg_tag.tag(repo, name, node, message, local, user, date)
987 hg_tag.tag(repo, name, node, message, local, user, date)
988 except Abort as e:
988 except Abort as e:
989 log.exception("Tag operation aborted")
989 log.exception("Tag operation aborted")
990 # Exception can contain unicode which we convert
990 # Exception can contain unicode which we convert
991 raise exceptions.AbortException(e)(repr(e))
991 raise exceptions.AbortException(e)(repr(e))
992
992
993 @reraise_safe_exceptions
993 @reraise_safe_exceptions
994 def bookmark(self, wire, bookmark, revision=None):
994 def bookmark(self, wire, bookmark, revision=None):
995 repo = self._factory.repo(wire)
995 repo = self._factory.repo(wire)
996 baseui = self._factory._create_config(wire['config'])
996 baseui = self._factory._create_config(wire['config'])
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
998
998
999 @reraise_safe_exceptions
999 @reraise_safe_exceptions
1000 def install_hooks(self, wire, force=False):
1000 def install_hooks(self, wire, force=False):
1001 # we don't need any special hooks for Mercurial
1001 # we don't need any special hooks for Mercurial
1002 pass
1002 pass
1003
1003
1004 @reraise_safe_exceptions
1004 @reraise_safe_exceptions
1005 def get_hooks_info(self, wire):
1005 def get_hooks_info(self, wire):
1006 return {
1006 return {
1007 'pre_version': vcsserver.__version__,
1007 'pre_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
1009 }
1009 }
@@ -1,79 +1,79 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24 # patch demandimport, due to a bug in mercurial where it always triggers
24 # patch demandimport, due to a bug in mercurial where it always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import subrepoutil
39 from mercurial import subrepoutil
40 from mercurial import tags as hg_tag
40 from mercurial import tags as hg_tag
41
41
42 from mercurial.commands import clone, nullid, pull
42 from mercurial.commands import clone, nullid, pull
43 from mercurial.context import memctx, memfilectx
43 from mercurial.context import memctx, memfilectx
44 from mercurial.error import (
44 from mercurial.error import (
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 RequirementError, ProgrammingError)
46 RequirementError, ProgrammingError)
47 from mercurial.hgweb import hgweb_mod
47 from mercurial.hgweb import hgweb_mod
48 from mercurial.localrepo import instance
48 from mercurial.localrepo import instance
49 from mercurial.match import match
49 from mercurial.match import match
50 from mercurial.mdiff import diffopts
50 from mercurial.mdiff import diffopts
51 from mercurial.node import bin, hex
51 from mercurial.node import bin, hex
52 from mercurial.encoding import tolocal
52 from mercurial.encoding import tolocal
53 from mercurial.discovery import findcommonoutgoing
53 from mercurial.discovery import findcommonoutgoing
54 from mercurial.hg import peer
54 from mercurial.hg import peer
55 from mercurial.httppeer import makepeer
55 from mercurial.httppeer import makepeer
56 from mercurial.util import url as hg_url
56 from mercurial.util import url as hg_url
57 from mercurial.scmutil import revrange, revsymbol
57 from mercurial.scmutil import revrange, revsymbol
58 from mercurial.node import nullrev
58 from mercurial.node import nullrev
59 from mercurial import exchange
59 from mercurial import exchange
60 from hgext import largefiles
60 from hgext import largefiles
61
61
62 # those auth handlers are patched for a python 2.6.5 bug and
62 # those auth handlers are patched for a python 2.6.5 bug and
63 # infinite looping when given invalid resources
63 # infinite looping when given invalid resources
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65
65
66
66
67 def get_ctx(repo, ref):
67 def get_ctx(repo, ref):
68 try:
68 try:
69 ctx = repo[ref]
69 ctx = repo[ref]
70 except ProgrammingError:
70 except ProgrammingError:
71 # we're unable to find the rev using a regular lookup; fall back
71 # we're unable to find the rev using a regular lookup; fall back
72 # to the slower, but backward compatible, revsymbol usage
72 # to the slower, but backward compatible, revsymbol usage
73 ctx = revsymbol(repo, ref)
73 ctx = revsymbol(repo, ref)
74 except (LookupError, RepoLookupError):
74 except (LookupError, RepoLookupError):
75 # Similar case as above but only for refs that are not numeric
75 # Similar case as above but only for refs that are not numeric
76 if isinstance(ref, (int, long)):
76 if isinstance(ref, (int, long)):
77 raise
77 raise
78 ctx = revsymbol(repo, ref)
78 ctx = revsymbol(repo, ref)
79 return ctx
79 return ctx
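A minimal, hypothetical usage sketch of the get_ctx() helper defined above. The repository path, the ui construction and the presence of at least one commit are assumptions for illustration, not part of this changeset.

# hypothetical sketch: resolving refs through get_ctx()
from mercurial import ui as uimod, hg
from vcsserver.hgcompat import get_ctx

repo = hg.repository(uimod.ui.load(), './repo')  # assumed local Mercurial repository
print(get_ctx(repo, 'tip').hex())   # symbolic ref, resolved via repo[ref] or revsymbol()
print(get_ctx(repo, 0).branch())    # numeric rev: on lookup failure it re-raises, no revsymbol() fallback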
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto._capabilities = wrapper
39 lfproto._capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto._capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(orig, repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 return calc_capabilities(orig, repo, proto)
56 else:
56 else:
57 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
58 return orig(repo, proto)
58 return orig(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo, subrepoutil
65 from hgcompat import subrepo, subrepoutil
66 from vcsserver.exceptions import SubrepoMergeException
66 from vcsserver.exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring this subrepository in the
73 ``ctx`` is the context referring this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False, missing=False):
90 def dirty(self, ignoreupdate=False, missing=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepoutil.state(self._ctx, self.ui)
100 substate = subrepoutil.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()()
119 raise SubrepoMergeException()()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
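A short, hypothetical sketch of how the two patch functions above might be wired in at service start-up; the module path vcsserver.hgpatches is an assumption, and the call site is illustrative only.

# hypothetical start-up sketch; only the functions defined above are used
from vcsserver import hgpatches  # assumed module name for this file

hgpatches.patch_largefiles_capabilities()  # largefiles capability becomes dynamic per-repo
hgpatches.patch_subrepo_type_mapping()     # hg/git/svn subrepos resolve to NoOpSubrepo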
@@ -1,205 +1,205 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import re
20 import re
21 import os
21 import os
22 import sys
22 import sys
23 import datetime
23 import datetime
24 import logging
24 import logging
25 import pkg_resources
25 import pkg_resources
26
26
27 import vcsserver
27 import vcsserver
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 def get_git_hooks_path(repo_path, bare):
32 def get_git_hooks_path(repo_path, bare):
33 hooks_path = os.path.join(repo_path, 'hooks')
33 hooks_path = os.path.join(repo_path, 'hooks')
34 if not bare:
34 if not bare:
35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
36
36
37 return hooks_path
37 return hooks_path
38
38
39
39
40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
41 """
41 """
42 Creates a RhodeCode hook inside a git repository
42 Creates a RhodeCode hook inside a git repository
43
43
44 :param repo_path: path to repository
44 :param repo_path: path to repository
45 :param executable: binary executable to put in the hooks
45 :param executable: binary executable to put in the hooks
46 :param force_create: Create even if same name hook exists
46 :param force_create: Create even if same name hook exists
47 """
47 """
48 executable = executable or sys.executable
48 executable = executable or sys.executable
49 hooks_path = get_git_hooks_path(repo_path, bare)
49 hooks_path = get_git_hooks_path(repo_path, bare)
50
50
51 if not os.path.isdir(hooks_path):
51 if not os.path.isdir(hooks_path):
52 os.makedirs(hooks_path, mode=0o777)
52 os.makedirs(hooks_path, mode=0o777)
53
53
54 tmpl_post = pkg_resources.resource_string(
54 tmpl_post = pkg_resources.resource_string(
55 'vcsserver', '/'.join(
55 'vcsserver', '/'.join(
56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
57 tmpl_pre = pkg_resources.resource_string(
57 tmpl_pre = pkg_resources.resource_string(
58 'vcsserver', '/'.join(
58 'vcsserver', '/'.join(
59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
60
60
61 path = '' # not used for now
61 path = '' # not used for now
62 timestamp = datetime.datetime.utcnow().isoformat()
62 timestamp = datetime.datetime.utcnow().isoformat()
63
63
64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
65 log.debug('Installing git hook in repo %s', repo_path)
65 log.debug('Installing git hook in repo %s', repo_path)
66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
68
68
69 if _rhodecode_hook or force_create:
69 if _rhodecode_hook or force_create:
70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
71 try:
71 try:
72 with open(_hook_file, 'wb') as f:
72 with open(_hook_file, 'wb') as f:
73 template = template.replace(
73 template = template.replace(
74 '_TMPL_', vcsserver.__version__)
74 '_TMPL_', vcsserver.__version__)
75 template = template.replace('_DATE_', timestamp)
75 template = template.replace('_DATE_', timestamp)
76 template = template.replace('_ENV_', executable)
76 template = template.replace('_ENV_', executable)
77 template = template.replace('_PATH_', path)
77 template = template.replace('_PATH_', path)
78 f.write(template)
78 f.write(template)
79 os.chmod(_hook_file, 0o755)
79 os.chmod(_hook_file, 0o755)
80 except IOError:
80 except IOError:
81 log.exception('error writing hook file %s', _hook_file)
81 log.exception('error writing hook file %s', _hook_file)
82 else:
82 else:
83 log.debug('skipping writing hook file')
83 log.debug('skipping writing hook file')
84
84
85 return True
85 return True
86
86
87
87
88 def get_svn_hooks_path(repo_path):
88 def get_svn_hooks_path(repo_path):
89 hooks_path = os.path.join(repo_path, 'hooks')
89 hooks_path = os.path.join(repo_path, 'hooks')
90
90
91 return hooks_path
91 return hooks_path
92
92
93
93
94 def install_svn_hooks(repo_path, executable=None, force_create=False):
94 def install_svn_hooks(repo_path, executable=None, force_create=False):
95 """
95 """
96 Creates RhodeCode hooks inside a svn repository
96 Creates RhodeCode hooks inside a svn repository
97
97
98 :param repo_path: path to repository
98 :param repo_path: path to repository
99 :param executable: binary executable to put in the hooks
99 :param executable: binary executable to put in the hooks
100 :param force_create: Create even if same name hook exists
100 :param force_create: Create even if same name hook exists
101 """
101 """
102 executable = executable or sys.executable
102 executable = executable or sys.executable
103 hooks_path = get_svn_hooks_path(repo_path)
103 hooks_path = get_svn_hooks_path(repo_path)
104 if not os.path.isdir(hooks_path):
104 if not os.path.isdir(hooks_path):
105 os.makedirs(hooks_path, mode=0o777)
105 os.makedirs(hooks_path, mode=0o777)
106
106
107 tmpl_post = pkg_resources.resource_string(
107 tmpl_post = pkg_resources.resource_string(
108 'vcsserver', '/'.join(
108 'vcsserver', '/'.join(
109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
110 tmpl_pre = pkg_resources.resource_string(
110 tmpl_pre = pkg_resources.resource_string(
111 'vcsserver', '/'.join(
111 'vcsserver', '/'.join(
112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
113
113
114 path = '' # not used for now
114 path = '' # not used for now
115 timestamp = datetime.datetime.utcnow().isoformat()
115 timestamp = datetime.datetime.utcnow().isoformat()
116
116
117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
118 log.debug('Installing svn hook in repo %s', repo_path)
118 log.debug('Installing svn hook in repo %s', repo_path)
119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
121
121
122 if _rhodecode_hook or force_create:
122 if _rhodecode_hook or force_create:
123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
124
124
125 try:
125 try:
126 with open(_hook_file, 'wb') as f:
126 with open(_hook_file, 'wb') as f:
127 template = template.replace(
127 template = template.replace(
128 '_TMPL_', vcsserver.__version__)
128 '_TMPL_', vcsserver.__version__)
129 template = template.replace('_DATE_', timestamp)
129 template = template.replace('_DATE_', timestamp)
130 template = template.replace('_ENV_', executable)
130 template = template.replace('_ENV_', executable)
131 template = template.replace('_PATH_', path)
131 template = template.replace('_PATH_', path)
132
132
133 f.write(template)
133 f.write(template)
134 os.chmod(_hook_file, 0o755)
134 os.chmod(_hook_file, 0o755)
135 except IOError:
135 except IOError:
136 log.exception('error writing hook file %s', _hook_file)
136 log.exception('error writing hook file %s', _hook_file)
137 else:
137 else:
138 log.debug('skipping writing hook file')
138 log.debug('skipping writing hook file')
139
139
140 return True
140 return True
141
141
142
142
143 def get_version_from_hook(hook_path):
143 def get_version_from_hook(hook_path):
144 version = ''
144 version = ''
145 hook_content = read_hook_content(hook_path)
145 hook_content = read_hook_content(hook_path)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
147 if matches:
147 if matches:
148 try:
148 try:
149 version = matches.groups()[0]
149 version = matches.groups()[0]
150 log.debug('got version %s from hooks.', version)
150 log.debug('got version %s from hooks.', version)
151 except Exception:
151 except Exception:
152 log.exception("Exception while reading the hook version.")
152 log.exception("Exception while reading the hook version.")
153 return version.replace("'", "")
153 return version.replace("'", "")
154
154
155
155
156 def check_rhodecode_hook(hook_path):
156 def check_rhodecode_hook(hook_path):
157 """
157 """
158 Check if the hook was created by RhodeCode
158 Check if the hook was created by RhodeCode
159 """
159 """
160 if not os.path.exists(hook_path):
160 if not os.path.exists(hook_path):
161 return True
161 return True
162
162
163 log.debug('hook exists, checking if it is from RhodeCode')
163 log.debug('hook exists, checking if it is from RhodeCode')
164
164
165 version = get_version_from_hook(hook_path)
165 version = get_version_from_hook(hook_path)
166 if version:
166 if version:
167 return True
167 return True
168
168
169 return False
169 return False
170
170
171
171
172 def read_hook_content(hook_path):
172 def read_hook_content(hook_path):
173 content = ''
173 content = ''
174 if os.path.isfile(hook_path):
174 if os.path.isfile(hook_path):
175 with open(hook_path, 'rb') as f:
175 with open(hook_path, 'rb') as f:
176 content = f.read()
176 content = f.read()
177 return content
177 return content
178
178
179
179
180 def get_git_pre_hook_version(repo_path, bare):
180 def get_git_pre_hook_version(repo_path, bare):
181 hooks_path = get_git_hooks_path(repo_path, bare)
181 hooks_path = get_git_hooks_path(repo_path, bare)
182 _hook_file = os.path.join(hooks_path, 'pre-receive')
182 _hook_file = os.path.join(hooks_path, 'pre-receive')
183 version = get_version_from_hook(_hook_file)
183 version = get_version_from_hook(_hook_file)
184 return version
184 return version
185
185
186
186
187 def get_git_post_hook_version(repo_path, bare):
187 def get_git_post_hook_version(repo_path, bare):
188 hooks_path = get_git_hooks_path(repo_path, bare)
188 hooks_path = get_git_hooks_path(repo_path, bare)
189 _hook_file = os.path.join(hooks_path, 'post-receive')
189 _hook_file = os.path.join(hooks_path, 'post-receive')
190 version = get_version_from_hook(_hook_file)
190 version = get_version_from_hook(_hook_file)
191 return version
191 return version
192
192
193
193
194 def get_svn_pre_hook_version(repo_path):
194 def get_svn_pre_hook_version(repo_path):
195 hooks_path = get_svn_hooks_path(repo_path)
195 hooks_path = get_svn_hooks_path(repo_path)
196 _hook_file = os.path.join(hooks_path, 'pre-commit')
196 _hook_file = os.path.join(hooks_path, 'pre-commit')
197 version = get_version_from_hook(_hook_file)
197 version = get_version_from_hook(_hook_file)
198 return version
198 return version
199
199
200
200
201 def get_svn_post_hook_version(repo_path):
201 def get_svn_post_hook_version(repo_path):
202 hooks_path = get_svn_hooks_path(repo_path)
202 hooks_path = get_svn_hooks_path(repo_path)
203 _hook_file = os.path.join(hooks_path, 'post-commit')
203 _hook_file = os.path.join(hooks_path, 'post-commit')
204 version = get_version_from_hook(_hook_file)
204 version = get_version_from_hook(_hook_file)
205 return version
205 return version
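A hypothetical usage sketch for the hook helpers above; the repository path is an assumed example and the module path vcsserver.hook_utils is inferred from the template resource paths.

import sys
from vcsserver.hook_utils import install_git_hooks, get_git_pre_hook_version

repo_path = '/srv/repos/example.git'  # assumed bare git repository
install_git_hooks(repo_path, bare=True, executable=sys.executable)
print(get_git_pre_hook_version(repo_path, bare=True))  # prints the RC_HOOK_VER written into pre-receive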
@@ -1,729 +1,729 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55
55
56 response_data = response.read()
56 response_data = response.read()
57
57
58 try:
58 try:
59 return json.loads(response_data)
59 return json.loads(response_data)
60 except Exception:
60 except Exception:
61 log.exception('Failed to decode hook response json data. '
61 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
62 'response_code:%s, raw_data:%s',
63 response.status, response_data)
63 response.status, response_data)
64 raise
64 raise
65
65
66 def _serialize(self, hook_name, extras):
66 def _serialize(self, hook_name, extras):
67 data = {
67 data = {
68 'method': hook_name,
68 'method': hook_name,
69 'extras': extras
69 'extras': extras
70 }
70 }
71 return json.dumps(data)
71 return json.dumps(data)
72
72
73
73
74 class HooksDummyClient(object):
74 class HooksDummyClient(object):
75 def __init__(self, hooks_module):
75 def __init__(self, hooks_module):
76 self._hooks_module = importlib.import_module(hooks_module)
76 self._hooks_module = importlib.import_module(hooks_module)
77
77
78 def __call__(self, hook_name, extras):
78 def __call__(self, hook_name, extras):
79 with self._hooks_module.Hooks() as hooks:
79 with self._hooks_module.Hooks() as hooks:
80 return getattr(hooks, hook_name)(extras)
80 return getattr(hooks, hook_name)(extras)
81
81
82
82
83 class HooksShadowRepoClient(object):
83 class HooksShadowRepoClient(object):
84
84
85 def __call__(self, hook_name, extras):
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
86 return {'output': '', 'status': 0}
87
87
88
88
89 class RemoteMessageWriter(object):
89 class RemoteMessageWriter(object):
90 """Writer base class."""
90 """Writer base class."""
91 def write(self, message):
91 def write(self, message):
92 raise NotImplementedError()
92 raise NotImplementedError()
93
93
94
94
95 class HgMessageWriter(RemoteMessageWriter):
95 class HgMessageWriter(RemoteMessageWriter):
96 """Writer that knows how to send messages to mercurial clients."""
96 """Writer that knows how to send messages to mercurial clients."""
97
97
98 def __init__(self, ui):
98 def __init__(self, ui):
99 self.ui = ui
99 self.ui = ui
100
100
101 def write(self, message):
101 def write(self, message):
102 # TODO: Check why the quiet flag is set by default.
102 # TODO: Check why the quiet flag is set by default.
103 old = self.ui.quiet
103 old = self.ui.quiet
104 self.ui.quiet = False
104 self.ui.quiet = False
105 self.ui.status(message.encode('utf-8'))
105 self.ui.status(message.encode('utf-8'))
106 self.ui.quiet = old
106 self.ui.quiet = old
107
107
108
108
109 class GitMessageWriter(RemoteMessageWriter):
109 class GitMessageWriter(RemoteMessageWriter):
110 """Writer that knows how to send messages to git clients."""
110 """Writer that knows how to send messages to git clients."""
111
111
112 def __init__(self, stdout=None):
112 def __init__(self, stdout=None):
113 self.stdout = stdout or sys.stdout
113 self.stdout = stdout or sys.stdout
114
114
115 def write(self, message):
115 def write(self, message):
116 self.stdout.write(message.encode('utf-8'))
116 self.stdout.write(message.encode('utf-8'))
117
117
118
118
119 class SvnMessageWriter(RemoteMessageWriter):
119 class SvnMessageWriter(RemoteMessageWriter):
120 """Writer that knows how to send messages to svn clients."""
120 """Writer that knows how to send messages to svn clients."""
121
121
122 def __init__(self, stderr=None):
122 def __init__(self, stderr=None):
123 # SVN needs data sent to stderr for back-to-client messaging
123 # SVN needs data sent to stderr for back-to-client messaging
124 self.stderr = stderr or sys.stderr
124 self.stderr = stderr or sys.stderr
125
125
126 def write(self, message):
126 def write(self, message):
127 self.stderr.write(message.encode('utf-8'))
127 self.stderr.write(message.encode('utf-8'))
128
128
129
129
130 def _handle_exception(result):
130 def _handle_exception(result):
131 exception_class = result.get('exception')
131 exception_class = result.get('exception')
132 exception_traceback = result.get('exception_traceback')
132 exception_traceback = result.get('exception_traceback')
133
133
134 if exception_traceback:
134 if exception_traceback:
135 log.error('Got traceback from remote call:%s', exception_traceback)
135 log.error('Got traceback from remote call:%s', exception_traceback)
136
136
137 if exception_class == 'HTTPLockedRC':
137 if exception_class == 'HTTPLockedRC':
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 elif exception_class == 'HTTPBranchProtected':
139 elif exception_class == 'HTTPBranchProtected':
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 elif exception_class == 'RepositoryError':
141 elif exception_class == 'RepositoryError':
142 raise exceptions.VcsException()(*result['exception_args'])
142 raise exceptions.VcsException()(*result['exception_args'])
143 elif exception_class:
143 elif exception_class:
144 raise Exception('Got remote exception "%s" with args "%s"' %
144 raise Exception('Got remote exception "%s" with args "%s"' %
145 (exception_class, result['exception_args']))
145 (exception_class, result['exception_args']))
146
146
147
147
148 def _get_hooks_client(extras):
148 def _get_hooks_client(extras):
149 hooks_uri = extras.get('hooks_uri')
149 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
151 if hooks_uri:
152 return HooksHttpClient(extras['hooks_uri'])
152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
154 return HooksShadowRepoClient()
155 else:
155 else:
156 return HooksDummyClient(extras['hooks_module'])
156 return HooksDummyClient(extras['hooks_module'])
157
157
158
158
159 def _call_hook(hook_name, extras, writer):
159 def _call_hook(hook_name, extras, writer):
160 hooks_client = _get_hooks_client(extras)
160 hooks_client = _get_hooks_client(extras)
161 log.debug('Hooks, using client:%s', hooks_client)
161 log.debug('Hooks, using client:%s', hooks_client)
162 result = hooks_client(hook_name, extras)
162 result = hooks_client(hook_name, extras)
163 log.debug('Hooks got result: %s', result)
163 log.debug('Hooks got result: %s', result)
164
164
165 _handle_exception(result)
165 _handle_exception(result)
166 writer.write(result['output'])
166 writer.write(result['output'])
167
167
168 return result['status']
168 return result['status']
169
169
170
170
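A minimal, hypothetical dispatch sketch using the helpers defined above; the extras values are assumptions. With a 'hooks_uri' present, _get_hooks_client() returns a HooksHttpClient, which POSTs the serialized {'method', 'extras'} payload and expects a JSON result carrying 'status' and 'output'.

# hypothetical sketch; extras content is assumed, not taken from this changeset
extras = {
    'hooks_uri': 'localhost:10010',  # assumed address of the hooks callback daemon
    'is_shadow_repo': False,
}
status = _call_hook('repo_size', extras, GitMessageWriter())
print('hook finished with status %s' % status)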
171 def _extras_from_ui(ui):
171 def _extras_from_ui(ui):
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 if not hook_data:
173 if not hook_data:
174 # maybe it's inside environ?
174 # maybe it's inside environ?
175 env_hook_data = os.environ.get('RC_SCM_DATA')
175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 if env_hook_data:
176 if env_hook_data:
177 hook_data = env_hook_data
177 hook_data = env_hook_data
178
178
179 extras = {}
179 extras = {}
180 if hook_data:
180 if hook_data:
181 extras = json.loads(hook_data)
181 extras = json.loads(hook_data)
182 return extras
182 return extras
183
183
184
184
185 def _rev_range_hash(repo, node, check_heads=False):
185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
186 from vcsserver.hgcompat import get_ctx
187
187
188 commits = []
188 commits = []
189 revs = []
189 revs = []
190 start = get_ctx(repo, node).rev()
190 start = get_ctx(repo, node).rev()
191 end = len(repo)
191 end = len(repo)
192 for rev in range(start, end):
192 for rev in range(start, end):
193 revs.append(rev)
193 revs.append(rev)
194 ctx = get_ctx(repo, rev)
194 ctx = get_ctx(repo, rev)
195 commit_id = mercurial.node.hex(ctx.node())
195 commit_id = mercurial.node.hex(ctx.node())
196 branch = ctx.branch()
196 branch = ctx.branch()
197 commits.append((commit_id, branch))
197 commits.append((commit_id, branch))
198
198
199 parent_heads = []
199 parent_heads = []
200 if check_heads:
200 if check_heads:
201 parent_heads = _check_heads(repo, start, end, revs)
201 parent_heads = _check_heads(repo, start, end, revs)
202 return commits, parent_heads
202 return commits, parent_heads
203
203
204
204
205 def _check_heads(repo, start, end, commits):
205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
206 from vcsserver.hgcompat import get_ctx
207 changelog = repo.changelog
207 changelog = repo.changelog
208 parents = set()
208 parents = set()
209
209
210 for new_rev in commits:
210 for new_rev in commits:
211 for p in changelog.parentrevs(new_rev):
211 for p in changelog.parentrevs(new_rev):
212 if p == mercurial.node.nullrev:
212 if p == mercurial.node.nullrev:
213 continue
213 continue
214 if p < start:
214 if p < start:
215 parents.add(p)
215 parents.add(p)
216
216
217 for p in parents:
217 for p in parents:
218 branch = get_ctx(repo, p).branch()
218 branch = get_ctx(repo, p).branch()
219 # The heads descending from that parent, on the same branch
219 # The heads descending from that parent, on the same branch
220 parent_heads = set([p])
220 parent_heads = set([p])
221 reachable = set([p])
221 reachable = set([p])
222 for x in xrange(p + 1, end):
222 for x in xrange(p + 1, end):
223 if get_ctx(repo, x).branch() != branch:
223 if get_ctx(repo, x).branch() != branch:
224 continue
224 continue
225 for pp in changelog.parentrevs(x):
225 for pp in changelog.parentrevs(x):
226 if pp in reachable:
226 if pp in reachable:
227 reachable.add(x)
227 reachable.add(x)
228 parent_heads.discard(pp)
228 parent_heads.discard(pp)
229 parent_heads.add(x)
229 parent_heads.add(x)
230 # More than one head? Suggest merging
230 # More than one head? Suggest merging
231 if len(parent_heads) > 1:
231 if len(parent_heads) > 1:
232 return list(parent_heads)
232 return list(parent_heads)
233
233
234 return []
234 return []
235
235
236
236
237 def _get_git_env():
237 def _get_git_env():
238 env = {}
238 env = {}
239 for k, v in os.environ.items():
239 for k, v in os.environ.items():
240 if k.startswith('GIT'):
240 if k.startswith('GIT'):
241 env[k] = v
241 env[k] = v
242
242
243 # serialized version
243 # serialized version
244 return [(k, v) for k, v in env.items()]
244 return [(k, v) for k, v in env.items()]
245
245
246
246
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 env = {}
248 env = {}
249 for k, v in os.environ.items():
249 for k, v in os.environ.items():
250 if k.startswith('HG'):
250 if k.startswith('HG'):
251 env[k] = v
251 env[k] = v
252
252
253 env['HG_NODE'] = old_rev
253 env['HG_NODE'] = old_rev
254 env['HG_NODE_LAST'] = new_rev
254 env['HG_NODE_LAST'] = new_rev
255 env['HG_TXNID'] = txnid
255 env['HG_TXNID'] = txnid
256 env['HG_PENDING'] = repo_path
256 env['HG_PENDING'] = repo_path
257
257
258 return [(k, v) for k, v in env.items()]
258 return [(k, v) for k, v in env.items()]
259
259
260
260
261 def repo_size(ui, repo, **kwargs):
261 def repo_size(ui, repo, **kwargs):
262 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264
264
265
265
266 def pre_pull(ui, repo, **kwargs):
266 def pre_pull(ui, repo, **kwargs):
267 extras = _extras_from_ui(ui)
267 extras = _extras_from_ui(ui)
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269
269
270
270
271 def pre_pull_ssh(ui, repo, **kwargs):
271 def pre_pull_ssh(ui, repo, **kwargs):
272 extras = _extras_from_ui(ui)
272 extras = _extras_from_ui(ui)
273 if extras and extras.get('SSH'):
273 if extras and extras.get('SSH'):
274 return pre_pull(ui, repo, **kwargs)
274 return pre_pull(ui, repo, **kwargs)
275 return 0
275 return 0
276
276
277
277
278 def post_pull(ui, repo, **kwargs):
278 def post_pull(ui, repo, **kwargs):
279 extras = _extras_from_ui(ui)
279 extras = _extras_from_ui(ui)
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281
281
282
282
283 def post_pull_ssh(ui, repo, **kwargs):
283 def post_pull_ssh(ui, repo, **kwargs):
284 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
285 if extras and extras.get('SSH'):
285 if extras and extras.get('SSH'):
286 return post_pull(ui, repo, **kwargs)
286 return post_pull(ui, repo, **kwargs)
287 return 0
287 return 0
288
288
289
289
290 def pre_push(ui, repo, node=None, **kwargs):
290 def pre_push(ui, repo, node=None, **kwargs):
291 """
291 """
292 Mercurial pre_push hook
292 Mercurial pre_push hook
293 """
293 """
294 extras = _extras_from_ui(ui)
294 extras = _extras_from_ui(ui)
295 detect_force_push = extras.get('detect_force_push')
295 detect_force_push = extras.get('detect_force_push')
296
296
297 rev_data = []
297 rev_data = []
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 branches = collections.defaultdict(list)
299 branches = collections.defaultdict(list)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 for commit_id, branch in commits:
301 for commit_id, branch in commits:
302 branches[branch].append(commit_id)
302 branches[branch].append(commit_id)
303
303
304 for branch, commits in branches.items():
304 for branch, commits in branches.items():
305 old_rev = kwargs.get('node_last') or commits[0]
305 old_rev = kwargs.get('node_last') or commits[0]
306 rev_data.append({
306 rev_data.append({
307 'total_commits': len(commits),
307 'total_commits': len(commits),
308 'old_rev': old_rev,
308 'old_rev': old_rev,
309 'new_rev': commits[-1],
309 'new_rev': commits[-1],
310 'ref': '',
310 'ref': '',
311 'type': 'branch',
311 'type': 'branch',
312 'name': branch,
312 'name': branch,
313 })
313 })
314
314
315 for push_ref in rev_data:
315 for push_ref in rev_data:
316 push_ref['multiple_heads'] = _heads
316 push_ref['multiple_heads'] = _heads
317
317
318 repo_path = os.path.join(
318 repo_path = os.path.join(
319 extras.get('repo_store', ''), extras.get('repository', ''))
319 extras.get('repo_store', ''), extras.get('repository', ''))
320 push_ref['hg_env'] = _get_hg_env(
320 push_ref['hg_env'] = _get_hg_env(
321 old_rev=push_ref['old_rev'],
321 old_rev=push_ref['old_rev'],
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 repo_path=repo_path)
323 repo_path=repo_path)
324
324
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 extras['commit_ids'] = rev_data
326 extras['commit_ids'] = rev_data
327
327
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329
329
330
330
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 extras = _extras_from_ui(ui)
332 extras = _extras_from_ui(ui)
333 if extras.get('SSH'):
333 if extras.get('SSH'):
334 return pre_push(ui, repo, node, **kwargs)
334 return pre_push(ui, repo, node, **kwargs)
335
335
336 return 0
336 return 0
337
337
338
338
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 """
340 """
341 Mercurial pre_push hook for SSH
341 Mercurial pre_push hook for SSH
342 """
342 """
343 extras = _extras_from_ui(ui)
343 extras = _extras_from_ui(ui)
344 if extras.get('SSH'):
344 if extras.get('SSH'):
345 permission = extras['SSH_PERMISSIONS']
345 permission = extras['SSH_PERMISSIONS']
346
346
347 if 'repository.write' == permission or 'repository.admin' == permission:
347 if 'repository.write' == permission or 'repository.admin' == permission:
348 return 0
348 return 0
349
349
350 # non-zero ret code
350 # non-zero ret code
351 return 1
351 return 1
352
352
353 return 0
353 return 0
354
354
355
355
356 def post_push(ui, repo, node, **kwargs):
356 def post_push(ui, repo, node, **kwargs):
357 """
357 """
358 Mercurial post_push hook
358 Mercurial post_push hook
359 """
359 """
360 extras = _extras_from_ui(ui)
360 extras = _extras_from_ui(ui)
361
361
362 commit_ids = []
362 commit_ids = []
363 branches = []
363 branches = []
364 bookmarks = []
364 bookmarks = []
365 tags = []
365 tags = []
366
366
367 commits, _heads = _rev_range_hash(repo, node)
367 commits, _heads = _rev_range_hash(repo, node)
368 for commit_id, branch in commits:
368 for commit_id, branch in commits:
369 commit_ids.append(commit_id)
369 commit_ids.append(commit_id)
370 if branch not in branches:
370 if branch not in branches:
371 branches.append(branch)
371 branches.append(branch)
372
372
373 if hasattr(ui, '_rc_pushkey_branches'):
373 if hasattr(ui, '_rc_pushkey_branches'):
374 bookmarks = ui._rc_pushkey_branches
374 bookmarks = ui._rc_pushkey_branches
375
375
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
377 extras['commit_ids'] = commit_ids
377 extras['commit_ids'] = commit_ids
378 extras['new_refs'] = {
378 extras['new_refs'] = {
379 'branches': branches,
379 'branches': branches,
380 'bookmarks': bookmarks,
380 'bookmarks': bookmarks,
381 'tags': tags
381 'tags': tags
382 }
382 }
383
383
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
385
385
386
386
387 def post_push_ssh(ui, repo, node, **kwargs):
387 def post_push_ssh(ui, repo, node, **kwargs):
388 """
388 """
389 Mercurial post_push hook for SSH
389 Mercurial post_push hook for SSH
390 """
390 """
391 if _extras_from_ui(ui).get('SSH'):
391 if _extras_from_ui(ui).get('SSH'):
392 return post_push(ui, repo, node, **kwargs)
392 return post_push(ui, repo, node, **kwargs)
393 return 0
393 return 0
394
394
395
395
396 def key_push(ui, repo, **kwargs):
396 def key_push(ui, repo, **kwargs):
397 from vcsserver.hgcompat import get_ctx
397 from vcsserver.hgcompat import get_ctx
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
399 # store new bookmarks in our UI object propagated later to post_push
399 # store new bookmarks in our UI object propagated later to post_push
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
401 return
401 return
402
402
403
403
404 # backward compat
404 # backward compat
405 log_pull_action = post_pull
405 log_pull_action = post_pull
406
406
407 # backward compat
407 # backward compat
408 log_push_action = post_push
408 log_push_action = post_push
409
409
410
410
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
412 """
412 """
413 Old hook name: keep here for backward compatibility.
413 Old hook name: keep here for backward compatibility.
414
414
415 This is only required when the installed git hooks are not upgraded.
415 This is only required when the installed git hooks are not upgraded.
416 """
416 """
417 pass
417 pass
418
418
419
419
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
421 """
421 """
422 Old hook name: keep here for backward compatibility.
422 Old hook name: keep here for backward compatibility.
423
423
424 This is only required when the installed git hooks are not upgraded.
424 This is only required when the installed git hooks are not upgraded.
425 """
425 """
426 pass
426 pass
427
427
428
428
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430
430
431
431
432 def git_pre_pull(extras):
432 def git_pre_pull(extras):
433 """
433 """
434 Pre pull hook.
434 Pre pull hook.
435
435
436 :param extras: dictionary containing the keys defined in simplevcs
436 :param extras: dictionary containing the keys defined in simplevcs
437 :type extras: dict
437 :type extras: dict
438
438
439 :return: status code of the hook. 0 for success.
439 :return: status code of the hook. 0 for success.
440 :rtype: int
440 :rtype: int
441 """
441 """
442 if 'pull' not in extras['hooks']:
442 if 'pull' not in extras['hooks']:
443 return HookResponse(0, '')
443 return HookResponse(0, '')
444
444
445 stdout = io.BytesIO()
445 stdout = io.BytesIO()
446 try:
446 try:
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
448 except Exception as error:
448 except Exception as error:
449 status = 128
449 status = 128
450 stdout.write('ERROR: %s\n' % str(error))
450 stdout.write('ERROR: %s\n' % str(error))
451
451
452 return HookResponse(status, stdout.getvalue())
452 return HookResponse(status, stdout.getvalue())
453
453
454
454
455 def git_post_pull(extras):
455 def git_post_pull(extras):
456 """
456 """
457 Post pull hook.
457 Post pull hook.
458
458
459 :param extras: dictionary containing the keys defined in simplevcs
459 :param extras: dictionary containing the keys defined in simplevcs
460 :type extras: dict
460 :type extras: dict
461
461
462 :return: status code of the hook. 0 for success.
462 :return: status code of the hook. 0 for success.
463 :rtype: int
463 :rtype: int
464 """
464 """
465 if 'pull' not in extras['hooks']:
465 if 'pull' not in extras['hooks']:
466 return HookResponse(0, '')
466 return HookResponse(0, '')
467
467
468 stdout = io.BytesIO()
468 stdout = io.BytesIO()
469 try:
469 try:
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 except Exception as error:
471 except Exception as error:
472 status = 128
472 status = 128
473 stdout.write('ERROR: %s\n' % error)
473 stdout.write('ERROR: %s\n' % error)
474
474
475 return HookResponse(status, stdout.getvalue())
475 return HookResponse(status, stdout.getvalue())
476
476
477
477
478 def _parse_git_ref_lines(revision_lines):
478 def _parse_git_ref_lines(revision_lines):
479 rev_data = []
479 rev_data = []
480 for revision_line in revision_lines or []:
480 for revision_line in revision_lines or []:
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 ref_data = ref.split('/', 2)
482 ref_data = ref.split('/', 2)
483 if ref_data[1] in ('tags', 'heads'):
483 if ref_data[1] in ('tags', 'heads'):
484 rev_data.append({
484 rev_data.append({
485 # NOTE(marcink):
485 # NOTE(marcink):
486 # we're unable to tell total_commits for git at this point
486 # we're unable to tell total_commits for git at this point
487 # but we set the variable anyway, to keep the data shape consistent
487 # but we set the variable anyway, to keep the data shape consistent
488 'total_commits': -1,
488 'total_commits': -1,
489 'old_rev': old_rev,
489 'old_rev': old_rev,
490 'new_rev': new_rev,
490 'new_rev': new_rev,
491 'ref': ref,
491 'ref': ref,
492 'type': ref_data[1],
492 'type': ref_data[1],
493 'name': ref_data[2],
493 'name': ref_data[2],
494 })
494 })
495 return rev_data
495 return rev_data
496
496
497
497
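A worked example of what _parse_git_ref_lines() above produces; the revision line and its hashes are made up for illustration.

# hypothetical input line: '<old sha> <new sha> refs/heads/master'
line = '0' * 40 + ' ' + 'a' * 40 + ' refs/heads/master'
print(_parse_git_ref_lines([line]))
# -> [{'total_commits': -1, 'old_rev': '00...0', 'new_rev': 'aa...a',
#      'ref': 'refs/heads/master', 'type': 'heads', 'name': 'master'}]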
498 def git_pre_receive(unused_repo_path, revision_lines, env):
498 def git_pre_receive(unused_repo_path, revision_lines, env):
499 """
499 """
500 Pre push hook.
500 Pre push hook.
501
501
502 :param extras: dictionary containing the keys defined in simplevcs
502 :param extras: dictionary containing the keys defined in simplevcs
503 :type extras: dict
503 :type extras: dict
504
504
505 :return: status code of the hook. 0 for success.
505 :return: status code of the hook. 0 for success.
506 :rtype: int
506 :rtype: int
507 """
507 """
508 extras = json.loads(env['RC_SCM_DATA'])
508 extras = json.loads(env['RC_SCM_DATA'])
509 rev_data = _parse_git_ref_lines(revision_lines)
509 rev_data = _parse_git_ref_lines(revision_lines)
510 if 'push' not in extras['hooks']:
510 if 'push' not in extras['hooks']:
511 return 0
511 return 0
512 empty_commit_id = '0' * 40
512 empty_commit_id = '0' * 40
513
513
514 detect_force_push = extras.get('detect_force_push')
514 detect_force_push = extras.get('detect_force_push')
515
515
516 for push_ref in rev_data:
516 for push_ref in rev_data:
517 # store our git-env which holds the temp store
517 # store our git-env which holds the temp store
518 push_ref['git_env'] = _get_git_env()
518 push_ref['git_env'] = _get_git_env()
519 push_ref['pruned_sha'] = ''
519 push_ref['pruned_sha'] = ''
520 if not detect_force_push:
520 if not detect_force_push:
521 # don't check for forced-push when we don't need to
521 # don't check for forced-push when we don't need to
522 continue
522 continue
523
523
524 type_ = push_ref['type']
524 type_ = push_ref['type']
525 new_branch = push_ref['old_rev'] == empty_commit_id
525 new_branch = push_ref['old_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
527 if type_ == 'heads' and not (new_branch or delete_branch):
527 if type_ == 'heads' and not (new_branch or delete_branch):
528 old_rev = push_ref['old_rev']
528 old_rev = push_ref['old_rev']
529 new_rev = push_ref['new_rev']
529 new_rev = push_ref['new_rev']
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
531 stdout, stderr = subprocessio.run_command(
531 stdout, stderr = subprocessio.run_command(
532 cmd, env=os.environ.copy())
532 cmd, env=os.environ.copy())
533 # means we're having some non-reachable objects, this forced push was used
533 # means we're having some non-reachable objects, this forced push was used
534 if stdout:
534 if stdout:
535 push_ref['pruned_sha'] = stdout.splitlines()
535 push_ref['pruned_sha'] = stdout.splitlines()
536
536
537 extras['hook_type'] = 'pre_receive'
537 extras['hook_type'] = 'pre_receive'
538 extras['commit_ids'] = rev_data
538 extras['commit_ids'] = rev_data
539 return _call_hook('pre_push', extras, GitMessageWriter())
539 return _call_hook('pre_push', extras, GitMessageWriter())
540
540
541
541
542 def git_post_receive(unused_repo_path, revision_lines, env):
542 def git_post_receive(unused_repo_path, revision_lines, env):
543 """
543 """
544 Post push hook.
544 Post push hook.
545
545
546 :param extras: dictionary containing the keys defined in simplevcs
546 :param extras: dictionary containing the keys defined in simplevcs
547 :type extras: dict
547 :type extras: dict
548
548
549 :return: status code of the hook. 0 for success.
549 :return: status code of the hook. 0 for success.
550 :rtype: int
550 :rtype: int
551 """
551 """
552 extras = json.loads(env['RC_SCM_DATA'])
552 extras = json.loads(env['RC_SCM_DATA'])
553 if 'push' not in extras['hooks']:
553 if 'push' not in extras['hooks']:
554 return 0
554 return 0
555
555
556 rev_data = _parse_git_ref_lines(revision_lines)
556 rev_data = _parse_git_ref_lines(revision_lines)
557
557
558 git_revs = []
558 git_revs = []
559
559
560 # N.B.(skreft): it is ok to just call git, as git before calling a
560 # N.B.(skreft): it is ok to just call git, as git before calling a
561 # subcommand sets the PATH environment variable so that it points to the
561 # subcommand sets the PATH environment variable so that it points to the
562 # correct version of the git executable.
562 # correct version of the git executable.
563 empty_commit_id = '0' * 40
563 empty_commit_id = '0' * 40
564 branches = []
564 branches = []
565 tags = []
565 tags = []
566 for push_ref in rev_data:
566 for push_ref in rev_data:
567 type_ = push_ref['type']
567 type_ = push_ref['type']
568
568
569 if type_ == 'heads':
569 if type_ == 'heads':
570 if push_ref['old_rev'] == empty_commit_id:
570 if push_ref['old_rev'] == empty_commit_id:
571 # starting new branch case
571 # starting new branch case
572 if push_ref['name'] not in branches:
572 if push_ref['name'] not in branches:
573 branches.append(push_ref['name'])
573 branches.append(push_ref['name'])
574
574
575 # Fix up head revision if needed
575 # Fix up head revision if needed
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
577 try:
577 try:
578 subprocessio.run_command(cmd, env=os.environ.copy())
578 subprocessio.run_command(cmd, env=os.environ.copy())
579 except Exception:
579 except Exception:
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
581 'refs/heads/%s' % push_ref['name']]
581 'refs/heads/%s' % push_ref['name']]
582 print("Setting default branch to %s" % push_ref['name'])
582 print("Setting default branch to %s" % push_ref['name'])
583 subprocessio.run_command(cmd, env=os.environ.copy())
583 subprocessio.run_command(cmd, env=os.environ.copy())
584
584
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
586 '--format=%(refname)', 'refs/heads/*']
586 '--format=%(refname)', 'refs/heads/*']
587 stdout, stderr = subprocessio.run_command(
587 stdout, stderr = subprocessio.run_command(
588 cmd, env=os.environ.copy())
588 cmd, env=os.environ.copy())
589 heads = stdout
589 heads = stdout
590 heads = heads.replace(push_ref['ref'], '')
590 heads = heads.replace(push_ref['ref'], '')
591 heads = ' '.join(head for head
591 heads = ' '.join(head for head
592 in heads.splitlines() if head) or '.'
592 in heads.splitlines() if head) or '.'
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
594 '--pretty=format:%H', '--', push_ref['new_rev'],
594 '--pretty=format:%H', '--', push_ref['new_rev'],
595 '--not', heads]
595 '--not', heads]
596 stdout, stderr = subprocessio.run_command(
596 stdout, stderr = subprocessio.run_command(
597 cmd, env=os.environ.copy())
597 cmd, env=os.environ.copy())
598 git_revs.extend(stdout.splitlines())
598 git_revs.extend(stdout.splitlines())
599 elif push_ref['new_rev'] == empty_commit_id:
599 elif push_ref['new_rev'] == empty_commit_id:
600 # delete branch case
600 # delete branch case
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 else:
602 else:
603 if push_ref['name'] not in branches:
603 if push_ref['name'] not in branches:
604 branches.append(push_ref['name'])
604 branches.append(push_ref['name'])
605
605
606 cmd = [settings.GIT_EXECUTABLE, 'log',
606 cmd = [settings.GIT_EXECUTABLE, 'log',
607 '{old_rev}..{new_rev}'.format(**push_ref),
607 '{old_rev}..{new_rev}'.format(**push_ref),
608 '--reverse', '--pretty=format:%H']
608 '--reverse', '--pretty=format:%H']
609 stdout, stderr = subprocessio.run_command(
609 stdout, stderr = subprocessio.run_command(
610 cmd, env=os.environ.copy())
610 cmd, env=os.environ.copy())
611 git_revs.extend(stdout.splitlines())
611 git_revs.extend(stdout.splitlines())
612 elif type_ == 'tags':
612 elif type_ == 'tags':
613 if push_ref['name'] not in tags:
613 if push_ref['name'] not in tags:
614 tags.append(push_ref['name'])
614 tags.append(push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
616
616
617 extras['hook_type'] = 'post_receive'
617 extras['hook_type'] = 'post_receive'
618 extras['commit_ids'] = git_revs
618 extras['commit_ids'] = git_revs
619 extras['new_refs'] = {
619 extras['new_refs'] = {
620 'branches': branches,
620 'branches': branches,
621 'bookmarks': [],
621 'bookmarks': [],
622 'tags': tags,
622 'tags': tags,
623 }
623 }
624
624
625 if 'repo_size' in extras['hooks']:
625 if 'repo_size' in extras['hooks']:
626 try:
626 try:
627 _call_hook('repo_size', extras, GitMessageWriter())
627 _call_hook('repo_size', extras, GitMessageWriter())
628 except Exception:
628 except Exception:
629 pass
629 pass
630
630
631 return _call_hook('post_push', extras, GitMessageWriter())
631 return _call_hook('post_push', extras, GitMessageWriter())
632
632
633
633
634 def _get_extras_from_txn_id(path, txn_id):
634 def _get_extras_from_txn_id(path, txn_id):
635 extras = {}
635 extras = {}
636 try:
636 try:
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
638 '-t', txn_id,
638 '-t', txn_id,
639 '--revprop', path, 'rc-scm-extras']
639 '--revprop', path, 'rc-scm-extras']
640 stdout, stderr = subprocessio.run_command(
640 stdout, stderr = subprocessio.run_command(
641 cmd, env=os.environ.copy())
641 cmd, env=os.environ.copy())
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
643 except Exception:
643 except Exception:
644 log.exception('Failed to extract extras info from txn_id')
644 log.exception('Failed to extract extras info from txn_id')
645
645
646 return extras
646 return extras
647
647
648
648
649 def _get_extras_from_commit_id(commit_id, path):
649 def _get_extras_from_commit_id(commit_id, path):
650 extras = {}
650 extras = {}
651 try:
651 try:
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
653 '-r', commit_id,
653 '-r', commit_id,
654 '--revprop', path, 'rc-scm-extras']
654 '--revprop', path, 'rc-scm-extras']
655 stdout, stderr = subprocessio.run_command(
655 stdout, stderr = subprocessio.run_command(
656 cmd, env=os.environ.copy())
656 cmd, env=os.environ.copy())
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
658 except Exception:
658 except Exception:
659 log.exception('Failed to extract extras info from commit_id')
659 log.exception('Failed to extract extras info from commit_id')
660
660
661 return extras
661 return extras
662
662
663
663
664 def svn_pre_commit(repo_path, commit_data, env):
664 def svn_pre_commit(repo_path, commit_data, env):
665 path, txn_id = commit_data
665 path, txn_id = commit_data
666 branches = []
666 branches = []
667 tags = []
667 tags = []
668
668
669 if env.get('RC_SCM_DATA'):
669 if env.get('RC_SCM_DATA'):
670 extras = json.loads(env['RC_SCM_DATA'])
670 extras = json.loads(env['RC_SCM_DATA'])
671 else:
671 else:
672 # fallback method to read from TXN-ID stored data
672 # fallback method to read from TXN-ID stored data
673 extras = _get_extras_from_txn_id(path, txn_id)
673 extras = _get_extras_from_txn_id(path, txn_id)
674 if not extras:
674 if not extras:
675 return 0
675 return 0
676
676
677 extras['hook_type'] = 'pre_commit'
677 extras['hook_type'] = 'pre_commit'
678 extras['commit_ids'] = [txn_id]
678 extras['commit_ids'] = [txn_id]
679 extras['txn_id'] = txn_id
679 extras['txn_id'] = txn_id
680 extras['new_refs'] = {
680 extras['new_refs'] = {
681 'total_commits': 1,
681 'total_commits': 1,
682 'branches': branches,
682 'branches': branches,
683 'bookmarks': [],
683 'bookmarks': [],
684 'tags': tags,
684 'tags': tags,
685 }
685 }
686
686
687 return _call_hook('pre_push', extras, SvnMessageWriter())
687 return _call_hook('pre_push', extras, SvnMessageWriter())
688
688
689
689
690 def svn_post_commit(repo_path, commit_data, env):
690 def svn_post_commit(repo_path, commit_data, env):
691 """
691 """
692 commit_data is path, rev, txn_id
692 commit_data is path, rev, txn_id
693 """
693 """
694 if len(commit_data) == 3:
694 if len(commit_data) == 3:
695 path, commit_id, txn_id = commit_data
695 path, commit_id, txn_id = commit_data
696 elif len(commit_data) == 2:
696 elif len(commit_data) == 2:
697 log.error('Failed to extract txn_id from commit_data using legacy method. '
697 log.error('Failed to extract txn_id from commit_data using legacy method. '
698 'Some functionality might be limited')
698 'Some functionality might be limited')
699 path, commit_id = commit_data
699 path, commit_id = commit_data
700 txn_id = None
700 txn_id = None
701
701
702 branches = []
702 branches = []
703 tags = []
703 tags = []
704
704
705 if env.get('RC_SCM_DATA'):
705 if env.get('RC_SCM_DATA'):
706 extras = json.loads(env['RC_SCM_DATA'])
706 extras = json.loads(env['RC_SCM_DATA'])
707 else:
707 else:
708 # fallback method to read from TXN-ID stored data
708 # fallback method to read from TXN-ID stored data
709 extras = _get_extras_from_commit_id(commit_id, path)
709 extras = _get_extras_from_commit_id(commit_id, path)
710 if not extras:
710 if not extras:
711 return 0
711 return 0
712
712
713 extras['hook_type'] = 'post_commit'
713 extras['hook_type'] = 'post_commit'
714 extras['commit_ids'] = [commit_id]
714 extras['commit_ids'] = [commit_id]
715 extras['txn_id'] = txn_id
715 extras['txn_id'] = txn_id
716 extras['new_refs'] = {
716 extras['new_refs'] = {
717 'branches': branches,
717 'branches': branches,
718 'bookmarks': [],
718 'bookmarks': [],
719 'tags': tags,
719 'tags': tags,
720 'total_commits': 1,
720 'total_commits': 1,
721 }
721 }
722
722
723 if 'repo_size' in extras['hooks']:
723 if 'repo_size' in extras['hooks']:
724 try:
724 try:
725 _call_hook('repo_size', extras, SvnMessageWriter())
725 _call_hook('repo_size', extras, SvnMessageWriter())
726 except Exception:
726 except Exception:
727 pass
727 pass
728
728
729 return _call_hook('post_push', extras, SvnMessageWriter())
729 return _call_hook('post_push', extras, SvnMessageWriter())
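When `RC_SCM_DATA` is missing from the hook environment, both SVN hooks above fall back to the `rc-scm-extras` revision property, which `svnlook pget` returns as URL-safe base64-encoded JSON. A minimal decoding sketch of that fallback (the property value is illustrative):

    import base64
    import json

    def decode_rc_scm_extras(prop_value):
        # the revprop holds urlsafe-base64(JSON), mirroring the loads() calls above
        return json.loads(base64.urlsafe_b64decode(prop_value))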
@@ -1,688 +1,688 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 from itertools import chain
27 from itertools import chain
28 from cStringIO import StringIO
28 from cStringIO import StringIO
29
29
30 import simplejson as json
30 import simplejson as json
31 import msgpack
31 import msgpack
32 from pyramid.config import Configurator
32 from pyramid.config import Configurator
33 from pyramid.settings import asbool, aslist
33 from pyramid.settings import asbool, aslist
34 from pyramid.wsgi import wsgiapp
34 from pyramid.wsgi import wsgiapp
35 from pyramid.compat import configparser
35 from pyramid.compat import configparser
36 from pyramid.response import Response
36 from pyramid.response import Response
37
37
38 from vcsserver.utils import safe_int
38 from vcsserver.utils import safe_int
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
42 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
43 # causing problems and "fix" them if they are, by falling back to LC_ALL = C
43 # causing problems and "fix" them if they are, by falling back to LC_ALL = C
44
44
45 try:
45 try:
46 locale.setlocale(locale.LC_ALL, '')
46 locale.setlocale(locale.LC_ALL, '')
47 except locale.Error as e:
47 except locale.Error as e:
48 log.error(
48 log.error(
49 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
49 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
50 os.environ['LC_ALL'] = 'C'
50 os.environ['LC_ALL'] = 'C'
51
51
52 import vcsserver
52 import vcsserver
53 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
53 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
54 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
55 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 from vcsserver.echo_stub.echo_app import EchoApp
56 from vcsserver.echo_stub.echo_app import EchoApp
57 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
57 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 from vcsserver.lib.exc_tracking import store_exception
58 from vcsserver.lib.exc_tracking import store_exception
59 from vcsserver.server import VcsServer
59 from vcsserver.server import VcsServer
60
60
61 try:
61 try:
62 from vcsserver.git import GitFactory, GitRemote
62 from vcsserver.git import GitFactory, GitRemote
63 except ImportError:
63 except ImportError:
64 GitFactory = None
64 GitFactory = None
65 GitRemote = None
65 GitRemote = None
66
66
67 try:
67 try:
68 from vcsserver.hg import MercurialFactory, HgRemote
68 from vcsserver.hg import MercurialFactory, HgRemote
69 except ImportError:
69 except ImportError:
70 MercurialFactory = None
70 MercurialFactory = None
71 HgRemote = None
71 HgRemote = None
72
72
73 try:
73 try:
74 from vcsserver.svn import SubversionFactory, SvnRemote
74 from vcsserver.svn import SubversionFactory, SvnRemote
75 except ImportError:
75 except ImportError:
76 SubversionFactory = None
76 SubversionFactory = None
77 SvnRemote = None
77 SvnRemote = None
78
78
79
79
80 def _is_request_chunked(environ):
80 def _is_request_chunked(environ):
81 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
81 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 return stream
82 return stream
83
83
84
84
85 def _int_setting(settings, name, default):
85 def _int_setting(settings, name, default):
86 settings[name] = int(settings.get(name, default))
86 settings[name] = int(settings.get(name, default))
87 return settings[name]
87 return settings[name]
88
88
89
89
90 def _bool_setting(settings, name, default):
90 def _bool_setting(settings, name, default):
91 input_val = settings.get(name, default)
91 input_val = settings.get(name, default)
92 if isinstance(input_val, unicode):
92 if isinstance(input_val, unicode):
93 input_val = input_val.encode('utf8')
93 input_val = input_val.encode('utf8')
94 settings[name] = asbool(input_val)
94 settings[name] = asbool(input_val)
95 return settings[name]
95 return settings[name]
96
96
97
97
98 def _list_setting(settings, name, default):
98 def _list_setting(settings, name, default):
99 raw_value = settings.get(name, default)
99 raw_value = settings.get(name, default)
100
100
101 # Otherwise we assume it uses pyramids space/newline separation.
101 # We assume the value uses Pyramid's space/newline separation.
101 # We assume the value uses Pyramid's space/newline separation.
102 settings[name] = aslist(raw_value)
103 return settings[name]
103 return settings[name]
104
104
105
105
106 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
106 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 value = settings.get(name, default)
107 value = settings.get(name, default)
108
108
109 if default_when_empty and not value:
109 if default_when_empty and not value:
110 # use default value when value is empty
110 # use default value when value is empty
111 value = default
111 value = default
112
112
113 if lower:
113 if lower:
114 value = value.lower()
114 value = value.lower()
115 settings[name] = value
115 settings[name] = value
116 return settings[name]
116 return settings[name]
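These `_int_setting`/`_bool_setting`/`_list_setting`/`_string_setting` helpers coerce raw .ini values in place and return the coerced value, so later code can read `settings[name]` with a known type. A small illustrative use (the values are made up):

    settings = {
        'rc_cache.repo_object.expiration_time': '2592000',
        'dev.use_echo_app': 'true',
    }
    _int_setting(settings, 'rc_cache.repo_object.expiration_time', 3600)  # -> 2592000
    _bool_setting(settings, 'dev.use_echo_app', 'false')                  # -> True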
117
117
118
118
119 class VCS(object):
119 class VCS(object):
120 def __init__(self, locale_conf=None, cache_config=None):
120 def __init__(self, locale_conf=None, cache_config=None):
121 self.locale = locale_conf
121 self.locale = locale_conf
122 self.cache_config = cache_config
122 self.cache_config = cache_config
123 self._configure_locale()
123 self._configure_locale()
124
124
125 if GitFactory and GitRemote:
125 if GitFactory and GitRemote:
126 git_factory = GitFactory()
126 git_factory = GitFactory()
127 self._git_remote = GitRemote(git_factory)
127 self._git_remote = GitRemote(git_factory)
128 else:
128 else:
129 log.info("Git client import failed")
129 log.info("Git client import failed")
130
130
131 if MercurialFactory and HgRemote:
131 if MercurialFactory and HgRemote:
132 hg_factory = MercurialFactory()
132 hg_factory = MercurialFactory()
133 self._hg_remote = HgRemote(hg_factory)
133 self._hg_remote = HgRemote(hg_factory)
134 else:
134 else:
135 log.info("Mercurial client import failed")
135 log.info("Mercurial client import failed")
136
136
137 if SubversionFactory and SvnRemote:
137 if SubversionFactory and SvnRemote:
138 svn_factory = SubversionFactory()
138 svn_factory = SubversionFactory()
139
139
140 # hg factory is used for svn url validation
140 # hg factory is used for svn url validation
141 hg_factory = MercurialFactory()
141 hg_factory = MercurialFactory()
142 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
142 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
143 else:
143 else:
144 log.info("Subversion client import failed")
144 log.info("Subversion client import failed")
145
145
146 self._vcsserver = VcsServer()
146 self._vcsserver = VcsServer()
147
147
148 def _configure_locale(self):
148 def _configure_locale(self):
149 if self.locale:
149 if self.locale:
150 log.info('Setting `LC_ALL` locale to %s', self.locale)
150 log.info('Setting `LC_ALL` locale to %s', self.locale)
151 else:
151 else:
152 log.info(
152 log.info(
153 'Configuring locale subsystem based on environment variables')
153 'Configuring locale subsystem based on environment variables')
154 try:
154 try:
155 # If self.locale is the empty string, then the locale
155 # If self.locale is the empty string, then the locale
156 # module will use the environment variables. See the
156 # module will use the environment variables. See the
157 # documentation of the package `locale`.
157 # documentation of the package `locale`.
158 locale.setlocale(locale.LC_ALL, self.locale)
158 locale.setlocale(locale.LC_ALL, self.locale)
159
159
160 language_code, encoding = locale.getlocale()
160 language_code, encoding = locale.getlocale()
161 log.info(
161 log.info(
162 'Locale set to language code "%s" with encoding "%s".',
162 'Locale set to language code "%s" with encoding "%s".',
163 language_code, encoding)
163 language_code, encoding)
164 except locale.Error:
164 except locale.Error:
165 log.exception(
165 log.exception(
166 'Cannot set locale, not configuring the locale system')
166 'Cannot set locale, not configuring the locale system')
167
167
168
168
169 class WsgiProxy(object):
169 class WsgiProxy(object):
170 def __init__(self, wsgi):
170 def __init__(self, wsgi):
171 self.wsgi = wsgi
171 self.wsgi = wsgi
172
172
173 def __call__(self, environ, start_response):
173 def __call__(self, environ, start_response):
174 input_data = environ['wsgi.input'].read()
174 input_data = environ['wsgi.input'].read()
175 input_data = msgpack.unpackb(input_data)
175 input_data = msgpack.unpackb(input_data)
176
176
177 error = None
177 error = None
178 try:
178 try:
179 data, status, headers = self.wsgi.handle(
179 data, status, headers = self.wsgi.handle(
180 input_data['environment'], input_data['input_data'],
180 input_data['environment'], input_data['input_data'],
181 *input_data['args'], **input_data['kwargs'])
181 *input_data['args'], **input_data['kwargs'])
182 except Exception as e:
182 except Exception as e:
183 data, status, headers = [], None, None
183 data, status, headers = [], None, None
184 error = {
184 error = {
185 'message': str(e),
185 'message': str(e),
186 '_vcs_kind': getattr(e, '_vcs_kind', None)
186 '_vcs_kind': getattr(e, '_vcs_kind', None)
187 }
187 }
188
188
189 start_response('200 OK', [])
189 start_response('200 OK', [])
190 return self._iterator(error, status, headers, data)
190 return self._iterator(error, status, headers, data)
191
191
192 def _iterator(self, error, status, headers, data):
192 def _iterator(self, error, status, headers, data):
193 initial_data = [
193 initial_data = [
194 error,
194 error,
195 status,
195 status,
196 headers,
196 headers,
197 ]
197 ]
198
198
199 for d in chain(initial_data, data):
199 for d in chain(initial_data, data):
200 yield msgpack.packb(d)
200 yield msgpack.packb(d)
201
201
202
202
203 def not_found(request):
203 def not_found(request):
204 return {'status': '404 NOT FOUND'}
204 return {'status': '404 NOT FOUND'}
205
205
206
206
207 class VCSViewPredicate(object):
207 class VCSViewPredicate(object):
208 def __init__(self, val, config):
208 def __init__(self, val, config):
209 self.remotes = val
209 self.remotes = val
210
210
211 def text(self):
211 def text(self):
212 return 'vcs view method = %s' % (self.remotes.keys(),)
212 return 'vcs view method = %s' % (self.remotes.keys(),)
213
213
214 phash = text
214 phash = text
215
215
216 def __call__(self, context, request):
216 def __call__(self, context, request):
217 """
217 """
218 View predicate that returns true if given backend is supported by
218 View predicate that returns True if the given backend is supported by
218 View predicate that returns True if the given backend is supported by
219 the defined remotes.
219 the defined remotes.
220 """
221 backend = request.matchdict.get('backend')
221 backend = request.matchdict.get('backend')
222 return backend in self.remotes
222 return backend in self.remotes
223
223
224
224
225 class HTTPApplication(object):
225 class HTTPApplication(object):
226 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
226 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
227
227
228 remote_wsgi = remote_wsgi
228 remote_wsgi = remote_wsgi
229 _use_echo_app = False
229 _use_echo_app = False
230
230
231 def __init__(self, settings=None, global_config=None):
231 def __init__(self, settings=None, global_config=None):
232 self._sanitize_settings_and_apply_defaults(settings)
232 self._sanitize_settings_and_apply_defaults(settings)
233
233
234 self.config = Configurator(settings=settings)
234 self.config = Configurator(settings=settings)
235 self.global_config = global_config
235 self.global_config = global_config
236 self.config.include('vcsserver.lib.rc_cache')
236 self.config.include('vcsserver.lib.rc_cache')
237
237
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240 self._remotes = {
240 self._remotes = {
241 'hg': vcs._hg_remote,
241 'hg': vcs._hg_remote,
242 'git': vcs._git_remote,
242 'git': vcs._git_remote,
243 'svn': vcs._svn_remote,
243 'svn': vcs._svn_remote,
244 'server': vcs._vcsserver,
244 'server': vcs._vcsserver,
245 }
245 }
246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
247 self._use_echo_app = True
247 self._use_echo_app = True
248 log.warning("Using EchoApp for VCS operations.")
248 log.warning("Using EchoApp for VCS operations.")
249 self.remote_wsgi = remote_wsgi_stub
249 self.remote_wsgi = remote_wsgi_stub
250
250
251 self._configure_settings(global_config, settings)
251 self._configure_settings(global_config, settings)
252 self._configure()
252 self._configure()
253
253
254 def _configure_settings(self, global_config, app_settings):
254 def _configure_settings(self, global_config, app_settings):
255 """
255 """
256 Configure the settings module.
256 Configure the settings module.
257 """
257 """
258 settings_merged = global_config.copy()
258 settings_merged = global_config.copy()
259 settings_merged.update(app_settings)
259 settings_merged.update(app_settings)
260
260
261 git_path = app_settings.get('git_path', None)
261 git_path = app_settings.get('git_path', None)
262 if git_path:
262 if git_path:
263 settings.GIT_EXECUTABLE = git_path
263 settings.GIT_EXECUTABLE = git_path
264 binary_dir = app_settings.get('core.binary_dir', None)
264 binary_dir = app_settings.get('core.binary_dir', None)
265 if binary_dir:
265 if binary_dir:
266 settings.BINARY_DIR = binary_dir
266 settings.BINARY_DIR = binary_dir
267
267
268 # Store the settings to make them available to other modules.
268 # Store the settings to make them available to other modules.
269 vcsserver.PYRAMID_SETTINGS = settings_merged
269 vcsserver.PYRAMID_SETTINGS = settings_merged
270 vcsserver.CONFIG = settings_merged
270 vcsserver.CONFIG = settings_merged
271
271
272 def _sanitize_settings_and_apply_defaults(self, settings):
272 def _sanitize_settings_and_apply_defaults(self, settings):
273 temp_store = tempfile.gettempdir()
273 temp_store = tempfile.gettempdir()
274 default_cache_dir = os.path.join(temp_store, 'rc_cache')
274 default_cache_dir = os.path.join(temp_store, 'rc_cache')
275
275
276 # save the default cache dir and use it for all backends later.
276 # save the default cache dir and use it for all backends later.
277 default_cache_dir = _string_setting(
277 default_cache_dir = _string_setting(
278 settings,
278 settings,
279 'cache_dir',
279 'cache_dir',
280 default_cache_dir, lower=False, default_when_empty=True)
280 default_cache_dir, lower=False, default_when_empty=True)
281
281
282 # ensure we have our dir created
282 # ensure we have our dir created
283 if not os.path.isdir(default_cache_dir):
283 if not os.path.isdir(default_cache_dir):
284 os.makedirs(default_cache_dir, mode=0o755)
284 os.makedirs(default_cache_dir, mode=0o755)
285
285
286 # exception store cache
286 # exception store cache
287 _string_setting(
287 _string_setting(
288 settings,
288 settings,
289 'exception_tracker.store_path',
289 'exception_tracker.store_path',
290 temp_store, lower=False, default_when_empty=True)
290 temp_store, lower=False, default_when_empty=True)
291
291
292 # repo_object cache
292 # repo_object cache
293 _string_setting(
293 _string_setting(
294 settings,
294 settings,
295 'rc_cache.repo_object.backend',
295 'rc_cache.repo_object.backend',
296 'dogpile.cache.rc.file_namespace', lower=False)
296 'dogpile.cache.rc.file_namespace', lower=False)
297 _int_setting(
297 _int_setting(
298 settings,
298 settings,
299 'rc_cache.repo_object.expiration_time',
299 'rc_cache.repo_object.expiration_time',
300 30 * 24 * 60 * 60)
300 30 * 24 * 60 * 60)
301 _string_setting(
301 _string_setting(
302 settings,
302 settings,
303 'rc_cache.repo_object.arguments.filename',
303 'rc_cache.repo_object.arguments.filename',
304 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
304 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
305
305
306 def _configure(self):
306 def _configure(self):
307 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
307 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
308
308
309 self.config.add_route('service', '/_service')
309 self.config.add_route('service', '/_service')
310 self.config.add_route('status', '/status')
310 self.config.add_route('status', '/status')
311 self.config.add_route('hg_proxy', '/proxy/hg')
311 self.config.add_route('hg_proxy', '/proxy/hg')
312 self.config.add_route('git_proxy', '/proxy/git')
312 self.config.add_route('git_proxy', '/proxy/git')
313
313
314 # rpc methods
314 # rpc methods
315 self.config.add_route('vcs', '/{backend}')
315 self.config.add_route('vcs', '/{backend}')
316
316
317 # streaming rpc remote methods
317 # streaming rpc remote methods
318 self.config.add_route('vcs_stream', '/{backend}/stream')
318 self.config.add_route('vcs_stream', '/{backend}/stream')
319
319
320 # vcs operations clone/push as streaming
320 # vcs operations clone/push as streaming
321 self.config.add_route('stream_git', '/stream/git/*repo_name')
321 self.config.add_route('stream_git', '/stream/git/*repo_name')
322 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
322 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
323
323
324 self.config.add_view(self.status_view, route_name='status', renderer='json')
324 self.config.add_view(self.status_view, route_name='status', renderer='json')
325 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
325 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
326
326
327 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
327 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
328 self.config.add_view(self.git_proxy(), route_name='git_proxy')
328 self.config.add_view(self.git_proxy(), route_name='git_proxy')
329 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
329 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
330 vcs_view=self._remotes)
330 vcs_view=self._remotes)
331 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
331 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
332 vcs_view=self._remotes)
332 vcs_view=self._remotes)
333
333
334 self.config.add_view(self.hg_stream(), route_name='stream_hg')
334 self.config.add_view(self.hg_stream(), route_name='stream_hg')
335 self.config.add_view(self.git_stream(), route_name='stream_git')
335 self.config.add_view(self.git_stream(), route_name='stream_git')
336
336
337 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
337 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
338
338
339 self.config.add_notfound_view(not_found, renderer='json')
339 self.config.add_notfound_view(not_found, renderer='json')
340
340
341 self.config.add_view(self.handle_vcs_exception, context=Exception)
341 self.config.add_view(self.handle_vcs_exception, context=Exception)
342
342
343 self.config.add_tween(
343 self.config.add_tween(
344 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
344 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
345 )
345 )
346 self.config.add_request_method(
346 self.config.add_request_method(
347 'vcsserver.lib.request_counter.get_request_counter',
347 'vcsserver.lib.request_counter.get_request_counter',
348 'request_count')
348 'request_count')
349
349
350 def wsgi_app(self):
350 def wsgi_app(self):
351 return self.config.make_wsgi_app()
351 return self.config.make_wsgi_app()
352
352
353 def _vcs_view_params(self, request):
353 def _vcs_view_params(self, request):
354 remote = self._remotes[request.matchdict['backend']]
354 remote = self._remotes[request.matchdict['backend']]
355 payload = msgpack.unpackb(request.body, use_list=True)
355 payload = msgpack.unpackb(request.body, use_list=True)
356 method = payload.get('method')
356 method = payload.get('method')
357 params = payload['params']
357 params = payload['params']
358 wire = params.get('wire')
358 wire = params.get('wire')
359 args = params.get('args')
359 args = params.get('args')
360 kwargs = params.get('kwargs')
360 kwargs = params.get('kwargs')
361 context_uid = None
361 context_uid = None
362
362
363 if wire:
363 if wire:
364 try:
364 try:
365 wire['context'] = context_uid = uuid.UUID(wire['context'])
365 wire['context'] = context_uid = uuid.UUID(wire['context'])
366 except KeyError:
366 except KeyError:
367 pass
367 pass
368 args.insert(0, wire)
368 args.insert(0, wire)
369 repo_state_uid = wire.get('repo_state_uid') if wire else None
369 repo_state_uid = wire.get('repo_state_uid') if wire else None
370
370
371 # NOTE(marcink): trading complexity for slight performance
371 # NOTE(marcink): trading complexity for slight performance
372 if log.isEnabledFor(logging.DEBUG):
372 if log.isEnabledFor(logging.DEBUG):
373 no_args_methods = [
373 no_args_methods = [
374 'archive_repo'
374 'archive_repo'
375 ]
375 ]
376 if method in no_args_methods:
376 if method in no_args_methods:
377 call_args = ''
377 call_args = ''
378 else:
378 else:
379 call_args = args[1:]
379 call_args = args[1:]
380
380
381 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
381 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
382 method, call_args, kwargs, context_uid, repo_state_uid)
382 method, call_args, kwargs, context_uid, repo_state_uid)
383
383
384 return payload, remote, method, args, kwargs
384 return payload, remote, method, args, kwargs
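The payload unpacked here is a msgpack-encoded dict with `id`, `method` and `params` (the latter carrying `wire`, `args` and `kwargs`); streaming calls prefix the method with `stream:`. A hedged client-side sketch of building such a request body (the method name `some_method` is purely illustrative):

    import uuid
    import msgpack

    body = msgpack.packb({
        'id': 'req-1',
        'method': 'some_method',  # hypothetical remote method name
        'params': {
            'wire': {'context': str(uuid.uuid4())},  # parsed back into a UUID above
            'args': [],
            'kwargs': {},
        },
    })
    # POST `body` to the '/{backend}' route registered in _configure(), e.g. '/git'.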
385
385
386 def vcs_view(self, request):
386 def vcs_view(self, request):
387
387
388 payload, remote, method, args, kwargs = self._vcs_view_params(request)
388 payload, remote, method, args, kwargs = self._vcs_view_params(request)
389 payload_id = payload.get('id')
389 payload_id = payload.get('id')
390
390
391 try:
391 try:
392 resp = getattr(remote, method)(*args, **kwargs)
392 resp = getattr(remote, method)(*args, **kwargs)
393 except Exception as e:
393 except Exception as e:
394 exc_info = list(sys.exc_info())
394 exc_info = list(sys.exc_info())
395 exc_type, exc_value, exc_traceback = exc_info
395 exc_type, exc_value, exc_traceback = exc_info
396
396
397 org_exc = getattr(e, '_org_exc', None)
397 org_exc = getattr(e, '_org_exc', None)
398 org_exc_name = None
398 org_exc_name = None
399 org_exc_tb = ''
399 org_exc_tb = ''
400 if org_exc:
400 if org_exc:
401 org_exc_name = org_exc.__class__.__name__
401 org_exc_name = org_exc.__class__.__name__
402 org_exc_tb = getattr(e, '_org_exc_tb', '')
402 org_exc_tb = getattr(e, '_org_exc_tb', '')
403 # replace our "faked" exception with the original one
403 # replace our "faked" exception with the original one
404 exc_info[0] = org_exc.__class__
404 exc_info[0] = org_exc.__class__
405 exc_info[1] = org_exc
405 exc_info[1] = org_exc
406
406
407 should_store_exc = True
407 should_store_exc = True
408 if org_exc:
408 if org_exc:
409 def get_exc_fqn(_exc_obj):
409 def get_exc_fqn(_exc_obj):
410 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
410 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
411 return module_name + '.' + org_exc_name
411 return module_name + '.' + org_exc_name
412
412
413 exc_fqn = get_exc_fqn(org_exc)
413 exc_fqn = get_exc_fqn(org_exc)
414
414
415 if exc_fqn in ['mercurial.error.RepoLookupError',
415 if exc_fqn in ['mercurial.error.RepoLookupError',
416 'vcsserver.exceptions.RefNotFoundException']:
416 'vcsserver.exceptions.RefNotFoundException']:
417 should_store_exc = False
417 should_store_exc = False
418
418
419 if should_store_exc:
419 if should_store_exc:
420 store_exception(id(exc_info), exc_info)
420 store_exception(id(exc_info), exc_info)
421
421
422 tb_info = ''.join(
422 tb_info = ''.join(
423 traceback.format_exception(exc_type, exc_value, exc_traceback))
423 traceback.format_exception(exc_type, exc_value, exc_traceback))
424
424
425 type_ = e.__class__.__name__
425 type_ = e.__class__.__name__
426 if type_ not in self.ALLOWED_EXCEPTIONS:
426 if type_ not in self.ALLOWED_EXCEPTIONS:
427 type_ = None
427 type_ = None
428
428
429 resp = {
429 resp = {
430 'id': payload_id,
430 'id': payload_id,
431 'error': {
431 'error': {
432 'message': e.message,
432 'message': e.message,
433 'traceback': tb_info,
433 'traceback': tb_info,
434 'org_exc': org_exc_name,
434 'org_exc': org_exc_name,
435 'org_exc_tb': org_exc_tb,
435 'org_exc_tb': org_exc_tb,
436 'type': type_
436 'type': type_
437 }
437 }
438 }
438 }
439 try:
439 try:
440 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
440 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
441 except AttributeError:
441 except AttributeError:
442 pass
442 pass
443 else:
443 else:
444 resp = {
444 resp = {
445 'id': payload_id,
445 'id': payload_id,
446 'result': resp
446 'result': resp
447 }
447 }
448
448
449 return resp
449 return resp
450
450
451 def vcs_stream_view(self, request):
451 def vcs_stream_view(self, request):
452 payload, remote, method, args, kwargs = self._vcs_view_params(request)
452 payload, remote, method, args, kwargs = self._vcs_view_params(request)
453 # this method name carries a 'stream:' marker; strip it here
453 # this method name carries a 'stream:' marker; strip it here
454 method = method.split('stream:')[-1]
454 method = method.split('stream:')[-1]
455 chunk_size = safe_int(payload.get('chunk_size')) or 4096
455 chunk_size = safe_int(payload.get('chunk_size')) or 4096
456
456
457 try:
457 try:
458 resp = getattr(remote, method)(*args, **kwargs)
458 resp = getattr(remote, method)(*args, **kwargs)
459 except Exception as e:
459 except Exception as e:
460 raise
460 raise
461
461
462 def get_chunked_data(method_resp):
462 def get_chunked_data(method_resp):
463 stream = StringIO(method_resp)
463 stream = StringIO(method_resp)
464 while 1:
464 while 1:
465 chunk = stream.read(chunk_size)
465 chunk = stream.read(chunk_size)
466 if not chunk:
466 if not chunk:
467 break
467 break
468 yield chunk
468 yield chunk
469
469
470 response = Response(app_iter=get_chunked_data(resp))
470 response = Response(app_iter=get_chunked_data(resp))
471 response.content_type = 'application/octet-stream'
471 response.content_type = 'application/octet-stream'
472
472
473 return response
473 return response
474
474
475 def status_view(self, request):
475 def status_view(self, request):
476 import vcsserver
476 import vcsserver
477 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
477 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
478 'pid': os.getpid()}
478 'pid': os.getpid()}
479
479
480 def service_view(self, request):
480 def service_view(self, request):
481 import vcsserver
481 import vcsserver
482
482
483 payload = msgpack.unpackb(request.body, use_list=True)
483 payload = msgpack.unpackb(request.body, use_list=True)
484 server_config, app_config = {}, {}
484 server_config, app_config = {}, {}
485
485
486 try:
486 try:
487 path = self.global_config['__file__']
487 path = self.global_config['__file__']
488 config = configparser.RawConfigParser()
488 config = configparser.RawConfigParser()
489
489
490 config.read(path)
490 config.read(path)
491
491
492 if config.has_section('server:main'):
492 if config.has_section('server:main'):
493 server_config = dict(config.items('server:main'))
493 server_config = dict(config.items('server:main'))
494 if config.has_section('app:main'):
494 if config.has_section('app:main'):
495 app_config = dict(config.items('app:main'))
495 app_config = dict(config.items('app:main'))
496
496
497 except Exception:
497 except Exception:
498 log.exception('Failed to read .ini file for display')
498 log.exception('Failed to read .ini file for display')
499
499
500 environ = os.environ.items()
500 environ = os.environ.items()
501
501
502 resp = {
502 resp = {
503 'id': payload.get('id'),
503 'id': payload.get('id'),
504 'result': dict(
504 'result': dict(
505 version=vcsserver.__version__,
505 version=vcsserver.__version__,
506 config=server_config,
506 config=server_config,
507 app_config=app_config,
507 app_config=app_config,
508 environ=environ,
508 environ=environ,
509 payload=payload,
509 payload=payload,
510 )
510 )
511 }
511 }
512 return resp
512 return resp
513
513
514 def _msgpack_renderer_factory(self, info):
514 def _msgpack_renderer_factory(self, info):
515 def _render(value, system):
515 def _render(value, system):
516 request = system.get('request')
516 request = system.get('request')
517 if request is not None:
517 if request is not None:
518 response = request.response
518 response = request.response
519 ct = response.content_type
519 ct = response.content_type
520 if ct == response.default_content_type:
520 if ct == response.default_content_type:
521 response.content_type = 'application/x-msgpack'
521 response.content_type = 'application/x-msgpack'
522 return msgpack.packb(value)
522 return msgpack.packb(value)
523 return _render
523 return _render
524
524
525 def set_env_from_config(self, environ, config):
525 def set_env_from_config(self, environ, config):
526 dict_conf = {}
526 dict_conf = {}
527 try:
527 try:
528 for elem in config:
528 for elem in config:
529 if elem[0] == 'rhodecode':
529 if elem[0] == 'rhodecode':
530 dict_conf = json.loads(elem[2])
530 dict_conf = json.loads(elem[2])
531 break
531 break
532 except Exception:
532 except Exception:
533 log.exception('Failed to fetch SCM CONFIG')
533 log.exception('Failed to fetch SCM CONFIG')
534 return
534 return
535
535
536 username = dict_conf.get('username')
536 username = dict_conf.get('username')
537 if username:
537 if username:
538 environ['REMOTE_USER'] = username
538 environ['REMOTE_USER'] = username
539 # mercurial specific, some extension api rely on this
539 # mercurial specific, some extension api rely on this
540 environ['HGUSER'] = username
540 environ['HGUSER'] = username
541
541
542 ip = dict_conf.get('ip')
542 ip = dict_conf.get('ip')
543 if ip:
543 if ip:
544 environ['REMOTE_HOST'] = ip
544 environ['REMOTE_HOST'] = ip
545
545
546 if _is_request_chunked(environ):
546 if _is_request_chunked(environ):
547 # set the compatibility flag for webob
547 # set the compatibility flag for webob
548 environ['wsgi.input_terminated'] = True
548 environ['wsgi.input_terminated'] = True
549
549
550 def hg_proxy(self):
550 def hg_proxy(self):
551 @wsgiapp
551 @wsgiapp
552 def _hg_proxy(environ, start_response):
552 def _hg_proxy(environ, start_response):
553 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
553 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
554 return app(environ, start_response)
554 return app(environ, start_response)
555 return _hg_proxy
555 return _hg_proxy
556
556
557 def git_proxy(self):
557 def git_proxy(self):
558 @wsgiapp
558 @wsgiapp
559 def _git_proxy(environ, start_response):
559 def _git_proxy(environ, start_response):
560 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
560 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
561 return app(environ, start_response)
561 return app(environ, start_response)
562 return _git_proxy
562 return _git_proxy
563
563
564 def hg_stream(self):
564 def hg_stream(self):
565 if self._use_echo_app:
565 if self._use_echo_app:
566 @wsgiapp
566 @wsgiapp
567 def _hg_stream(environ, start_response):
567 def _hg_stream(environ, start_response):
568 app = EchoApp('fake_path', 'fake_name', None)
568 app = EchoApp('fake_path', 'fake_name', None)
569 return app(environ, start_response)
569 return app(environ, start_response)
570 return _hg_stream
570 return _hg_stream
571 else:
571 else:
572 @wsgiapp
572 @wsgiapp
573 def _hg_stream(environ, start_response):
573 def _hg_stream(environ, start_response):
574 log.debug('http-app: handling hg stream')
574 log.debug('http-app: handling hg stream')
575 repo_path = environ['HTTP_X_RC_REPO_PATH']
575 repo_path = environ['HTTP_X_RC_REPO_PATH']
576 repo_name = environ['HTTP_X_RC_REPO_NAME']
576 repo_name = environ['HTTP_X_RC_REPO_NAME']
577 packed_config = base64.b64decode(
577 packed_config = base64.b64decode(
578 environ['HTTP_X_RC_REPO_CONFIG'])
578 environ['HTTP_X_RC_REPO_CONFIG'])
579 config = msgpack.unpackb(packed_config)
579 config = msgpack.unpackb(packed_config)
580 app = scm_app.create_hg_wsgi_app(
580 app = scm_app.create_hg_wsgi_app(
581 repo_path, repo_name, config)
581 repo_path, repo_name, config)
582
582
583 # Consistent path information for hgweb
583 # Consistent path information for hgweb
584 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
584 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
585 environ['REPO_NAME'] = repo_name
585 environ['REPO_NAME'] = repo_name
586 self.set_env_from_config(environ, config)
586 self.set_env_from_config(environ, config)
587
587
588 log.debug('http-app: starting app handler '
588 log.debug('http-app: starting app handler '
589 'with %s and process request', app)
589 'with %s and process request', app)
590 return app(environ, ResponseFilter(start_response))
590 return app(environ, ResponseFilter(start_response))
591 return _hg_stream
591 return _hg_stream
592
592
593 def git_stream(self):
593 def git_stream(self):
594 if self._use_echo_app:
594 if self._use_echo_app:
595 @wsgiapp
595 @wsgiapp
596 def _git_stream(environ, start_response):
596 def _git_stream(environ, start_response):
597 app = EchoApp('fake_path', 'fake_name', None)
597 app = EchoApp('fake_path', 'fake_name', None)
598 return app(environ, start_response)
598 return app(environ, start_response)
599 return _git_stream
599 return _git_stream
600 else:
600 else:
601 @wsgiapp
601 @wsgiapp
602 def _git_stream(environ, start_response):
602 def _git_stream(environ, start_response):
603 log.debug('http-app: handling git stream')
603 log.debug('http-app: handling git stream')
604 repo_path = environ['HTTP_X_RC_REPO_PATH']
604 repo_path = environ['HTTP_X_RC_REPO_PATH']
605 repo_name = environ['HTTP_X_RC_REPO_NAME']
605 repo_name = environ['HTTP_X_RC_REPO_NAME']
606 packed_config = base64.b64decode(
606 packed_config = base64.b64decode(
607 environ['HTTP_X_RC_REPO_CONFIG'])
607 environ['HTTP_X_RC_REPO_CONFIG'])
608 config = msgpack.unpackb(packed_config)
608 config = msgpack.unpackb(packed_config)
609
609
610 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
610 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
611 self.set_env_from_config(environ, config)
611 self.set_env_from_config(environ, config)
612
612
613 content_type = environ.get('CONTENT_TYPE', '')
613 content_type = environ.get('CONTENT_TYPE', '')
614
614
615 path = environ['PATH_INFO']
615 path = environ['PATH_INFO']
616 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
616 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
617 log.debug(
617 log.debug(
618 'LFS: Detecting if request `%s` is LFS server path based '
618 'LFS: Detecting if request `%s` is LFS server path based '
619 'on content type:`%s`, is_lfs:%s',
619 'on content type:`%s`, is_lfs:%s',
620 path, content_type, is_lfs_request)
620 path, content_type, is_lfs_request)
621
621
622 if not is_lfs_request:
622 if not is_lfs_request:
623 # fallback detection by path
623 # fallback detection by path
624 if GIT_LFS_PROTO_PAT.match(path):
624 if GIT_LFS_PROTO_PAT.match(path):
625 is_lfs_request = True
625 is_lfs_request = True
626 log.debug(
626 log.debug(
627 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
627 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
628 path, is_lfs_request)
628 path, is_lfs_request)
629
629
630 if is_lfs_request:
630 if is_lfs_request:
631 app = scm_app.create_git_lfs_wsgi_app(
631 app = scm_app.create_git_lfs_wsgi_app(
632 repo_path, repo_name, config)
632 repo_path, repo_name, config)
633 else:
633 else:
634 app = scm_app.create_git_wsgi_app(
634 app = scm_app.create_git_wsgi_app(
635 repo_path, repo_name, config)
635 repo_path, repo_name, config)
636
636
637 log.debug('http-app: starting app handler '
637 log.debug('http-app: starting app handler '
638 'with %s and process request', app)
638 'with %s and process request', app)
639
639
640 return app(environ, start_response)
640 return app(environ, start_response)
641
641
642 return _git_stream
642 return _git_stream
643
643
644 def handle_vcs_exception(self, exception, request):
644 def handle_vcs_exception(self, exception, request):
645 _vcs_kind = getattr(exception, '_vcs_kind', '')
645 _vcs_kind = getattr(exception, '_vcs_kind', '')
646 if _vcs_kind == 'repo_locked':
646 if _vcs_kind == 'repo_locked':
647 # Get custom repo-locked status code if present.
647 # Get custom repo-locked status code if present.
648 status_code = request.headers.get('X-RC-Locked-Status-Code')
648 status_code = request.headers.get('X-RC-Locked-Status-Code')
649 return HTTPRepoLocked(
649 return HTTPRepoLocked(
650 title=exception.message, status_code=status_code)
650 title=exception.message, status_code=status_code)
651
651
652 elif _vcs_kind == 'repo_branch_protected':
652 elif _vcs_kind == 'repo_branch_protected':
653 # Get custom repo-branch-protected status code if present.
653 # Get custom repo-branch-protected status code if present.
654 return HTTPRepoBranchProtected(title=exception.message)
654 return HTTPRepoBranchProtected(title=exception.message)
655
655
656 exc_info = request.exc_info
656 exc_info = request.exc_info
657 store_exception(id(exc_info), exc_info)
657 store_exception(id(exc_info), exc_info)
658
658
659 traceback_info = 'unavailable'
659 traceback_info = 'unavailable'
660 if request.exc_info:
660 if request.exc_info:
661 exc_type, exc_value, exc_tb = request.exc_info
661 exc_type, exc_value, exc_tb = request.exc_info
662 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
662 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
663
663
664 log.error(
664 log.error(
665 'error occurred handling this request for path: %s, \n tb: %s',
665 'error occurred handling this request for path: %s, \n tb: %s',
666 request.path, traceback_info)
666 request.path, traceback_info)
667 raise exception
667 raise exception
668
668
669
669
670 class ResponseFilter(object):
670 class ResponseFilter(object):
671
671
672 def __init__(self, start_response):
672 def __init__(self, start_response):
673 self._start_response = start_response
673 self._start_response = start_response
674
674
675 def __call__(self, status, response_headers, exc_info=None):
675 def __call__(self, status, response_headers, exc_info=None):
676 headers = tuple(
676 headers = tuple(
677 (h, v) for h, v in response_headers
677 (h, v) for h, v in response_headers
678 if not wsgiref.util.is_hop_by_hop(h))
678 if not wsgiref.util.is_hop_by_hop(h))
679 return self._start_response(status, headers, exc_info)
679 return self._start_response(status, headers, exc_info)
680
680
681
681
682 def main(global_config, **settings):
682 def main(global_config, **settings):
683 if MercurialFactory:
683 if MercurialFactory:
684 hgpatches.patch_largefiles_capabilities()
684 hgpatches.patch_largefiles_capabilities()
685 hgpatches.patch_subrepo_type_mapping()
685 hgpatches.patch_subrepo_type_mapping()
686
686
687 app = HTTPApplication(settings=settings, global_config=global_config)
687 app = HTTPApplication(settings=settings, global_config=global_config)
688 return app.wsgi_app()
688 return app.wsgi_app()
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,169 +1,169 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 import os
21 import os
22 import time
22 import time
23 import datetime
23 import datetime
24 import msgpack
24 import msgpack
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import tempfile
27 import tempfile
28
28
29 from pyramid import compat
29 from pyramid import compat
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
34 global_prefix = 'vcsserver'
34 global_prefix = 'vcsserver'
35 exc_store_dir_name = 'rc_exception_store_v1'
35 exc_store_dir_name = 'rc_exception_store_v1'
36
36
37
37
38 def exc_serialize(exc_id, tb, exc_type):
38 def exc_serialize(exc_id, tb, exc_type):
39
39
40 data = {
40 data = {
41 'version': 'v1',
41 'version': 'v1',
42 'exc_id': exc_id,
42 'exc_id': exc_id,
43 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
43 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
44 'exc_timestamp': repr(time.time()),
44 'exc_timestamp': repr(time.time()),
45 'exc_message': tb,
45 'exc_message': tb,
46 'exc_type': exc_type,
46 'exc_type': exc_type,
47 }
47 }
48 return msgpack.packb(data), data
48 return msgpack.packb(data), data
49
49
50
50
51 def exc_unserialize(tb):
51 def exc_unserialize(tb):
52 return msgpack.unpackb(tb)
52 return msgpack.unpackb(tb)
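`exc_serialize` and `exc_unserialize` form a plain msgpack round trip over a small metadata dict. For example (the values are illustrative):

    packed, meta = exc_serialize('deadbeef1234', 'Traceback (most recent call last): ...', 'ValueError')
    restored = exc_unserialize(packed)
    assert restored['exc_type'] == 'ValueError'
    assert restored['exc_id'] == meta['exc_id']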
53
53
54
54
55 def get_exc_store():
55 def get_exc_store():
56 """
56 """
57 Get the exception store path, creating the directory if it does not exist yet
57 Get the exception store path, creating the directory if it does not exist yet
58 """
58 """
59 import vcsserver as app
59 import vcsserver as app
60
60
61 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
61 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
62 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
62 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
63
63
64 _exc_store_path = os.path.abspath(_exc_store_path)
64 _exc_store_path = os.path.abspath(_exc_store_path)
65 if not os.path.isdir(_exc_store_path):
65 if not os.path.isdir(_exc_store_path):
66 os.makedirs(_exc_store_path)
66 os.makedirs(_exc_store_path)
67 log.debug('Initializing exceptions store at %s', _exc_store_path)
67 log.debug('Initializing exceptions store at %s', _exc_store_path)
68 return _exc_store_path
68 return _exc_store_path
69
69
70
70
71 def _store_exception(exc_id, exc_info, prefix):
71 def _store_exception(exc_id, exc_info, prefix):
72 exc_type, exc_value, exc_traceback = exc_info
72 exc_type, exc_value, exc_traceback = exc_info
73
73
74 tb = ''.join(traceback.format_exception(
74 tb = ''.join(traceback.format_exception(
75 exc_type, exc_value, exc_traceback, None))
75 exc_type, exc_value, exc_traceback, None))
76
76
77 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
77 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
78
78
79 if detailed_tb:
79 if detailed_tb:
80 if isinstance(detailed_tb, compat.string_types):
80 if isinstance(detailed_tb, compat.string_types):
81 remote_tb = [detailed_tb]
81 remote_tb = [detailed_tb]
82
82
83 tb += (
83 tb += (
84 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
84 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
85 '{}\n'
85 '{}\n'
86 '+++ END SOURCE EXCEPTION +++\n'
86 '+++ END SOURCE EXCEPTION +++\n'
87 ''.format('\n'.join(remote_tb))
87 ''.format('\n'.join(remote_tb))
88 )
88 )
89
89
90 # Avoid that remote_tb also appears in the frame
90 # Avoid that remote_tb also appears in the frame
91 del remote_tb
91 del remote_tb
92
92
93 exc_type_name = exc_type.__name__
93 exc_type_name = exc_type.__name__
94 exc_store_path = get_exc_store()
94 exc_store_path = get_exc_store()
95 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
95 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
96 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
96 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
97 if not os.path.isdir(exc_store_path):
97 if not os.path.isdir(exc_store_path):
98 os.makedirs(exc_store_path)
98 os.makedirs(exc_store_path)
99 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
99 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
100 with open(stored_exc_path, 'wb') as f:
100 with open(stored_exc_path, 'wb') as f:
101 f.write(exc_data)
101 f.write(exc_data)
102 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
102 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
103
103
104
104
105 def store_exception(exc_id, exc_info, prefix=global_prefix):
105 def store_exception(exc_id, exc_info, prefix=global_prefix):
106 """
106 """
107 Example usage::
107 Example usage::
108
108
109 exc_info = sys.exc_info()
109 exc_info = sys.exc_info()
110 store_exception(id(exc_info), exc_info)
110 store_exception(id(exc_info), exc_info)
111 """
111 """
112
112
113 try:
113 try:
114 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
114 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
115 except Exception:
115 except Exception:
116 log.exception('Failed to store exception `%s` information', exc_id)
116 log.exception('Failed to store exception `%s` information', exc_id)
117 # never let this raise; an unhandled error here would crash the server badly.
117 # never let this raise; an unhandled error here would crash the server badly.
118 pass
118 pass
119
119
120
120
121 def _find_exc_file(exc_id, prefix=global_prefix):
121 def _find_exc_file(exc_id, prefix=global_prefix):
122 exc_store_path = get_exc_store()
122 exc_store_path = get_exc_store()
123 if prefix:
123 if prefix:
124 exc_id = '{}_{}'.format(exc_id, prefix)
124 exc_id = '{}_{}'.format(exc_id, prefix)
125 else:
125 else:
126 # search without a prefix
126 # search without a prefix
127 exc_id = '{}'.format(exc_id)
127 exc_id = '{}'.format(exc_id)
128
128
129 # search the store for a filename that starts with the pattern built above
129 # search the store for a filename that starts with the pattern built above
130 for fname in os.listdir(exc_store_path):
130 for fname in os.listdir(exc_store_path):
131 if fname.startswith(exc_id):
131 if fname.startswith(exc_id):
132 exc_id = os.path.join(exc_store_path, fname)
132 exc_id = os.path.join(exc_store_path, fname)
133 break
133 break
134 continue
134 continue
135 else:
135 else:
136 exc_id = None
136 exc_id = None
137
137
138 return exc_id
138 return exc_id
139
139
140
140
141 def _read_exception(exc_id, prefix):
141 def _read_exception(exc_id, prefix):
142 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
142 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
143 if exc_id_file_path:
143 if exc_id_file_path:
144 with open(exc_id_file_path, 'rb') as f:
144 with open(exc_id_file_path, 'rb') as f:
145 return exc_unserialize(f.read())
145 return exc_unserialize(f.read())
146 else:
146 else:
147 log.debug('Exception File `%s` not found', exc_id_file_path)
147 log.debug('Exception File `%s` not found', exc_id_file_path)
148 return None
148 return None
149
149
150
150
151 def read_exception(exc_id, prefix=global_prefix):
151 def read_exception(exc_id, prefix=global_prefix):
152 try:
152 try:
153 return _read_exception(exc_id=exc_id, prefix=prefix)
153 return _read_exception(exc_id=exc_id, prefix=prefix)
154 except Exception:
154 except Exception:
155 log.exception('Failed to read exception `%s` information', exc_id)
155 log.exception('Failed to read exception `%s` information', exc_id)
156 # never let this raise; an unhandled error here would crash the server badly.
156 # never let this raise; an unhandled error here would crash the server badly.
157 return None
157 return None
158
158
159
159
160 def delete_exception(exc_id, prefix=global_prefix):
160 def delete_exception(exc_id, prefix=global_prefix):
161 try:
161 try:
162 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
162 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
163 if exc_id_file_path:
163 if exc_id_file_path:
164 os.remove(exc_id_file_path)
164 os.remove(exc_id_file_path)
165
165
166 except Exception:
166 except Exception:
167 log.exception('Failed to remove exception `%s` information', exc_id)
167 log.exception('Failed to remove exception `%s` information', exc_id)
168 # never let this raise; an unhandled error here would crash the server badly.
168 # never let this raise; an unhandled error here would crash the server badly.
169 pass
169 pass
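For reference, the records written by _store_exception() above are plain msgpack blobs dropped into a flat directory. A minimal sketch of reading one back by hand, assuming the default tempdir-based store and a made-up exception id:

import os
import tempfile
import msgpack

exc_id = '140001122334455'  # hypothetical; e.g. id(sys.exc_info()) as used with store_exception()
store = os.path.join(tempfile.gettempdir(), 'rc_exception_store_v1')  # get_exc_store() default when no store_path is configured
for fname in os.listdir(store):
    if fname.startswith(exc_id):
        with open(os.path.join(store, fname), 'rb') as f:
            record = msgpack.unpackb(f.read())
        # keys written by exc_serialize(): version, exc_id, exc_utc_date,
        # exc_timestamp, exc_message (the formatted traceback) and exc_type
        print(record['exc_type'])
        break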
@@ -1,65 +1,65 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 import logging
21 import logging
22
22
23 from repoze.lru import LRUCache
23 from repoze.lru import LRUCache
24
24
25 from vcsserver.utils import safe_str
25 from vcsserver.utils import safe_str
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 class LRUDict(LRUCache):
30 class LRUDict(LRUCache):
31 """
31 """
32 Wrapper to provide partial dict access
32 Wrapper to provide partial dict access
33 """
33 """
34
34
35 def __setitem__(self, key, value):
35 def __setitem__(self, key, value):
36 return self.put(key, value)
36 return self.put(key, value)
37
37
38 def __getitem__(self, key):
38 def __getitem__(self, key):
39 return self.get(key)
39 return self.get(key)
40
40
41 def __contains__(self, key):
41 def __contains__(self, key):
42 return bool(self.get(key))
42 return bool(self.get(key))
43
43
44 def __delitem__(self, key):
44 def __delitem__(self, key):
45 del self.data[key]
45 del self.data[key]
46
46
47 def keys(self):
47 def keys(self):
48 return self.data.keys()
48 return self.data.keys()
49
49
50
50
51 class LRUDictDebug(LRUDict):
51 class LRUDictDebug(LRUDict):
52 """
52 """
53 Wrapper to provide some debug options
53 Wrapper to provide some debug options
54 """
54 """
55 def _report_keys(self):
55 def _report_keys(self):
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
57 # trick so that pformat prints it more nicely
57 # trick so that pformat prints it more nicely
58 fmt = '\n'
58 fmt = '\n'
59 for cnt, elem in enumerate(self.keys()):
59 for cnt, elem in enumerate(self.keys()):
60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
62
62
63 def __getitem__(self, key):
63 def __getitem__(self, key):
64 self._report_keys()
64 self._report_keys()
65 return self.get(key)
65 return self.get(key)
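A quick usage sketch of the wrapper above (keys and values are made up). Note that __contains__ delegates to get(), so a key holding a falsy value reports as absent:

from vcsserver.lib.memory_lru_dict import LRUDict

cache = LRUDict(2)      # keep at most two entries
cache['a'] = 1
cache['b'] = 2
cache['c'] = 3          # evicts the least recently used entry ('a')
print('a' in cache)     # False - evicted
print('b' in cache)     # True
print(cache['b'])       # 2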
@@ -1,72 +1,72 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 from dogpile.cache import register_backend
19 from dogpile.cache import register_backend
20
20
21 register_backend(
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
23 "LRUMemoryBackend")
24
24
25 register_backend(
25 register_backend(
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 "FileNamespaceBackend")
27 "FileNamespaceBackend")
28
28
29 register_backend(
29 register_backend(
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 "RedisPickleBackend")
31 "RedisPickleBackend")
32
32
33 register_backend(
33 register_backend(
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 "RedisMsgPackBackend")
35 "RedisMsgPackBackend")
36
36
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40 from . import region_meta
40 from . import region_meta
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
42
42
43
43
44 def configure_dogpile_cache(settings):
44 def configure_dogpile_cache(settings):
45 cache_dir = settings.get('cache_dir')
45 cache_dir = settings.get('cache_dir')
46 if cache_dir:
46 if cache_dir:
47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
48
48
49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
50
50
51 # inspect available namespaces
51 # inspect available namespaces
52 avail_regions = set()
52 avail_regions = set()
53 for key in rc_cache_data.keys():
53 for key in rc_cache_data.keys():
54 namespace_name = key.split('.', 1)[0]
54 namespace_name = key.split('.', 1)[0]
55 avail_regions.add(namespace_name)
55 avail_regions.add(namespace_name)
56 log.debug('dogpile: found following cache regions: %s', avail_regions)
56 log.debug('dogpile: found following cache regions: %s', avail_regions)
57
57
58 # register them into namespace
58 # register them into namespace
59 for region_name in avail_regions:
59 for region_name in avail_regions:
60 new_region = make_region(
60 new_region = make_region(
61 name=region_name,
61 name=region_name,
62 function_key_generator=None
62 function_key_generator=None
63 )
63 )
64
64
65 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
65 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
66 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
66 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
67 log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
67 log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
68 region_meta.dogpile_cache_regions[region_name] = new_region
68 region_meta.dogpile_cache_regions[region_name] = new_region
69
69
70
70
71 def includeme(config):
71 def includeme(config):
72 configure_dogpile_cache(config.registry.settings)
72 configure_dogpile_cache(config.registry.settings)
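configure_dogpile_cache() above derives each region name from the settings key segment between 'rc_cache.' and the next dot, then configures the region from the 'rc_cache.<region>.' prefix. A hypothetical settings fragment (the region name and file paths are invented; the backend names are the ones registered at the top of this module):

from vcsserver.lib.rc_cache import configure_dogpile_cache, region_meta

settings = {
    'cache_dir': '/var/cache/rc',
    'rc_cache.repo_object.backend': 'dogpile.cache.rc.file_namespace',
    'rc_cache.repo_object.expiration_time': '3600',
    'rc_cache.repo_object.arguments.filename': '/var/cache/rc/repo_object.db',
}
configure_dogpile_cache(settings)
region = region_meta.dogpile_cache_regions['repo_object']  # ready-to-use dogpile region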
@@ -1,253 +1,253 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24
24
25 from dogpile.cache.api import CachedValue
25 from dogpile.cache.api import CachedValue
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31
31
32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
33
33
34
34
35 _default_max_size = 1024
35 _default_max_size = 1024
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class LRUMemoryBackend(memory_backend.MemoryBackend):
40 class LRUMemoryBackend(memory_backend.MemoryBackend):
41 key_prefix = 'lru_mem_backend'
41 key_prefix = 'lru_mem_backend'
42 pickle_values = False
42 pickle_values = False
43
43
44 def __init__(self, arguments):
44 def __init__(self, arguments):
45 max_size = arguments.pop('max_size', _default_max_size)
45 max_size = arguments.pop('max_size', _default_max_size)
46
46
47 LRUDictClass = LRUDict
47 LRUDictClass = LRUDict
48 if arguments.pop('log_key_count', None):
48 if arguments.pop('log_key_count', None):
49 LRUDictClass = LRUDictDebug
49 LRUDictClass = LRUDictDebug
50
50
51 arguments['cache_dict'] = LRUDictClass(max_size)
51 arguments['cache_dict'] = LRUDictClass(max_size)
52 super(LRUMemoryBackend, self).__init__(arguments)
52 super(LRUMemoryBackend, self).__init__(arguments)
53
53
54 def delete(self, key):
54 def delete(self, key):
55 try:
55 try:
56 del self._cache[key]
56 del self._cache[key]
57 except KeyError:
57 except KeyError:
58 # we don't care if key isn't there at deletion
58 # we don't care if key isn't there at deletion
59 pass
59 pass
60
60
61 def delete_multi(self, keys):
61 def delete_multi(self, keys):
62 for key in keys:
62 for key in keys:
63 self.delete(key)
63 self.delete(key)
64
64
65
65
66 class PickleSerializer(object):
66 class PickleSerializer(object):
67
67
68 def _dumps(self, value, safe=False):
68 def _dumps(self, value, safe=False):
69 try:
69 try:
70 return compat.pickle.dumps(value)
70 return compat.pickle.dumps(value)
71 except Exception:
71 except Exception:
72 if safe:
72 if safe:
73 return NO_VALUE
73 return NO_VALUE
74 else:
74 else:
75 raise
75 raise
76
76
77 def _loads(self, value, safe=True):
77 def _loads(self, value, safe=True):
78 try:
78 try:
79 return compat.pickle.loads(value)
79 return compat.pickle.loads(value)
80 except Exception:
80 except Exception:
81 if safe:
81 if safe:
82 return NO_VALUE
82 return NO_VALUE
83 else:
83 else:
84 raise
84 raise
85
85
86
86
87 class MsgPackSerializer(object):
87 class MsgPackSerializer(object):
88
88
89 def _dumps(self, value, safe=False):
89 def _dumps(self, value, safe=False):
90 try:
90 try:
91 return msgpack.packb(value)
91 return msgpack.packb(value)
92 except Exception:
92 except Exception:
93 if safe:
93 if safe:
94 return NO_VALUE
94 return NO_VALUE
95 else:
95 else:
96 raise
96 raise
97
97
98 def _loads(self, value, safe=True):
98 def _loads(self, value, safe=True):
99 """
99 """
100 pickle maintains the `CachedValue` wrapper of the tuple;
100 pickle maintains the `CachedValue` wrapper of the tuple;
101 msgpack does not, so it must be added back in.
101 msgpack does not, so it must be added back in.
102 """
102 """
103 try:
103 try:
104 value = msgpack.unpackb(value, use_list=False)
104 value = msgpack.unpackb(value, use_list=False)
105 return CachedValue(*value)
105 return CachedValue(*value)
106 except Exception:
106 except Exception:
107 if safe:
107 if safe:
108 return NO_VALUE
108 return NO_VALUE
109 else:
109 else:
110 raise
110 raise
111
111
112
112
113 import fcntl
113 import fcntl
114 flock_org = fcntl.flock
114 flock_org = fcntl.flock
115
115
116
116
117 class CustomLockFactory(FileLock):
117 class CustomLockFactory(FileLock):
118
118
119 pass
119 pass
120
120
121
121
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
123 key_prefix = 'file_backend'
123 key_prefix = 'file_backend'
124
124
125 def __init__(self, arguments):
125 def __init__(self, arguments):
126 arguments['lock_factory'] = CustomLockFactory
126 arguments['lock_factory'] = CustomLockFactory
127 super(FileNamespaceBackend, self).__init__(arguments)
127 super(FileNamespaceBackend, self).__init__(arguments)
128
128
129 def __repr__(self):
129 def __repr__(self):
130 return '{} `{}`'.format(self.__class__, self.filename)
130 return '{} `{}`'.format(self.__class__, self.filename)
131
131
132 def list_keys(self, prefix=''):
132 def list_keys(self, prefix=''):
133 prefix = '{}:{}'.format(self.key_prefix, prefix)
133 prefix = '{}:{}'.format(self.key_prefix, prefix)
134
134
135 def cond(v):
135 def cond(v):
136 if not prefix:
136 if not prefix:
137 return True
137 return True
138
138
139 if v.startswith(prefix):
139 if v.startswith(prefix):
140 return True
140 return True
141 return False
141 return False
142
142
143 with self._dbm_file(True) as dbm:
143 with self._dbm_file(True) as dbm:
144
144
145 return filter(cond, dbm.keys())
145 return filter(cond, dbm.keys())
146
146
147 def get_store(self):
147 def get_store(self):
148 return self.filename
148 return self.filename
149
149
150 def get(self, key):
150 def get(self, key):
151 with self._dbm_file(False) as dbm:
151 with self._dbm_file(False) as dbm:
152 if hasattr(dbm, 'get'):
152 if hasattr(dbm, 'get'):
153 value = dbm.get(key, NO_VALUE)
153 value = dbm.get(key, NO_VALUE)
154 else:
154 else:
155 # gdbm objects lack a .get method
155 # gdbm objects lack a .get method
156 try:
156 try:
157 value = dbm[key]
157 value = dbm[key]
158 except KeyError:
158 except KeyError:
159 value = NO_VALUE
159 value = NO_VALUE
160 if value is not NO_VALUE:
160 if value is not NO_VALUE:
161 value = self._loads(value)
161 value = self._loads(value)
162 return value
162 return value
163
163
164 def set(self, key, value):
164 def set(self, key, value):
165 with self._dbm_file(True) as dbm:
165 with self._dbm_file(True) as dbm:
166 dbm[key] = self._dumps(value)
166 dbm[key] = self._dumps(value)
167
167
168 def set_multi(self, mapping):
168 def set_multi(self, mapping):
169 with self._dbm_file(True) as dbm:
169 with self._dbm_file(True) as dbm:
170 for key, value in mapping.items():
170 for key, value in mapping.items():
171 dbm[key] = self._dumps(value)
171 dbm[key] = self._dumps(value)
172
172
173
173
174 class BaseRedisBackend(redis_backend.RedisBackend):
174 class BaseRedisBackend(redis_backend.RedisBackend):
175
175
176 def _create_client(self):
176 def _create_client(self):
177 args = {}
177 args = {}
178
178
179 if self.url is not None:
179 if self.url is not None:
180 args.update(url=self.url)
180 args.update(url=self.url)
181
181
182 else:
182 else:
183 args.update(
183 args.update(
184 host=self.host, password=self.password,
184 host=self.host, password=self.password,
185 port=self.port, db=self.db
185 port=self.port, db=self.db
186 )
186 )
187
187
188 connection_pool = redis.ConnectionPool(**args)
188 connection_pool = redis.ConnectionPool(**args)
189
189
190 return redis.StrictRedis(connection_pool=connection_pool)
190 return redis.StrictRedis(connection_pool=connection_pool)
191
191
192 def list_keys(self, prefix=''):
192 def list_keys(self, prefix=''):
193 prefix = '{}:{}*'.format(self.key_prefix, prefix)
193 prefix = '{}:{}*'.format(self.key_prefix, prefix)
194 return self.client.keys(prefix)
194 return self.client.keys(prefix)
195
195
196 def get_store(self):
196 def get_store(self):
197 return self.client.connection_pool
197 return self.client.connection_pool
198
198
199 def get(self, key):
199 def get(self, key):
200 value = self.client.get(key)
200 value = self.client.get(key)
201 if value is None:
201 if value is None:
202 return NO_VALUE
202 return NO_VALUE
203 return self._loads(value)
203 return self._loads(value)
204
204
205 def get_multi(self, keys):
205 def get_multi(self, keys):
206 if not keys:
206 if not keys:
207 return []
207 return []
208 values = self.client.mget(keys)
208 values = self.client.mget(keys)
209 loads = self._loads
209 loads = self._loads
210 return [
210 return [
211 loads(v) if v is not None else NO_VALUE
211 loads(v) if v is not None else NO_VALUE
212 for v in values]
212 for v in values]
213
213
214 def set(self, key, value):
214 def set(self, key, value):
215 if self.redis_expiration_time:
215 if self.redis_expiration_time:
216 self.client.setex(key, self.redis_expiration_time,
216 self.client.setex(key, self.redis_expiration_time,
217 self._dumps(value))
217 self._dumps(value))
218 else:
218 else:
219 self.client.set(key, self._dumps(value))
219 self.client.set(key, self._dumps(value))
220
220
221 def set_multi(self, mapping):
221 def set_multi(self, mapping):
222 dumps = self._dumps
222 dumps = self._dumps
223 mapping = dict(
223 mapping = dict(
224 (k, dumps(v))
224 (k, dumps(v))
225 for k, v in mapping.items()
225 for k, v in mapping.items()
226 )
226 )
227
227
228 if not self.redis_expiration_time:
228 if not self.redis_expiration_time:
229 self.client.mset(mapping)
229 self.client.mset(mapping)
230 else:
230 else:
231 pipe = self.client.pipeline()
231 pipe = self.client.pipeline()
232 for key, value in mapping.items():
232 for key, value in mapping.items():
233 pipe.setex(key, self.redis_expiration_time, value)
233 pipe.setex(key, self.redis_expiration_time, value)
234 pipe.execute()
234 pipe.execute()
235
235
236 def get_mutex(self, key):
236 def get_mutex(self, key):
237 u = redis_backend.u
237 u = redis_backend.u
238 if self.distributed_lock:
238 if self.distributed_lock:
239 lock_key = u('_lock_{0}').format(key)
239 lock_key = u('_lock_{0}').format(key)
240 log.debug('Trying to acquire Redis lock for key %s', lock_key)
240 log.debug('Trying to acquire Redis lock for key %s', lock_key)
241 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
241 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
242 else:
242 else:
243 return None
243 return None
244
244
245
245
246 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
246 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
247 key_prefix = 'redis_pickle_backend'
247 key_prefix = 'redis_pickle_backend'
248 pass
248 pass
249
249
250
250
251 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
251 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
252 key_prefix = 'redis_msgpack_backend'
252 key_prefix = 'redis_msgpack_backend'
253 pass
253 pass
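A minimal sketch of wiring one of the backends above straight into dogpile (the region parameters are illustrative; it assumes vcsserver.lib.rc_cache is importable so the register_backend() calls have run):

from dogpile.cache import make_region
import vcsserver.lib.rc_cache  # noqa: registers the 'dogpile.cache.rc.*' backends

region = make_region().configure(
    'dogpile.cache.rc.memory_lru',
    expiration_time=60,
    arguments={'max_size': 100, 'log_key_count': False},  # consumed by LRUMemoryBackend above
)

@region.cache_on_arguments()
def double(x):
    return x * 2

double(21)  # computed once, afterwards served from the LRU dict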
@@ -1,26 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import tempfile
19 import tempfile
20
20
21 dogpile_config_defaults = {
21 dogpile_config_defaults = {
22 'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
22 'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
23 }
23 }
24
24
25 # GLOBAL TO STORE ALL REGISTERED REGIONS
25 # GLOBAL TO STORE ALL REGISTERED REGIONS
26 dogpile_cache_regions = {}
26 dogpile_cache_regions = {}
@@ -1,153 +1,153 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import functools
20 import functools
21 from decorator import decorate
21 from decorator import decorate
22
22
23 from dogpile.cache import CacheRegion
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
24 from dogpile.cache.util import compat
25
25
26 from vcsserver.utils import safe_str, sha1
26 from vcsserver.utils import safe_str, sha1
27
27
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 class RhodeCodeCacheRegion(CacheRegion):
32 class RhodeCodeCacheRegion(CacheRegion):
33
33
34 def conditional_cache_on_arguments(
34 def conditional_cache_on_arguments(
35 self, namespace=None,
35 self, namespace=None,
36 expiration_time=None,
36 expiration_time=None,
37 should_cache_fn=None,
37 should_cache_fn=None,
38 to_str=compat.string_type,
38 to_str=compat.string_type,
39 function_key_generator=None,
39 function_key_generator=None,
40 condition=True):
40 condition=True):
41 """
41 """
42 Custom conditional decorator that will not touch any dogpile internals if
42 Custom conditional decorator that will not touch any dogpile internals if
43 the condition isn't met. This works a bit differently than should_cache_fn,
43 the condition isn't met. This works a bit differently than should_cache_fn,
44 and it's faster in cases where we never want to compute cached values.
44 and it's faster in cases where we never want to compute cached values.
45 """
45 """
46 expiration_time_is_callable = compat.callable(expiration_time)
46 expiration_time_is_callable = compat.callable(expiration_time)
47
47
48 if function_key_generator is None:
48 if function_key_generator is None:
49 function_key_generator = self.function_key_generator
49 function_key_generator = self.function_key_generator
50
50
51 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
51 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
52
52
53 if not condition:
53 if not condition:
54 log.debug('Calling un-cached func:%s', user_func.func_name)
54 log.debug('Calling un-cached func:%s', user_func.func_name)
55 return user_func(*arg, **kw)
55 return user_func(*arg, **kw)
56
56
57 key = key_generator(*arg, **kw)
57 key = key_generator(*arg, **kw)
58
58
59 timeout = expiration_time() if expiration_time_is_callable \
59 timeout = expiration_time() if expiration_time_is_callable \
60 else expiration_time
60 else expiration_time
61
61
62 log.debug('Calling cached fn:%s', user_func.func_name)
62 log.debug('Calling cached fn:%s', user_func.func_name)
63 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
63 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
64
64
65 def cache_decorator(user_func):
65 def cache_decorator(user_func):
66 if to_str is compat.string_type:
66 if to_str is compat.string_type:
67 # backwards compatible
67 # backwards compatible
68 key_generator = function_key_generator(namespace, user_func)
68 key_generator = function_key_generator(namespace, user_func)
69 else:
69 else:
70 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
70 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
71
71
72 def refresh(*arg, **kw):
72 def refresh(*arg, **kw):
73 """
73 """
74 Like invalidate, but regenerates the value instead
74 Like invalidate, but regenerates the value instead
75 """
75 """
76 key = key_generator(*arg, **kw)
76 key = key_generator(*arg, **kw)
77 value = user_func(*arg, **kw)
77 value = user_func(*arg, **kw)
78 self.set(key, value)
78 self.set(key, value)
79 return value
79 return value
80
80
81 def invalidate(*arg, **kw):
81 def invalidate(*arg, **kw):
82 key = key_generator(*arg, **kw)
82 key = key_generator(*arg, **kw)
83 self.delete(key)
83 self.delete(key)
84
84
85 def set_(value, *arg, **kw):
85 def set_(value, *arg, **kw):
86 key = key_generator(*arg, **kw)
86 key = key_generator(*arg, **kw)
87 self.set(key, value)
87 self.set(key, value)
88
88
89 def get(*arg, **kw):
89 def get(*arg, **kw):
90 key = key_generator(*arg, **kw)
90 key = key_generator(*arg, **kw)
91 return self.get(key)
91 return self.get(key)
92
92
93 user_func.set = set_
93 user_func.set = set_
94 user_func.invalidate = invalidate
94 user_func.invalidate = invalidate
95 user_func.get = get
95 user_func.get = get
96 user_func.refresh = refresh
96 user_func.refresh = refresh
97 user_func.key_generator = key_generator
97 user_func.key_generator = key_generator
98 user_func.original = user_func
98 user_func.original = user_func
99
99
100 # Use `decorate` to preserve the signature of :param:`user_func`.
100 # Use `decorate` to preserve the signature of :param:`user_func`.
101
101
102 return decorate(user_func, functools.partial(
102 return decorate(user_func, functools.partial(
103 get_or_create_for_user_func, key_generator))
103 get_or_create_for_user_func, key_generator))
104
104
105 return cache_decorator
105 return cache_decorator
106
106
107
107
108 def make_region(*arg, **kw):
108 def make_region(*arg, **kw):
109 return RhodeCodeCacheRegion(*arg, **kw)
109 return RhodeCodeCacheRegion(*arg, **kw)
110
110
111
111
112 def get_default_cache_settings(settings, prefixes=None):
112 def get_default_cache_settings(settings, prefixes=None):
113 prefixes = prefixes or []
113 prefixes = prefixes or []
114 cache_settings = {}
114 cache_settings = {}
115 for key in settings.keys():
115 for key in settings.keys():
116 for prefix in prefixes:
116 for prefix in prefixes:
117 if key.startswith(prefix):
117 if key.startswith(prefix):
118 name = key.split(prefix)[1].strip()
118 name = key.split(prefix)[1].strip()
119 val = settings[key]
119 val = settings[key]
120 if isinstance(val, compat.string_types):
120 if isinstance(val, compat.string_types):
121 val = val.strip()
121 val = val.strip()
122 cache_settings[name] = val
122 cache_settings[name] = val
123 return cache_settings
123 return cache_settings
124
124
125
125
126 def compute_key_from_params(*args):
126 def compute_key_from_params(*args):
127 """
127 """
128 Helper to compute key from given params to be used in cache manager
128 Helper to compute key from given params to be used in cache manager
129 """
129 """
130 return sha1("_".join(map(safe_str, args)))
130 return sha1("_".join(map(safe_str, args)))
131
131
132
132
133 def backend_key_generator(backend):
133 def backend_key_generator(backend):
134 """
134 """
135 Special wrapper that also sends over the backend to the key generator
135 Special wrapper that also sends over the backend to the key generator
136 """
136 """
137 def wrapper(namespace, fn):
137 def wrapper(namespace, fn):
138 return key_generator(backend, namespace, fn)
138 return key_generator(backend, namespace, fn)
139 return wrapper
139 return wrapper
140
140
141
141
142 def key_generator(backend, namespace, fn):
142 def key_generator(backend, namespace, fn):
143 fname = fn.__name__
143 fname = fn.__name__
144
144
145 def generate_key(*args):
145 def generate_key(*args):
146 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
146 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
147 namespace_pref = namespace or 'default_namespace'
147 namespace_pref = namespace or 'default_namespace'
148 arg_key = compute_key_from_params(*args)
148 arg_key = compute_key_from_params(*args)
149 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
149 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
150
150
151 return final_key
151 return final_key
152
152
153 return generate_key
153 return generate_key
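key_generator() above produces keys of the form "<backend key_prefix>:<namespace>:<function name>_<sha1 of the stringified args>". A short usage sketch of the conditional decorator (the 'repo_object' region and the decorated function are made up; it assumes configure_dogpile_cache() already registered that region):

from vcsserver.lib.rc_cache import region_meta

region = region_meta.dogpile_cache_regions['repo_object']

@region.conditional_cache_on_arguments(namespace='demo', condition=True)
def fetch_info(repo_id):
    return {'id': repo_id}  # placeholder for an expensive computation

fetch_info(42)             # cached, e.g. under "file_backend:demo:fetch_info_<sha1>"
fetch_info.invalidate(42)  # helpers attached by the decorator above
fetch_info.refresh(42)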
@@ -1,27 +1,27 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 counter = 0
21 counter = 0
22
22
23
23
24 def get_request_counter(request):
24 def get_request_counter(request):
25 global counter
25 global counter
26 counter += 1
26 counter += 1
27 return counter
27 return counter
@@ -1,386 +1,386 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import simplejson as json
24 import simplejson as json
25 import dulwich.protocol
25 import dulwich.protocol
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver import hooks, subprocessio
28 from vcsserver import hooks, subprocessio
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class FileWrapper(object):
34 class FileWrapper(object):
35 """File wrapper that ensures how much data is read from it."""
35 """File wrapper that ensures how much data is read from it."""
36
36
37 def __init__(self, fd, content_length):
37 def __init__(self, fd, content_length):
38 self.fd = fd
38 self.fd = fd
39 self.content_length = content_length
39 self.content_length = content_length
40 self.remain = content_length
40 self.remain = content_length
41
41
42 def read(self, size):
42 def read(self, size):
43 if size <= self.remain:
43 if size <= self.remain:
44 try:
44 try:
45 data = self.fd.read(size)
45 data = self.fd.read(size)
46 except socket.error:
46 except socket.error:
47 raise IOError(self)
47 raise IOError(self)
48 self.remain -= size
48 self.remain -= size
49 elif self.remain:
49 elif self.remain:
50 data = self.fd.read(self.remain)
50 data = self.fd.read(self.remain)
51 self.remain = 0
51 self.remain = 0
52 else:
52 else:
53 data = None
53 data = None
54 return data
54 return data
55
55
56 def __repr__(self):
56 def __repr__(self):
57 return '<FileWrapper %s len: %s, read: %s>' % (
57 return '<FileWrapper %s len: %s, read: %s>' % (
58 self.fd, self.content_length, self.content_length - self.remain
58 self.fd, self.content_length, self.content_length - self.remain
59 )
59 )
60
60
61
61
62 class GitRepository(object):
62 class GitRepository(object):
63 """WSGI app for handling Git smart protocol endpoints."""
63 """WSGI app for handling Git smart protocol endpoints."""
64
64
65 git_folder_signature = frozenset(
65 git_folder_signature = frozenset(
66 ('config', 'head', 'info', 'objects', 'refs'))
66 ('config', 'head', 'info', 'objects', 'refs'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
68 valid_accepts = frozenset(('application/x-%s-result' %
69 c for c in commands))
69 c for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
77
77
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 extras):
79 extras):
80 files = frozenset(f.lower() for f in os.listdir(content_path))
80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 valid_dir_signature = self.git_folder_signature.issubset(files)
81 valid_dir_signature = self.git_folder_signature.issubset(files)
82
82
83 if not valid_dir_signature:
83 if not valid_dir_signature:
84 raise OSError('%s missing git signature' % content_path)
84 raise OSError('%s missing git signature' % content_path)
85
85
86 self.content_path = content_path
86 self.content_path = content_path
87 self.repo_name = repo_name
87 self.repo_name = repo_name
88 self.extras = extras
88 self.extras = extras
89 self.git_path = git_path
89 self.git_path = git_path
90 self.update_server_info = update_server_info
90 self.update_server_info = update_server_info
91
91
92 def _get_fixedpath(self, path):
92 def _get_fixedpath(self, path):
93 """
93 """
94 Small fix for repo_path
94 Small fix for repo_path
95
95
96 :param path:
96 :param path:
97 """
97 """
98 path = path.split(self.repo_name, 1)[-1]
98 path = path.split(self.repo_name, 1)[-1]
99 if path.startswith('.git'):
99 if path.startswith('.git'):
100 # for bare repos we still get the .git prefix inside, we skip it
100 # for bare repos we still get the .git prefix inside, we skip it
101 # here, and remove from the service command
101 # here, and remove from the service command
102 path = path[4:]
102 path = path[4:]
103
103
104 return path.strip('/')
104 return path.strip('/')
105
105
106 def inforefs(self, request, unused_environ):
106 def inforefs(self, request, unused_environ):
107 """
107 """
108 WSGI Response producer for HTTP GET Git Smart
108 WSGI Response producer for HTTP GET Git Smart
109 HTTP /info/refs request.
109 HTTP /info/refs request.
110 """
110 """
111
111
112 git_command = request.GET.get('service')
112 git_command = request.GET.get('service')
113 if git_command not in self.commands:
113 if git_command not in self.commands:
114 log.debug('command %s not allowed', git_command)
114 log.debug('command %s not allowed', git_command)
115 return exc.HTTPForbidden()
115 return exc.HTTPForbidden()
116
116
117 # please, resist the urge to add '\n' to git capture and increment
117 # please, resist the urge to add '\n' to git capture and increment
118 # line count by 1.
118 # line count by 1.
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
120 # a part of protocol.
120 # a part of protocol.
121 # The code in Git client not only does NOT need '\n', but actually
121 # The code in Git client not only does NOT need '\n', but actually
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
123 # It reads binary, per number of bytes specified.
123 # It reads binary, per number of bytes specified.
124 # if you do add '\n' as part of data, count it.
124 # if you do add '\n' as part of data, count it.
125 server_advert = '# service=%s\n' % git_command
125 server_advert = '# service=%s\n' % git_command
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
127 try:
127 try:
128 gitenv = dict(os.environ)
128 gitenv = dict(os.environ)
129 # forget all configs
129 # forget all configs
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
132 '--advertise-refs', self.content_path]
132 '--advertise-refs', self.content_path]
133 out = subprocessio.SubprocessIOChunker(
133 out = subprocessio.SubprocessIOChunker(
134 command,
134 command,
135 env=gitenv,
135 env=gitenv,
136 starting_values=[packet_len + server_advert + '0000'],
136 starting_values=[packet_len + server_advert + '0000'],
137 shell=False
137 shell=False
138 )
138 )
139 except EnvironmentError:
139 except EnvironmentError:
140 log.exception('Error processing command')
140 log.exception('Error processing command')
141 raise exc.HTTPExpectationFailed()
141 raise exc.HTTPExpectationFailed()
142
142
143 resp = Response()
143 resp = Response()
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
145 resp.charset = None
145 resp.charset = None
146 resp.app_iter = out
146 resp.app_iter = out
147
147
148 return resp
148 return resp
149
149
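# A worked example of the pkt-line length prefix computed in inforefs() above
# (illustrative sketch): the 4-byte hex length covers the payload plus the
# 4 prefix bytes themselves.
service = 'git-upload-pack'
server_advert = '# service=%s\n' % service                 # 26 bytes
packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
assert packet_len == '001e'                                # 26 + 4 = 30 = 0x1e
# the advertisement sent to the client therefore starts with:
# '001e# service=git-upload-pack\n0000'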
150 def _get_want_capabilities(self, request):
150 def _get_want_capabilities(self, request):
151 """Read the capabilities found in the first want line of the request."""
151 """Read the capabilities found in the first want line of the request."""
152 pos = request.body_file_seekable.tell()
152 pos = request.body_file_seekable.tell()
153 first_line = request.body_file_seekable.readline()
153 first_line = request.body_file_seekable.readline()
154 request.body_file_seekable.seek(pos)
154 request.body_file_seekable.seek(pos)
155
155
156 return frozenset(
156 return frozenset(
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158
158
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 """
160 """
161 Construct a response with an empty PACK file.
161 Construct a response with an empty PACK file.
162
162
163 We use an empty PACK file, as that would trigger the failure of the pull
163 We use an empty PACK file, as that would trigger the failure of the pull
164 or clone command.
164 or clone command.
165
165
166 We also print in the error output a message explaining why the command
166 We also print in the error output a message explaining why the command
167 was aborted.
167 was aborted.
168
168
169 If, additionally, the user is accepting messages, we send them the output
169 If, additionally, the user is accepting messages, we send them the output
170 of the pre-pull hook.
170 of the pre-pull hook.
171
171
172 Note that for clients not supporting side-band we just send them the
172 Note that for clients not supporting side-band we just send them the
173 empty PACK file.
173 empty PACK file.
174 """
174 """
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
176 response = []
176 response = []
177 proto = dulwich.protocol.Protocol(None, response.append)
177 proto = dulwich.protocol.Protocol(None, response.append)
178 proto.write_pkt_line('NAK\n')
178 proto.write_pkt_line('NAK\n')
179 self._write_sideband_to_proto(pre_pull_messages, proto,
179 self._write_sideband_to_proto(pre_pull_messages, proto,
180 capabilities)
180 capabilities)
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
182 # produces a fatal error in the client:
182 # produces a fatal error in the client:
183 # fatal: error in sideband demultiplexer
183 # fatal: error in sideband demultiplexer
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
185 proto.write_sideband(1, self.EMPTY_PACK)
185 proto.write_sideband(1, self.EMPTY_PACK)
186
186
187 # writes 0000
187 # writes 0000
188 proto.write_pkt_line(None)
188 proto.write_pkt_line(None)
189
189
190 return response
190 return response
191 else:
191 else:
192 return [self.EMPTY_PACK]
192 return [self.EMPTY_PACK]
193
193
194 def _write_sideband_to_proto(self, data, proto, capabilities):
194 def _write_sideband_to_proto(self, data, proto, capabilities):
195 """
195 """
196 Write the data to the proto's sideband number 2.
196 Write the data to the proto's sideband number 2.
197
197
198 We do not use dulwich's write_sideband directly as it only supports
198 We do not use dulwich's write_sideband directly as it only supports
199 side-band-64k.
199 side-band-64k.
200 """
200 """
201 if not data:
201 if not data:
202 return
202 return
203
203
204 # N.B.(skreft): The values below are explained in the pack protocol
204 # N.B.(skreft): The values below are explained in the pack protocol
205 # documentation, section Packfile Data.
205 # documentation, section Packfile Data.
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
207 if 'side-band-64k' in capabilities:
207 if 'side-band-64k' in capabilities:
208 chunk_size = 65515
208 chunk_size = 65515
209 elif 'side-band' in capabilities:
209 elif 'side-band' in capabilities:
210 chunk_size = 995
210 chunk_size = 995
211 else:
211 else:
212 return
212 return
213
213
214 chunker = (
214 chunker = (
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
216
216
217 for chunk in chunker:
217 for chunk in chunker:
218 proto.write_sideband(2, chunk)
218 proto.write_sideband(2, chunk)
219
219
220 def _get_messages(self, data, capabilities):
220 def _get_messages(self, data, capabilities):
221 """Return a list with packets for sending data in sideband number 2."""
221 """Return a list with packets for sending data in sideband number 2."""
222 response = []
222 response = []
223 proto = dulwich.protocol.Protocol(None, response.append)
223 proto = dulwich.protocol.Protocol(None, response.append)
224
224
225 self._write_sideband_to_proto(data, proto, capabilities)
225 self._write_sideband_to_proto(data, proto, capabilities)
226
226
227 return response
227 return response
228
228
229 def _inject_messages_to_response(self, response, capabilities,
229 def _inject_messages_to_response(self, response, capabilities,
230 start_messages, end_messages):
230 start_messages, end_messages):
231 """
231 """
232 Given a list response we inject the pre/post-pull messages.
232 Given a list response we inject the pre/post-pull messages.
233
233
234 We only inject the messages if the client supports sideband, and the
234 We only inject the messages if the client supports sideband, and the
235 response has the format:
235 response has the format:
236 0008NAK\n...0000
236 0008NAK\n...0000
237
237
238 Note that we do not check the no-progress capability, as git sends it
238 Note that we do not check the no-progress capability, as git sends it
239 by default, which would effectively block all messages.
239 by default, which would effectively block all messages.
240 """
240 """
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
242 return response
242 return response
243
243
244 if not start_messages and not end_messages:
244 if not start_messages and not end_messages:
245 return response
245 return response
246
246
247 # make a list out of response if it's an iterator
247 # make a list out of response if it's an iterator
248 # so we can investigate it for message injection.
248 # so we can investigate it for message injection.
249 if hasattr(response, '__iter__'):
249 if hasattr(response, '__iter__'):
250 response = list(response)
250 response = list(response)
251
251
252 if (not response[0].startswith('0008NAK\n') or
252 if (not response[0].startswith('0008NAK\n') or
253 not response[-1].endswith('0000')):
253 not response[-1].endswith('0000')):
254 return response
254 return response
255
255
256 new_response = ['0008NAK\n']
256 new_response = ['0008NAK\n']
257 new_response.extend(self._get_messages(start_messages, capabilities))
257 new_response.extend(self._get_messages(start_messages, capabilities))
258 if len(response) == 1:
258 if len(response) == 1:
259 new_response.append(response[0][8:-4])
259 new_response.append(response[0][8:-4])
260 else:
260 else:
261 new_response.append(response[0][8:])
261 new_response.append(response[0][8:])
262 new_response.extend(response[1:-1])
262 new_response.extend(response[1:-1])
263 new_response.append(response[-1][:-4])
263 new_response.append(response[-1][:-4])
264 new_response.extend(self._get_messages(end_messages, capabilities))
264 new_response.extend(self._get_messages(end_messages, capabilities))
265 new_response.append('0000')
265 new_response.append('0000')
266
266
267 return new_response
267 return new_response
268
268
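An illustrative-only walk-through of the slicing above, with made-up placeholder strings standing in for real sideband packets:

response = ['0008NAK\n<pack-data>0000']        # single-element response
body = response[0][8:-4]                       # strips '0008NAK\n' and '0000'
new_response = (['0008NAK\n']
                + ['<start-message packets>']  # produced by _get_messages()
                + [body]
                + ['<end-message packets>']
                + ['0000'])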
269 def backend(self, request, environ):
269 def backend(self, request, environ):
270 """
270 """
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
272 Reads commands and data from the HTTP POST body.
272 Reads commands and data from the HTTP POST body.
273 Returns an iterator object with the contents of the git command's
273 Returns an iterator object with the contents of the git command's
274 response written to stdout.
274 response written to stdout.
275 """
275 """
276 # TODO(skreft): think how we could detect an HTTPLockedException, as
276 # TODO(skreft): think how we could detect an HTTPLockedException, as
277 # we probably want to have the same mechanism used by mercurial and
277 # we probably want to have the same mechanism used by mercurial and
278 # simplevcs.
278 # simplevcs.
279 # For that we would need to parse the output of the command looking for
279 # For that we would need to parse the output of the command looking for
280 # some signs of the HTTPLockedError, parse the data and reraise it in
280 # some signs of the HTTPLockedError, parse the data and reraise it in
281 # pygrack. However, that would interfere with the streaming.
281 # pygrack. However, that would interfere with the streaming.
282 #
282 #
283 # Now the output of a blocked push is:
283 # Now the output of a blocked push is:
284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
285 # POST git-receive-pack (1047 bytes)
285 # POST git-receive-pack (1047 bytes)
286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
288 # ! [remote rejected] master -> master (pre-receive hook declined)
288 # ! [remote rejected] master -> master (pre-receive hook declined)
289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
290
290
291 git_command = self._get_fixedpath(request.path_info)
291 git_command = self._get_fixedpath(request.path_info)
292 if git_command not in self.commands:
292 if git_command not in self.commands:
293 log.debug('command %s not allowed', git_command)
293 log.debug('command %s not allowed', git_command)
294 return exc.HTTPForbidden()
294 return exc.HTTPForbidden()
295
295
296 capabilities = None
296 capabilities = None
297 if git_command == 'git-upload-pack':
297 if git_command == 'git-upload-pack':
298 capabilities = self._get_want_capabilities(request)
298 capabilities = self._get_want_capabilities(request)
299
299
300 if 'CONTENT_LENGTH' in environ:
300 if 'CONTENT_LENGTH' in environ:
301 inputstream = FileWrapper(request.body_file_seekable,
301 inputstream = FileWrapper(request.body_file_seekable,
302 request.content_length)
302 request.content_length)
303 else:
303 else:
304 inputstream = request.body_file_seekable
304 inputstream = request.body_file_seekable
305
305
306 resp = Response()
306 resp = Response()
307 resp.content_type = ('application/x-%s-result' %
307 resp.content_type = ('application/x-%s-result' %
308 git_command.encode('utf8'))
308 git_command.encode('utf8'))
309 resp.charset = None
309 resp.charset = None
310
310
311 pre_pull_messages = ''
311 pre_pull_messages = ''
312 if git_command == 'git-upload-pack':
312 if git_command == 'git-upload-pack':
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
314 if status != 0:
314 if status != 0:
315 resp.app_iter = self._build_failed_pre_pull_response(
315 resp.app_iter = self._build_failed_pre_pull_response(
316 capabilities, pre_pull_messages)
316 capabilities, pre_pull_messages)
317 return resp
317 return resp
318
318
319 gitenv = dict(os.environ)
319 gitenv = dict(os.environ)
320 # forget all configs
320 # forget all configs
321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
324 self.content_path]
324 self.content_path]
325 log.debug('handling cmd %s', cmd)
325 log.debug('handling cmd %s', cmd)
326
326
327 out = subprocessio.SubprocessIOChunker(
327 out = subprocessio.SubprocessIOChunker(
328 cmd,
328 cmd,
329 inputstream=inputstream,
329 inputstream=inputstream,
330 env=gitenv,
330 env=gitenv,
331 cwd=self.content_path,
331 cwd=self.content_path,
332 shell=False,
332 shell=False,
333 fail_on_stderr=False,
333 fail_on_stderr=False,
334 fail_on_return_code=False
334 fail_on_return_code=False
335 )
335 )
336
336
337 if self.update_server_info and git_command == 'git-receive-pack':
337 if self.update_server_info and git_command == 'git-receive-pack':
338 # We need to fully consume the iterator here, as the
338 # We need to fully consume the iterator here, as the
339 # update-server-info command needs to be run after the push.
339 # update-server-info command needs to be run after the push.
340 out = list(out)
340 out = list(out)
341
341
342 # Updating refs manually after each push.
342 # Updating refs manually after each push.
343 # This is required as some clients are exposing Git repos internally
343 # This is required as some clients are exposing Git repos internally
344 # with the dumb protocol.
344 # with the dumb protocol.
345 cmd = [self.git_path, 'update-server-info']
345 cmd = [self.git_path, 'update-server-info']
346 log.debug('handling cmd %s', cmd)
346 log.debug('handling cmd %s', cmd)
347 output = subprocessio.SubprocessIOChunker(
347 output = subprocessio.SubprocessIOChunker(
348 cmd,
348 cmd,
349 inputstream=inputstream,
349 inputstream=inputstream,
350 env=gitenv,
350 env=gitenv,
351 cwd=self.content_path,
351 cwd=self.content_path,
352 shell=False,
352 shell=False,
353 fail_on_stderr=False,
353 fail_on_stderr=False,
354 fail_on_return_code=False
354 fail_on_return_code=False
355 )
355 )
356 # Consume all the output so the subprocess finishes
356 # Consume all the output so the subprocess finishes
357 for _ in output:
357 for _ in output:
358 pass
358 pass
359
359
360 if git_command == 'git-upload-pack':
360 if git_command == 'git-upload-pack':
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
362 resp.app_iter = self._inject_messages_to_response(
362 resp.app_iter = self._inject_messages_to_response(
363 out, capabilities, pre_pull_messages, post_pull_messages)
363 out, capabilities, pre_pull_messages, post_pull_messages)
364 else:
364 else:
365 resp.app_iter = out
365 resp.app_iter = out
366
366
367 return resp
367 return resp
368
368
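A small sketch of how the subprocess command line above is assembled for a hypothetical repository path; git_command[4:] strips the 'git-' prefix, so 'git-upload-pack' runs the 'upload-pack' subcommand:

git_path = 'git'                              # settings.GIT_EXECUTABLE default
git_command = 'git-upload-pack'
content_path = '/srv/repos/example.git'       # hypothetical path
cmd = [git_path, git_command[4:], '--stateless-rpc', content_path]
assert cmd == ['git', 'upload-pack', '--stateless-rpc', '/srv/repos/example.git']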
369 def __call__(self, environ, start_response):
369 def __call__(self, environ, start_response):
370 request = Request(environ)
370 request = Request(environ)
371 _path = self._get_fixedpath(request.path_info)
371 _path = self._get_fixedpath(request.path_info)
372 if _path.startswith('info/refs'):
372 if _path.startswith('info/refs'):
373 app = self.inforefs
373 app = self.inforefs
374 else:
374 else:
375 app = self.backend
375 app = self.backend
376
376
377 try:
377 try:
378 resp = app(request, environ)
378 resp = app(request, environ)
379 except exc.HTTPException as error:
379 except exc.HTTPException as error:
380 log.exception('HTTP Error')
380 log.exception('HTTP Error')
381 resp = error
381 resp = error
382 except Exception:
382 except Exception:
383 log.exception('Unknown error')
383 log.exception('Unknown error')
384 resp = exc.HTTPInternalServerError()
384 resp = exc.HTTPInternalServerError()
385
385
386 return resp(environ, start_response)
386 return resp(environ, start_response)
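A hedged usage sketch: the handler above is a plain WSGI callable, so it can be served directly; the repository path and extras below are made up, and the positional arguments mirror the call made from scm_app later in this changeset.

from wsgiref.simple_server import make_server

# GitRepository(repo_name, content_path, git_path, update_server_info, extras)
app = GitRepository('example', '/srv/repos/example.git', 'git', False, {})
make_server('127.0.0.1', 8080, app).serve_forever()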
@@ -1,34 +1,34 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver import scm_app, wsgi_app_caller
18 from vcsserver import scm_app, wsgi_app_caller
19
19
20
20
21 class GitRemoteWsgi(object):
21 class GitRemoteWsgi(object):
22 def handle(self, environ, input_data, *args, **kwargs):
22 def handle(self, environ, input_data, *args, **kwargs):
23 app = wsgi_app_caller.WSGIAppCaller(
23 app = wsgi_app_caller.WSGIAppCaller(
24 scm_app.create_git_wsgi_app(*args, **kwargs))
24 scm_app.create_git_wsgi_app(*args, **kwargs))
25
25
26 return app.handle(environ, input_data)
26 return app.handle(environ, input_data)
27
27
28
28
29 class HgRemoteWsgi(object):
29 class HgRemoteWsgi(object):
30 def handle(self, environ, input_data, *args, **kwargs):
30 def handle(self, environ, input_data, *args, **kwargs):
31 app = wsgi_app_caller.WSGIAppCaller(
31 app = wsgi_app_caller.WSGIAppCaller(
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
33
33
34 return app.handle(environ, input_data)
34 return app.handle(environ, input_data)
@@ -1,235 +1,235 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.wireprotoserver
25 import mercurial.hgweb.common
25 import mercurial.hgweb.common
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request
61 This code is unreachable because we guarantee that the HTTP request
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible URL.
63 never going to get a user-visible URL.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 from mercurial.hgweb import request as requestmod
76 from mercurial.hgweb import request as requestmod
77 req = requestmod.parserequestfromenv(environ)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
79 gen = self.run_wsgi(req, res)
80
80
81 first_chunk = None
81 first_chunk = None
82
82
83 try:
83 try:
84 data = gen.next()
84 data = gen.next()
85
85
86 def first_chunk():
86 def first_chunk():
87 yield data
87 yield data
88 except StopIteration:
88 except StopIteration:
89 pass
89 pass
90
90
91 if first_chunk:
91 if first_chunk:
92 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
93 return gen
93 return gen
94
94
95 def _runwsgi(self, req, res, repo):
95 def _runwsgi(self, req, res, repo):
96
96
97 cmd = req.qsparams.get('cmd', '')
97 cmd = req.qsparams.get('cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
100 # internally from HG
101 from mercurial.hgweb.common import statusmessage
101 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
103 res.setbodybytes('')
104 return res.sendresponse()
104 return res.sendresponse()
105
105
106 return super(HgWeb, self)._runwsgi(req, res, repo)
106 return super(HgWeb, self)._runwsgi(req, res, repo)
107
107
108
108
109 def make_hg_ui_from_config(repo_config):
109 def make_hg_ui_from_config(repo_config):
110 baseui = mercurial.ui.ui()
110 baseui = mercurial.ui.ui()
111
111
112 # clean the baseui object
112 # clean the baseui object
113 baseui._ocfg = mercurial.config.config()
113 baseui._ocfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
116
116
117 for section, option, value in repo_config:
117 for section, option, value in repo_config:
118 baseui.setconfig(section, option, value)
118 baseui.setconfig(section, option, value)
119
119
120 # make our hgweb quiet so it doesn't print output
120 # make our hgweb quiet so it doesn't print output
121 baseui.setconfig('ui', 'quiet', 'true')
121 baseui.setconfig('ui', 'quiet', 'true')
122
122
123 return baseui
123 return baseui
124
124
125
125
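A hedged example of the repo_config argument consumed above: a list of (section, option, value) 3-tuples, the serialized form mentioned in the create_hg_wsgi_app docstring below; the concrete settings here are illustrative only.

repo_config = [
    ('web', 'push_ssl', 'false'),
    ('phases', 'publish', 'true'),
]
baseui = make_hg_ui_from_config(repo_config)  # also forces ui.quiet = true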
126 def update_hg_ui_from_hgrc(baseui, repo_path):
126 def update_hg_ui_from_hgrc(baseui, repo_path):
127 path = os.path.join(repo_path, '.hg', 'hgrc')
127 path = os.path.join(repo_path, '.hg', 'hgrc')
128
128
129 if not os.path.isfile(path):
129 if not os.path.isfile(path):
130 log.debug('hgrc file is not present at %s, skipping...', path)
130 log.debug('hgrc file is not present at %s, skipping...', path)
131 return
131 return
132 log.debug('reading hgrc from %s', path)
132 log.debug('reading hgrc from %s', path)
133 cfg = mercurial.config.config()
133 cfg = mercurial.config.config()
134 cfg.read(path)
134 cfg.read(path)
135 for section in HG_UI_SECTIONS:
135 for section in HG_UI_SECTIONS:
136 for k, v in cfg.items(section):
136 for k, v in cfg.items(section):
137 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
137 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
138 baseui.setconfig(section, k, v)
138 baseui.setconfig(section, k, v)
139
139
140
140
141 def create_hg_wsgi_app(repo_path, repo_name, config):
141 def create_hg_wsgi_app(repo_path, repo_name, config):
142 """
142 """
143 Prepares a WSGI application to handle Mercurial requests.
143 Prepares a WSGI application to handle Mercurial requests.
144
144
145 :param config: is a list of 3-item tuples representing a ConfigObject
145 :param config: is a list of 3-item tuples representing a ConfigObject
146 (it is the serialized version of the config object).
146 (it is the serialized version of the config object).
147 """
147 """
148 log.debug("Creating Mercurial WSGI application")
148 log.debug("Creating Mercurial WSGI application")
149
149
150 baseui = make_hg_ui_from_config(config)
150 baseui = make_hg_ui_from_config(config)
151 update_hg_ui_from_hgrc(baseui, repo_path)
151 update_hg_ui_from_hgrc(baseui, repo_path)
152
152
153 try:
153 try:
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
155 except mercurial.error.RequirementError as e:
155 except mercurial.error.RequirementError as e:
156 raise exceptions.RequirementException(e)(e)
156 raise exceptions.RequirementException(e)(e)
157
157
158
158
159 class GitHandler(object):
159 class GitHandler(object):
160 """
160 """
161 Handler for Git operations like push/pull etc
161 Handler for Git operations like push/pull etc
162 """
162 """
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
164 extras):
164 extras):
165 if not os.path.isdir(repo_location):
165 if not os.path.isdir(repo_location):
166 raise OSError(repo_location)
166 raise OSError(repo_location)
167 self.content_path = repo_location
167 self.content_path = repo_location
168 self.repo_name = repo_name
168 self.repo_name = repo_name
169 self.repo_location = repo_location
169 self.repo_location = repo_location
170 self.extras = extras
170 self.extras = extras
171 self.git_path = git_path
171 self.git_path = git_path
172 self.update_server_info = update_server_info
172 self.update_server_info = update_server_info
173
173
174 def __call__(self, environ, start_response):
174 def __call__(self, environ, start_response):
175 app = webob.exc.HTTPNotFound()
175 app = webob.exc.HTTPNotFound()
176 candidate_paths = (
176 candidate_paths = (
177 self.content_path, os.path.join(self.content_path, '.git'))
177 self.content_path, os.path.join(self.content_path, '.git'))
178
178
179 for content_path in candidate_paths:
179 for content_path in candidate_paths:
180 try:
180 try:
181 app = pygrack.GitRepository(
181 app = pygrack.GitRepository(
182 self.repo_name, content_path, self.git_path,
182 self.repo_name, content_path, self.git_path,
183 self.update_server_info, self.extras)
183 self.update_server_info, self.extras)
184 break
184 break
185 except OSError:
185 except OSError:
186 continue
186 continue
187
187
188 return app(environ, start_response)
188 return app(environ, start_response)
189
189
190
190
191 def create_git_wsgi_app(repo_path, repo_name, config):
191 def create_git_wsgi_app(repo_path, repo_name, config):
192 """
192 """
193 Creates a WSGI application to handle Git requests.
193 Creates a WSGI application to handle Git requests.
194
194
195 :param config: is a dictionary holding the extras.
195 :param config: is a dictionary holding the extras.
196 """
196 """
197 git_path = settings.GIT_EXECUTABLE
197 git_path = settings.GIT_EXECUTABLE
198 update_server_info = config.pop('git_update_server_info')
198 update_server_info = config.pop('git_update_server_info')
199 app = GitHandler(
199 app = GitHandler(
200 repo_path, repo_name, git_path, update_server_info, config)
200 repo_path, repo_name, git_path, update_server_info, config)
201
201
202 return app
202 return app
203
203
204
204
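A hedged usage sketch for create_git_wsgi_app: the config dictionary carries the 'git_update_server_info' flag, which is popped off, while the remaining items travel on as the hook extras; all values below are made up.

config = {
    'git_update_server_info': False,
    'repository': 'example',          # hypothetical extras entry
}
app = create_git_wsgi_app('/srv/repos/example.git', 'example', config)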
205 class GitLFSHandler(object):
205 class GitLFSHandler(object):
206 """
206 """
207 Handler for Git LFS operations
207 Handler for Git LFS operations
208 """
208 """
209
209
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
211 extras):
211 extras):
212 if not os.path.isdir(repo_location):
212 if not os.path.isdir(repo_location):
213 raise OSError(repo_location)
213 raise OSError(repo_location)
214 self.content_path = repo_location
214 self.content_path = repo_location
215 self.repo_name = repo_name
215 self.repo_name = repo_name
216 self.repo_location = repo_location
216 self.repo_location = repo_location
217 self.extras = extras
217 self.extras = extras
218 self.git_path = git_path
218 self.git_path = git_path
219 self.update_server_info = update_server_info
219 self.update_server_info = update_server_info
220
220
221 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
221 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
223 return app
223 return app
224
224
225
225
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 git_path = settings.GIT_EXECUTABLE
227 git_path = settings.GIT_EXECUTABLE
228 update_server_info = config.pop('git_update_server_info')
228 update_server_info = config.pop('git_update_server_info')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
232 app = GitLFSHandler(
232 app = GitLFSHandler(
233 repo_path, repo_name, git_path, update_server_info, config)
233 repo_path, repo_name, git_path, update_server_info, config)
234
234
235 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
235 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
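Similarly, a hedged example of the keys popped by create_git_lfs_wsgi_app above; the values are illustrative, and 'git_lfs_http_scheme' may be omitted to fall back to 'http'.

lfs_config = {
    'git_update_server_info': False,
    'git_lfs_enabled': True,
    'git_lfs_store_path': '/var/opt/lfs_store',   # hypothetical store location
    'git_lfs_http_scheme': 'https',
}
lfs_app = create_git_lfs_wsgi_app('/srv/repos/example.git', 'example', lfs_config)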
@@ -1,78 +1,78 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import gc
18 import gc
19 import logging
19 import logging
20 import os
20 import os
21 import time
21 import time
22
22
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class VcsServer(object):
27 class VcsServer(object):
28 """
28 """
29 Exposed remote interface of the vcsserver itself.
29 Exposed remote interface of the vcsserver itself.
30
30
31 This object can be used to manage the server remotely. Right now the main
31 This object can be used to manage the server remotely. Right now the main
32 use case is to allow shutting down the server.
32 use case is to allow shutting down the server.
33 """
33 """
34
34
35 _shutdown = False
35 _shutdown = False
36
36
37 def shutdown(self):
37 def shutdown(self):
38 self._shutdown = True
38 self._shutdown = True
39
39
40 def ping(self):
40 def ping(self):
41 """
41 """
42 Utility to probe a server connection.
42 Utility to probe a server connection.
43 """
43 """
44 log.debug("Received server ping.")
44 log.debug("Received server ping.")
45
45
46 def echo(self, data):
46 def echo(self, data):
47 """
47 """
48 Utility for performance testing.
48 Utility for performance testing.
49
49
50 Allows passing in arbitrary data and returns that same data.
50 Allows passing in arbitrary data and returns that same data.
51 """
51 """
52 log.debug("Received server echo.")
52 log.debug("Received server echo.")
53 return data
53 return data
54
54
55 def sleep(self, seconds):
55 def sleep(self, seconds):
56 """
56 """
57 Utility to simulate long running server interaction.
57 Utility to simulate long running server interaction.
58 """
58 """
59 log.debug("Sleeping %s seconds", seconds)
59 log.debug("Sleeping %s seconds", seconds)
60 time.sleep(seconds)
60 time.sleep(seconds)
61
61
62 def get_pid(self):
62 def get_pid(self):
63 """
63 """
64 Allows discovering the PID through a proxy object.
64 Allows discovering the PID through a proxy object.
65 """
65 """
66 return os.getpid()
66 return os.getpid()
67
67
68 def run_gc(self):
68 def run_gc(self):
69 """
69 """
70 Allows triggering the garbage collector.
70 Allows triggering the garbage collector.
71
71
72 Main intention is to support statistics gathering during test runs.
72 Main intention is to support statistics gathering during test runs.
73 """
73 """
74 freed_objects = gc.collect()
74 freed_objects = gc.collect()
75 return {
75 return {
76 'freed_objects': freed_objects,
76 'freed_objects': freed_objects,
77 'garbage': len(gc.garbage),
77 'garbage': len(gc.garbage),
78 }
78 }
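A short, hedged example of the shape returned by run_gc above; the numbers are illustrative.

server = VcsServer()
stats = server.run_gc()
# e.g. {'freed_objects': 120, 'garbage': 0}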
@@ -1,22 +1,22 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 WIRE_ENCODING = 'UTF-8'
18 WIRE_ENCODING = 'UTF-8'
19 GIT_EXECUTABLE = 'git'
19 GIT_EXECUTABLE = 'git'
20 SVN_EXECUTABLE = 'svn'
20 SVN_EXECUTABLE = 'svn'
21 SVNLOOK_EXECUTABLE = 'svnlook'
21 SVNLOOK_EXECUTABLE = 'svnlook'
22 BINARY_DIR = ''
22 BINARY_DIR = ''
@@ -1,791 +1,791 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39 from vcsserver.vcs_base import RemoteBase
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 svn_compatible_versions_map = {
44 svn_compatible_versions_map = {
45 'pre-1.4-compatible': '1.3',
45 'pre-1.4-compatible': '1.3',
46 'pre-1.5-compatible': '1.4',
46 'pre-1.5-compatible': '1.4',
47 'pre-1.6-compatible': '1.5',
47 'pre-1.6-compatible': '1.5',
48 'pre-1.8-compatible': '1.7',
48 'pre-1.8-compatible': '1.7',
49 'pre-1.9-compatible': '1.8',
49 'pre-1.9-compatible': '1.8',
50 }
50 }
51
51
52 current_compatible_version = '1.12'
52 current_compatible_version = '1.12'
53
53
54
54
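A small illustration of how the compatibility map above is consulted in SubversionFactory._create_repo below: legacy 'pre-*-compatible' keys map to a plain version string, and anything else passes through unchanged.

compatible_version = 'pre-1.8-compatible'     # example input
compatible_version_string = (
    svn_compatible_versions_map.get(compatible_version) or compatible_version)
assert compatible_version_string == '1.7'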
55 def reraise_safe_exceptions(func):
55 def reraise_safe_exceptions(func):
56 """Decorator for converting svn exceptions to something neutral."""
56 """Decorator for converting svn exceptions to something neutral."""
57 def wrapper(*args, **kwargs):
57 def wrapper(*args, **kwargs):
58 try:
58 try:
59 return func(*args, **kwargs)
59 return func(*args, **kwargs)
60 except Exception as e:
60 except Exception as e:
61 if not hasattr(e, '_vcs_kind'):
61 if not hasattr(e, '_vcs_kind'):
62 log.exception("Unhandled exception in svn remote call")
62 log.exception("Unhandled exception in svn remote call")
63 raise_from_original(exceptions.UnhandledException(e))
63 raise_from_original(exceptions.UnhandledException(e))
64 raise
64 raise
65 return wrapper
65 return wrapper
66
66
67
67
68 class SubversionFactory(RepoFactory):
68 class SubversionFactory(RepoFactory):
69 repo_type = 'svn'
69 repo_type = 'svn'
70
70
71 def _create_repo(self, wire, create, compatible_version):
71 def _create_repo(self, wire, create, compatible_version):
72 path = svn.core.svn_path_canonicalize(wire['path'])
72 path = svn.core.svn_path_canonicalize(wire['path'])
73 if create:
73 if create:
74 fs_config = {'compatible-version': current_compatible_version}
74 fs_config = {'compatible-version': current_compatible_version}
75 if compatible_version:
75 if compatible_version:
76
76
77 compatible_version_string = \
77 compatible_version_string = \
78 svn_compatible_versions_map.get(compatible_version) \
78 svn_compatible_versions_map.get(compatible_version) \
79 or compatible_version
79 or compatible_version
80 fs_config['compatible-version'] = compatible_version_string
80 fs_config['compatible-version'] = compatible_version_string
81
81
82 log.debug('Create SVN repo with config "%s"', fs_config)
82 log.debug('Create SVN repo with config "%s"', fs_config)
83 repo = svn.repos.create(path, "", "", None, fs_config)
83 repo = svn.repos.create(path, "", "", None, fs_config)
84 else:
84 else:
85 repo = svn.repos.open(path)
85 repo = svn.repos.open(path)
86
86
87 log.debug('Got SVN object: %s', repo)
87 log.debug('Got SVN object: %s', repo)
88 return repo
88 return repo
89
89
90 def repo(self, wire, create=False, compatible_version=None):
90 def repo(self, wire, create=False, compatible_version=None):
91 """
91 """
92 Get a repository instance for the given path.
92 Get a repository instance for the given path.
93 """
93 """
94 return self._create_repo(wire, create, compatible_version)
94 return self._create_repo(wire, create, compatible_version)
95
95
96
96
97 NODE_TYPE_MAPPING = {
97 NODE_TYPE_MAPPING = {
98 svn.core.svn_node_file: 'file',
98 svn.core.svn_node_file: 'file',
99 svn.core.svn_node_dir: 'dir',
99 svn.core.svn_node_dir: 'dir',
100 }
100 }
101
101
102
102
103 class SvnRemote(RemoteBase):
103 class SvnRemote(RemoteBase):
104
104
105 def __init__(self, factory, hg_factory=None):
105 def __init__(self, factory, hg_factory=None):
106 self._factory = factory
106 self._factory = factory
107 # TODO: Remove once we do not use internal Mercurial objects anymore
107 # TODO: Remove once we do not use internal Mercurial objects anymore
108 # for subversion
108 # for subversion
109 self._hg_factory = hg_factory
109 self._hg_factory = hg_factory
110
110
111 @reraise_safe_exceptions
111 @reraise_safe_exceptions
112 def discover_svn_version(self):
112 def discover_svn_version(self):
113 try:
113 try:
114 import svn.core
114 import svn.core
115 svn_ver = svn.core.SVN_VERSION
115 svn_ver = svn.core.SVN_VERSION
116 except ImportError:
116 except ImportError:
117 svn_ver = None
117 svn_ver = None
118 return svn_ver
118 return svn_ver
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def is_empty(self, wire):
121 def is_empty(self, wire):
122
122
123 try:
123 try:
124 return self.lookup(wire, -1) == 0
124 return self.lookup(wire, -1) == 0
125 except Exception:
125 except Exception:
126 log.exception("failed to read object_store")
126 log.exception("failed to read object_store")
127 return False
127 return False
128
128
129 def check_url(self, url, config_items):
129 def check_url(self, url, config_items):
130 # this can throw exception if not installed, but we detect this
130 # this can throw exception if not installed, but we detect this
131 from hgsubversion import svnrepo
131 from hgsubversion import svnrepo
132
132
133 baseui = self._hg_factory._create_config(config_items)
133 baseui = self._hg_factory._create_config(config_items)
134 # the uuid function gets a valid UUID only from a proper repo, else it
134 # the uuid function gets a valid UUID only from a proper repo, else it
135 # throws an exception
135 # throws an exception
136 try:
136 try:
137 svnrepo.svnremoterepo(baseui, url).svn.uuid
137 svnrepo.svnremoterepo(baseui, url).svn.uuid
138 except Exception:
138 except Exception:
139 tb = traceback.format_exc()
139 tb = traceback.format_exc()
140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
141 raise URLError(
141 raise URLError(
142 '"%s" is not a valid Subversion source url.' % (url, ))
142 '"%s" is not a valid Subversion source url.' % (url, ))
143 return True
143 return True
144
144
145 def is_path_valid_repository(self, wire, path):
145 def is_path_valid_repository(self, wire, path):
146
146
147 # NOTE(marcink): short-circuit the check for an SVN repo;
147 # NOTE(marcink): short-circuit the check for an SVN repo;
148 # the repos.open call might be expensive, but we have one cheap
148 # the repos.open call might be expensive, but we have one cheap
149 # precondition we can use: check for the 'format' file
149 # precondition we can use: check for the 'format' file
150
150
151 if not os.path.isfile(os.path.join(path, 'format')):
151 if not os.path.isfile(os.path.join(path, 'format')):
152 return False
152 return False
153
153
154 try:
154 try:
155 svn.repos.open(path)
155 svn.repos.open(path)
156 except svn.core.SubversionException:
156 except svn.core.SubversionException:
157 tb = traceback.format_exc()
157 tb = traceback.format_exc()
158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
159 return False
159 return False
160 return True
160 return True
161
161
162 @reraise_safe_exceptions
162 @reraise_safe_exceptions
163 def verify(self, wire,):
163 def verify(self, wire,):
164 repo_path = wire['path']
164 repo_path = wire['path']
165 if not self.is_path_valid_repository(wire, repo_path):
165 if not self.is_path_valid_repository(wire, repo_path):
166 raise Exception(
166 raise Exception(
167 "Path %s is not a valid Subversion repository." % repo_path)
167 "Path %s is not a valid Subversion repository." % repo_path)
168
168
169 cmd = ['svnadmin', 'info', repo_path]
169 cmd = ['svnadmin', 'info', repo_path]
170 stdout, stderr = subprocessio.run_command(cmd)
170 stdout, stderr = subprocessio.run_command(cmd)
171 return stdout
171 return stdout
172
172
173 def lookup(self, wire, revision):
173 def lookup(self, wire, revision):
174 if revision not in [-1, None, 'HEAD']:
174 if revision not in [-1, None, 'HEAD']:
175 raise NotImplementedError
175 raise NotImplementedError
176 repo = self._factory.repo(wire)
176 repo = self._factory.repo(wire)
177 fs_ptr = svn.repos.fs(repo)
177 fs_ptr = svn.repos.fs(repo)
178 head = svn.fs.youngest_rev(fs_ptr)
178 head = svn.fs.youngest_rev(fs_ptr)
179 return head
179 return head
180
180
181 def lookup_interval(self, wire, start_ts, end_ts):
181 def lookup_interval(self, wire, start_ts, end_ts):
182 repo = self._factory.repo(wire)
182 repo = self._factory.repo(wire)
183 fsobj = svn.repos.fs(repo)
183 fsobj = svn.repos.fs(repo)
184 start_rev = None
184 start_rev = None
185 end_rev = None
185 end_rev = None
186 if start_ts:
186 if start_ts:
187 start_ts_svn = apr_time_t(start_ts)
187 start_ts_svn = apr_time_t(start_ts)
188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
189 else:
189 else:
190 start_rev = 1
190 start_rev = 1
191 if end_ts:
191 if end_ts:
192 end_ts_svn = apr_time_t(end_ts)
192 end_ts_svn = apr_time_t(end_ts)
193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
194 else:
194 else:
195 end_rev = svn.fs.youngest_rev(fsobj)
195 end_rev = svn.fs.youngest_rev(fsobj)
196 return start_rev, end_rev
196 return start_rev, end_rev
197
197
198 def revision_properties(self, wire, revision):
198 def revision_properties(self, wire, revision):
199
199
200 cache_on, context_uid, repo_id = self._cache_on(wire)
200 cache_on, context_uid, repo_id = self._cache_on(wire)
201 @self.region.conditional_cache_on_arguments(condition=cache_on)
201 @self.region.conditional_cache_on_arguments(condition=cache_on)
202 def _revision_properties(_repo_id, _revision):
202 def _revision_properties(_repo_id, _revision):
203 repo = self._factory.repo(wire)
203 repo = self._factory.repo(wire)
204 fs_ptr = svn.repos.fs(repo)
204 fs_ptr = svn.repos.fs(repo)
205 return svn.fs.revision_proplist(fs_ptr, revision)
205 return svn.fs.revision_proplist(fs_ptr, revision)
206 return _revision_properties(repo_id, revision)
206 return _revision_properties(repo_id, revision)
207
207
208 def revision_changes(self, wire, revision):
208 def revision_changes(self, wire, revision):
209
209
210 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
211 fsobj = svn.repos.fs(repo)
211 fsobj = svn.repos.fs(repo)
212 rev_root = svn.fs.revision_root(fsobj, revision)
212 rev_root = svn.fs.revision_root(fsobj, revision)
213
213
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 base_dir = ""
216 base_dir = ""
217 send_deltas = False
217 send_deltas = False
218 svn.repos.replay2(
218 svn.repos.replay2(
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 editor_ptr, editor_baton, None)
220 editor_ptr, editor_baton, None)
221
221
222 added = []
222 added = []
223 changed = []
223 changed = []
224 removed = []
224 removed = []
225
225
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 for path, change in editor.changes.iteritems():
227 for path, change in editor.changes.iteritems():
228 # TODO: Decide what to do with directory nodes. Subversion can add
228 # TODO: Decide what to do with directory nodes. Subversion can add
229 # empty directories.
229 # empty directories.
230
230
231 if change.item_kind == svn.core.svn_node_dir:
231 if change.item_kind == svn.core.svn_node_dir:
232 continue
232 continue
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 added.append(path)
234 added.append(path)
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 svn.repos.CHANGE_ACTION_REPLACE]:
236 svn.repos.CHANGE_ACTION_REPLACE]:
237 changed.append(path)
237 changed.append(path)
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 removed.append(path)
239 removed.append(path)
240 else:
240 else:
241 raise NotImplementedError(
241 raise NotImplementedError(
242 "Action %s not supported on path %s" % (
242 "Action %s not supported on path %s" % (
243 change.action, path))
243 change.action, path))
244
244
245 changes = {
245 changes = {
246 'added': added,
246 'added': added,
247 'changed': changed,
247 'changed': changed,
248 'removed': removed,
248 'removed': removed,
249 }
249 }
250 return changes
250 return changes
251
251
252 @reraise_safe_exceptions
252 @reraise_safe_exceptions
253 def node_history(self, wire, path, revision, limit):
253 def node_history(self, wire, path, revision, limit):
254 cache_on, context_uid, repo_id = self._cache_on(wire)
254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 cross_copies = False
257 cross_copies = False
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 fsobj = svn.repos.fs(repo)
259 fsobj = svn.repos.fs(repo)
260 rev_root = svn.fs.revision_root(fsobj, revision)
260 rev_root = svn.fs.revision_root(fsobj, revision)
261
261
262 history_revisions = []
262 history_revisions = []
263 history = svn.fs.node_history(rev_root, path)
263 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.history_prev(history, cross_copies)
264 history = svn.fs.history_prev(history, cross_copies)
265 while history:
265 while history:
266 __, node_revision = svn.fs.history_location(history)
266 __, node_revision = svn.fs.history_location(history)
267 history_revisions.append(node_revision)
267 history_revisions.append(node_revision)
268 if limit and len(history_revisions) >= limit:
268 if limit and len(history_revisions) >= limit:
269 break
269 break
270 history = svn.fs.history_prev(history, cross_copies)
270 history = svn.fs.history_prev(history, cross_copies)
271 return history_revisions
271 return history_revisions
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273
273
274 def node_properties(self, wire, path, revision):
274 def node_properties(self, wire, path, revision):
275 cache_on, context_uid, repo_id = self._cache_on(wire)
275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 @self.region.conditional_cache_on_arguments(condition=cache_on)
276 @self.region.conditional_cache_on_arguments(condition=cache_on)
277 def _node_properties(_repo_id, _path, _revision):
277 def _node_properties(_repo_id, _path, _revision):
278 repo = self._factory.repo(wire)
278 repo = self._factory.repo(wire)
279 fsobj = svn.repos.fs(repo)
279 fsobj = svn.repos.fs(repo)
280 rev_root = svn.fs.revision_root(fsobj, revision)
280 rev_root = svn.fs.revision_root(fsobj, revision)
281 return svn.fs.node_proplist(rev_root, path)
281 return svn.fs.node_proplist(rev_root, path)
282 return _node_properties(repo_id, path, revision)
282 return _node_properties(repo_id, path, revision)
283
283
284 def file_annotate(self, wire, path, revision):
284 def file_annotate(self, wire, path, revision):
285 abs_path = 'file://' + urllib.pathname2url(
285 abs_path = 'file://' + urllib.pathname2url(
286 vcspath.join(wire['path'], path))
286 vcspath.join(wire['path'], path))
287 file_uri = svn.core.svn_path_canonicalize(abs_path)
287 file_uri = svn.core.svn_path_canonicalize(abs_path)
288
288
289 start_rev = svn_opt_revision_value_t(0)
289 start_rev = svn_opt_revision_value_t(0)
290 peg_rev = svn_opt_revision_value_t(revision)
290 peg_rev = svn_opt_revision_value_t(revision)
291 end_rev = peg_rev
291 end_rev = peg_rev
292
292
293 annotations = []
293 annotations = []
294
294
295 def receiver(line_no, revision, author, date, line, pool):
295 def receiver(line_no, revision, author, date, line, pool):
296 annotations.append((line_no, revision, line))
296 annotations.append((line_no, revision, line))
297
297
298 # TODO: Cannot use blame5, missing typemap function in the swig code
298 # TODO: Cannot use blame5, missing typemap function in the swig code
299 try:
299 try:
300 svn.client.blame2(
300 svn.client.blame2(
301 file_uri, peg_rev, start_rev, end_rev,
301 file_uri, peg_rev, start_rev, end_rev,
302 receiver, svn.client.create_context())
302 receiver, svn.client.create_context())
303 except svn.core.SubversionException as exc:
303 except svn.core.SubversionException as exc:
304 log.exception("Error during blame operation.")
304 log.exception("Error during blame operation.")
305 raise Exception(
305 raise Exception(
306 "Blame not supported or file does not exist at path %s. "
306 "Blame not supported or file does not exist at path %s. "
307 "Error %s." % (path, exc))
307 "Error %s." % (path, exc))
308
308
309 return annotations
309 return annotations
310
310
311 def get_node_type(self, wire, path, revision=None):
311 def get_node_type(self, wire, path, revision=None):
312
312
313 cache_on, context_uid, repo_id = self._cache_on(wire)
313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 def _get_node_type(_repo_id, _path, _revision):
315 def _get_node_type(_repo_id, _path, _revision):
316 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
317 fs_ptr = svn.repos.fs(repo)
317 fs_ptr = svn.repos.fs(repo)
318 if _revision is None:
318 if _revision is None:
319 _revision = svn.fs.youngest_rev(fs_ptr)
319 _revision = svn.fs.youngest_rev(fs_ptr)
320 root = svn.fs.revision_root(fs_ptr, _revision)
320 root = svn.fs.revision_root(fs_ptr, _revision)
321 node = svn.fs.check_path(root, path)
321 node = svn.fs.check_path(root, path)
322 return NODE_TYPE_MAPPING.get(node, None)
322 return NODE_TYPE_MAPPING.get(node, None)
323 return _get_node_type(repo_id, path, revision)
323 return _get_node_type(repo_id, path, revision)
324
324
325 def get_nodes(self, wire, path, revision=None):
325 def get_nodes(self, wire, path, revision=None):
326
326
327 cache_on, context_uid, repo_id = self._cache_on(wire)
327 cache_on, context_uid, repo_id = self._cache_on(wire)
328 @self.region.conditional_cache_on_arguments(condition=cache_on)
328 @self.region.conditional_cache_on_arguments(condition=cache_on)
329 def _get_nodes(_repo_id, _path, _revision):
329 def _get_nodes(_repo_id, _path, _revision):
330 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
331 fsobj = svn.repos.fs(repo)
331 fsobj = svn.repos.fs(repo)
332 if _revision is None:
332 if _revision is None:
333 _revision = svn.fs.youngest_rev(fsobj)
333 _revision = svn.fs.youngest_rev(fsobj)
334 root = svn.fs.revision_root(fsobj, _revision)
334 root = svn.fs.revision_root(fsobj, _revision)
335 entries = svn.fs.dir_entries(root, _path)
336 result = []
336 result = []
337 for entry_path, entry_info in entries.iteritems():
337 for entry_path, entry_info in entries.iteritems():
338 result.append(
338 result.append(
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 return result
340 return result
341 return _get_nodes(repo_id, path, revision)
341 return _get_nodes(repo_id, path, revision)
342
342
343 def get_file_content(self, wire, path, rev=None):
343 def get_file_content(self, wire, path, rev=None):
344 repo = self._factory.repo(wire)
344 repo = self._factory.repo(wire)
345 fsobj = svn.repos.fs(repo)
345 fsobj = svn.repos.fs(repo)
346 if rev is None:
346 if rev is None:
347 rev = svn.fs.youngest_rev(fsobj)
348 root = svn.fs.revision_root(fsobj, rev)
348 root = svn.fs.revision_root(fsobj, rev)
349 content = svn.core.Stream(svn.fs.file_contents(root, path))
349 content = svn.core.Stream(svn.fs.file_contents(root, path))
350 return content.read()
350 return content.read()
351
351
352 def get_file_size(self, wire, path, revision=None):
352 def get_file_size(self, wire, path, revision=None):
353
353
354 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 @self.region.conditional_cache_on_arguments(condition=cache_on)
355 @self.region.conditional_cache_on_arguments(condition=cache_on)
356 def _get_file_size(_repo_id, _path, _revision):
356 def _get_file_size(_repo_id, _path, _revision):
357 repo = self._factory.repo(wire)
357 repo = self._factory.repo(wire)
358 fsobj = svn.repos.fs(repo)
358 fsobj = svn.repos.fs(repo)
359 if _revision is None:
359 if _revision is None:
360 _revision = svn.fs.youngest_rev(fsobj)
361 root = svn.fs.revision_root(fsobj, _revision)
361 root = svn.fs.revision_root(fsobj, _revision)
362 size = svn.fs.file_length(root, _path)
363 return size
363 return size
364 return _get_file_size(repo_id, path, revision)
364 return _get_file_size(repo_id, path, revision)
365
365
366 def create_repository(self, wire, compatible_version=None):
366 def create_repository(self, wire, compatible_version=None):
367 log.info('Creating Subversion repository in path "%s"', wire['path'])
367 log.info('Creating Subversion repository in path "%s"', wire['path'])
368 self._factory.repo(wire, create=True,
368 self._factory.repo(wire, create=True,
369 compatible_version=compatible_version)
369 compatible_version=compatible_version)
370
370
371 def get_url_and_credentials(self, src_url):
371 def get_url_and_credentials(self, src_url):
372 obj = urlparse.urlparse(src_url)
372 obj = urlparse.urlparse(src_url)
373 username = obj.username or None
373 username = obj.username or None
374 password = obj.password or None
374 password = obj.password or None
375 return username, password, src_url
375 return username, password, src_url
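# Illustrative example (editorial, not part of the commit): for a source URL such
# as 'https://user:secret@svn.example.com/repo' the helper above returns
# ('user', 'secret', <the same URL unchanged>); the credentials are only
# extracted so they can be handed to the svnrdump call below.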
376
376
377 def import_remote_repository(self, wire, src_url):
377 def import_remote_repository(self, wire, src_url):
378 repo_path = wire['path']
378 repo_path = wire['path']
379 if not self.is_path_valid_repository(wire, repo_path):
379 if not self.is_path_valid_repository(wire, repo_path):
380 raise Exception(
380 raise Exception(
381 "Path %s is not a valid Subversion repository." % repo_path)
381 "Path %s is not a valid Subversion repository." % repo_path)
382
382
383 username, password, src_url = self.get_url_and_credentials(src_url)
383 username, password, src_url = self.get_url_and_credentials(src_url)
384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
385 '--trust-server-cert-failures=unknown-ca']
385 '--trust-server-cert-failures=unknown-ca']
386 if username and password:
386 if username and password:
387 rdump_cmd += ['--username', username, '--password', password]
387 rdump_cmd += ['--username', username, '--password', password]
388 rdump_cmd += [src_url]
388 rdump_cmd += [src_url]
389
389
390 rdump = subprocess.Popen(
390 rdump = subprocess.Popen(
391 rdump_cmd,
391 rdump_cmd,
392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
393 load = subprocess.Popen(
393 load = subprocess.Popen(
394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
395
395
396 # TODO: johbo: This can be a very long operation, might be better
396 # TODO: johbo: This can be a very long operation, might be better
397 # to track some kind of status and provide an api to check if the
397 # to track some kind of status and provide an api to check if the
398 # import is done.
398 # import is done.
399 rdump.wait()
399 rdump.wait()
400 load.wait()
400 load.wait()
401
401
402 log.debug('svnrdump process ended with code: %s', rdump.returncode)
403 if rdump.returncode != 0:
403 if rdump.returncode != 0:
404 errors = rdump.stderr.read()
404 errors = rdump.stderr.read()
405 log.error('svnrdump dump failed: statuscode %s: message: %s',
405 log.error('svnrdump dump failed: statuscode %s: message: %s',
406 rdump.returncode, errors)
406 rdump.returncode, errors)
407 reason = 'UNKNOWN'
407 reason = 'UNKNOWN'
408 if 'svnrdump: E230001:' in errors:
408 if 'svnrdump: E230001:' in errors:
409 reason = 'INVALID_CERTIFICATE'
409 reason = 'INVALID_CERTIFICATE'
410
410
411 if reason == 'UNKNOWN':
411 if reason == 'UNKNOWN':
412 reason = 'UNKNOWN:{}'.format(errors)
412 reason = 'UNKNOWN:{}'.format(errors)
413 raise Exception(
413 raise Exception(
414 'Failed to dump the remote repository from %s. Reason:%s' % (
414 'Failed to dump the remote repository from %s. Reason:%s' % (
415 src_url, reason))
415 src_url, reason))
416 if load.returncode != 0:
416 if load.returncode != 0:
417 raise Exception(
417 raise Exception(
418 'Failed to load the dump of remote repository from %s.' %
418 'Failed to load the dump of remote repository from %s.' %
419 (src_url, ))
419 (src_url, ))
420
420
421 def commit(self, wire, message, author, timestamp, updated, removed):
421 def commit(self, wire, message, author, timestamp, updated, removed):
422 assert isinstance(message, str)
422 assert isinstance(message, str)
423 assert isinstance(author, str)
423 assert isinstance(author, str)
424
424
425 repo = self._factory.repo(wire)
425 repo = self._factory.repo(wire)
426 fsobj = svn.repos.fs(repo)
426 fsobj = svn.repos.fs(repo)
427
427
428 rev = svn.fs.youngest_rev(fsobj)
428 rev = svn.fs.youngest_rev(fsobj)
429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
430 txn_root = svn.fs.txn_root(txn)
430 txn_root = svn.fs.txn_root(txn)
431
431
432 for node in updated:
432 for node in updated:
433 TxnNodeProcessor(node, txn_root).update()
433 TxnNodeProcessor(node, txn_root).update()
434 for node in removed:
434 for node in removed:
435 TxnNodeProcessor(node, txn_root).remove()
435 TxnNodeProcessor(node, txn_root).remove()
436
436
437 commit_id = svn.repos.fs_commit_txn(repo, txn)
437 commit_id = svn.repos.fs_commit_txn(repo, txn)
438
438
439 if timestamp:
439 if timestamp:
440 apr_time = apr_time_t(timestamp)
440 apr_time = apr_time_t(timestamp)
441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
443
443
444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
445 return commit_id
445 return commit_id
446
446
447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
448 ignore_whitespace=False, context=3):
448 ignore_whitespace=False, context=3):
449
449
450 wire.update(cache=False)
450 wire.update(cache=False)
451 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
452 diff_creator = SvnDiffer(
452 diff_creator = SvnDiffer(
453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
454 try:
454 try:
455 return diff_creator.generate_diff()
455 return diff_creator.generate_diff()
456 except svn.core.SubversionException as e:
456 except svn.core.SubversionException as e:
457 log.exception(
458 "Error during diff operation. "
459 "Path might not exist %s, %s" % (path1, path2))
460 return ""
460 return ""
461
461
462 @reraise_safe_exceptions
462 @reraise_safe_exceptions
463 def is_large_file(self, wire, path):
463 def is_large_file(self, wire, path):
464 return False
464 return False
465
465
466 @reraise_safe_exceptions
466 @reraise_safe_exceptions
467 def is_binary(self, wire, rev, path):
467 def is_binary(self, wire, rev, path):
468 cache_on, context_uid, repo_id = self._cache_on(wire)
468 cache_on, context_uid, repo_id = self._cache_on(wire)
469
469
470 @self.region.conditional_cache_on_arguments(condition=cache_on)
470 @self.region.conditional_cache_on_arguments(condition=cache_on)
471 def _is_binary(_repo_id, _rev, _path):
471 def _is_binary(_repo_id, _rev, _path):
472 raw_bytes = self.get_file_content(wire, _path, _rev)
473 return raw_bytes and '\0' in raw_bytes
473 return raw_bytes and '\0' in raw_bytes
474
474
475 return _is_binary(repo_id, rev, path)
475 return _is_binary(repo_id, rev, path)
476
476
477 @reraise_safe_exceptions
477 @reraise_safe_exceptions
478 def run_svn_command(self, wire, cmd, **opts):
478 def run_svn_command(self, wire, cmd, **opts):
479 path = wire.get('path', None)
479 path = wire.get('path', None)
480
480
481 if path and os.path.isdir(path):
481 if path and os.path.isdir(path):
482 opts['cwd'] = path
482 opts['cwd'] = path
483
483
484 safe_call = False
484 safe_call = False
485 if '_safe' in opts:
485 if '_safe' in opts:
486 safe_call = True
486 safe_call = True
487
487
488 svnenv = os.environ.copy()
488 svnenv = os.environ.copy()
489 svnenv.update(opts.pop('extra_env', {}))
489 svnenv.update(opts.pop('extra_env', {}))
490
490
491 _opts = {'env': svnenv, 'shell': False}
491 _opts = {'env': svnenv, 'shell': False}
492
492
493 try:
493 try:
494 _opts.update(opts)
494 _opts.update(opts)
495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
496
496
497 return ''.join(p), ''.join(p.error)
497 return ''.join(p), ''.join(p.error)
498 except (EnvironmentError, OSError) as err:
498 except (EnvironmentError, OSError) as err:
499 cmd = ' '.join(cmd) # human friendly CMD
499 cmd = ' '.join(cmd) # human friendly CMD
500 tb_err = ("Couldn't run svn command (%s).\n"
500 tb_err = ("Couldn't run svn command (%s).\n"
501 "Original error was:%s\n"
501 "Original error was:%s\n"
502 "Call options:%s\n"
502 "Call options:%s\n"
503 % (cmd, err, _opts))
503 % (cmd, err, _opts))
504 log.exception(tb_err)
504 log.exception(tb_err)
505 if safe_call:
505 if safe_call:
506 return '', err
506 return '', err
507 else:
507 else:
508 raise exceptions.VcsException()(tb_err)
508 raise exceptions.VcsException()(tb_err)
509
509
510 @reraise_safe_exceptions
510 @reraise_safe_exceptions
511 def install_hooks(self, wire, force=False):
511 def install_hooks(self, wire, force=False):
512 from vcsserver.hook_utils import install_svn_hooks
512 from vcsserver.hook_utils import install_svn_hooks
513 repo_path = wire['path']
513 repo_path = wire['path']
514 binary_dir = settings.BINARY_DIR
514 binary_dir = settings.BINARY_DIR
515 executable = None
515 executable = None
516 if binary_dir:
516 if binary_dir:
517 executable = os.path.join(binary_dir, 'python')
517 executable = os.path.join(binary_dir, 'python')
518 return install_svn_hooks(
518 return install_svn_hooks(
519 repo_path, executable=executable, force_create=force)
519 repo_path, executable=executable, force_create=force)
520
520
521 @reraise_safe_exceptions
521 @reraise_safe_exceptions
522 def get_hooks_info(self, wire):
522 def get_hooks_info(self, wire):
523 from vcsserver.hook_utils import (
523 from vcsserver.hook_utils import (
524 get_svn_pre_hook_version, get_svn_post_hook_version)
524 get_svn_pre_hook_version, get_svn_post_hook_version)
525 repo_path = wire['path']
525 repo_path = wire['path']
526 return {
526 return {
527 'pre_version': get_svn_pre_hook_version(repo_path),
527 'pre_version': get_svn_pre_hook_version(repo_path),
528 'post_version': get_svn_post_hook_version(repo_path),
528 'post_version': get_svn_post_hook_version(repo_path),
529 }
529 }
530
530
531
531
532 class SvnDiffer(object):
532 class SvnDiffer(object):
533 """
533 """
534 Utility to create diffs based on difflib and the Subversion API.
535 """
535 """
536
536
537 binary_content = False
537 binary_content = False
538
538
539 def __init__(
539 def __init__(
540 self, repo, src_rev, src_path, tgt_rev, tgt_path,
540 self, repo, src_rev, src_path, tgt_rev, tgt_path,
541 ignore_whitespace, context):
541 ignore_whitespace, context):
542 self.repo = repo
542 self.repo = repo
543 self.ignore_whitespace = ignore_whitespace
543 self.ignore_whitespace = ignore_whitespace
544 self.context = context
544 self.context = context
545
545
546 fsobj = svn.repos.fs(repo)
546 fsobj = svn.repos.fs(repo)
547
547
548 self.tgt_rev = tgt_rev
548 self.tgt_rev = tgt_rev
549 self.tgt_path = tgt_path or ''
549 self.tgt_path = tgt_path or ''
550 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
550 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
551 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
551 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
552
552
553 self.src_rev = src_rev
553 self.src_rev = src_rev
554 self.src_path = src_path or self.tgt_path
554 self.src_path = src_path or self.tgt_path
555 self.src_root = svn.fs.revision_root(fsobj, src_rev)
555 self.src_root = svn.fs.revision_root(fsobj, src_rev)
556 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
556 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
557
557
558 self._validate()
558 self._validate()
559
559
560 def _validate(self):
560 def _validate(self):
561 if (self.tgt_kind != svn.core.svn_node_none and
561 if (self.tgt_kind != svn.core.svn_node_none and
562 self.src_kind != svn.core.svn_node_none and
562 self.src_kind != svn.core.svn_node_none and
563 self.src_kind != self.tgt_kind):
563 self.src_kind != self.tgt_kind):
564 # TODO: johbo: proper error handling
564 # TODO: johbo: proper error handling
565 raise Exception(
565 raise Exception(
566 "Source and target are not compatible for diff generation. "
566 "Source and target are not compatible for diff generation. "
567 "Source type: %s, target type: %s" %
567 "Source type: %s, target type: %s" %
568 (self.src_kind, self.tgt_kind))
568 (self.src_kind, self.tgt_kind))
569
569
570 def generate_diff(self):
570 def generate_diff(self):
571 buf = StringIO.StringIO()
571 buf = StringIO.StringIO()
572 if self.tgt_kind == svn.core.svn_node_dir:
572 if self.tgt_kind == svn.core.svn_node_dir:
573 self._generate_dir_diff(buf)
573 self._generate_dir_diff(buf)
574 else:
574 else:
575 self._generate_file_diff(buf)
575 self._generate_file_diff(buf)
576 return buf.getvalue()
576 return buf.getvalue()
577
577
578 def _generate_dir_diff(self, buf):
578 def _generate_dir_diff(self, buf):
579 editor = DiffChangeEditor()
579 editor = DiffChangeEditor()
580 editor_ptr, editor_baton = svn.delta.make_editor(editor)
580 editor_ptr, editor_baton = svn.delta.make_editor(editor)
581 svn.repos.dir_delta2(
581 svn.repos.dir_delta2(
582 self.src_root,
582 self.src_root,
583 self.src_path,
583 self.src_path,
584 '', # src_entry
584 '', # src_entry
585 self.tgt_root,
585 self.tgt_root,
586 self.tgt_path,
586 self.tgt_path,
587 editor_ptr, editor_baton,
587 editor_ptr, editor_baton,
588 authorization_callback_allow_all,
588 authorization_callback_allow_all,
589 False, # text_deltas
589 False, # text_deltas
590 svn.core.svn_depth_infinity, # depth
590 svn.core.svn_depth_infinity, # depth
591 False, # entry_props
591 False, # entry_props
592 False, # ignore_ancestry
592 False, # ignore_ancestry
593 )
593 )
594
594
595 for path, __, change in sorted(editor.changes):
595 for path, __, change in sorted(editor.changes):
596 self._generate_node_diff(
596 self._generate_node_diff(
597 buf, change, path, self.tgt_path, path, self.src_path)
597 buf, change, path, self.tgt_path, path, self.src_path)
598
598
599 def _generate_file_diff(self, buf):
599 def _generate_file_diff(self, buf):
600 change = None
600 change = None
601 if self.src_kind == svn.core.svn_node_none:
601 if self.src_kind == svn.core.svn_node_none:
602 change = "add"
602 change = "add"
603 elif self.tgt_kind == svn.core.svn_node_none:
603 elif self.tgt_kind == svn.core.svn_node_none:
604 change = "delete"
604 change = "delete"
605 tgt_base, tgt_path = vcspath.split(self.tgt_path)
605 tgt_base, tgt_path = vcspath.split(self.tgt_path)
606 src_base, src_path = vcspath.split(self.src_path)
606 src_base, src_path = vcspath.split(self.src_path)
607 self._generate_node_diff(
607 self._generate_node_diff(
608 buf, change, tgt_path, tgt_base, src_path, src_base)
608 buf, change, tgt_path, tgt_base, src_path, src_base)
609
609
610 def _generate_node_diff(
610 def _generate_node_diff(
611 self, buf, change, tgt_path, tgt_base, src_path, src_base):
611 self, buf, change, tgt_path, tgt_base, src_path, src_base):
612
612
613 if self.src_rev == self.tgt_rev and tgt_base == src_base:
613 if self.src_rev == self.tgt_rev and tgt_base == src_base:
614 # keep behaviour consistent with git/hg: return an empty diff when
615 # comparing the same revision within the same base path
616 return
616 return
617
617
618 tgt_full_path = vcspath.join(tgt_base, tgt_path)
618 tgt_full_path = vcspath.join(tgt_base, tgt_path)
619 src_full_path = vcspath.join(src_base, src_path)
619 src_full_path = vcspath.join(src_base, src_path)
620
620
621 self.binary_content = False
621 self.binary_content = False
622 mime_type = self._get_mime_type(tgt_full_path)
622 mime_type = self._get_mime_type(tgt_full_path)
623
623
624 if mime_type and not mime_type.startswith('text'):
624 if mime_type and not mime_type.startswith('text'):
625 self.binary_content = True
625 self.binary_content = True
626 buf.write("=" * 67 + '\n')
626 buf.write("=" * 67 + '\n')
627 buf.write("Cannot display: file marked as a binary type.\n")
627 buf.write("Cannot display: file marked as a binary type.\n")
628 buf.write("svn:mime-type = %s\n" % mime_type)
628 buf.write("svn:mime-type = %s\n" % mime_type)
629 buf.write("Index: %s\n" % (tgt_path, ))
629 buf.write("Index: %s\n" % (tgt_path, ))
630 buf.write("=" * 67 + '\n')
630 buf.write("=" * 67 + '\n')
631 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
631 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
632 'tgt_path': tgt_path})
632 'tgt_path': tgt_path})
633
633
634 if change == 'add':
634 if change == 'add':
635 # TODO: johbo: SVN is missing a zero here compared to git
635 # TODO: johbo: SVN is missing a zero here compared to git
636 buf.write("new file mode 10644\n")
636 buf.write("new file mode 10644\n")
637
637
638 #TODO(marcink): intro to binary detection of svn patches
638 #TODO(marcink): intro to binary detection of svn patches
639 # if self.binary_content:
639 # if self.binary_content:
640 # buf.write('GIT binary patch\n')
640 # buf.write('GIT binary patch\n')
641
641
642 buf.write("--- /dev/null\t(revision 0)\n")
642 buf.write("--- /dev/null\t(revision 0)\n")
643 src_lines = []
643 src_lines = []
644 else:
644 else:
645 if change == 'delete':
645 if change == 'delete':
646 buf.write("deleted file mode 10644\n")
646 buf.write("deleted file mode 10644\n")
647
647
648 #TODO(marcink): intro to binary detection of svn patches
648 #TODO(marcink): intro to binary detection of svn patches
649 # if self.binary_content:
649 # if self.binary_content:
650 # buf.write('GIT binary patch\n')
650 # buf.write('GIT binary patch\n')
651
651
652 buf.write("--- a/%s\t(revision %s)\n" % (
652 buf.write("--- a/%s\t(revision %s)\n" % (
653 src_path, self.src_rev))
653 src_path, self.src_rev))
654 src_lines = self._svn_readlines(self.src_root, src_full_path)
654 src_lines = self._svn_readlines(self.src_root, src_full_path)
655
655
656 if change == 'delete':
656 if change == 'delete':
657 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
657 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
658 tgt_lines = []
658 tgt_lines = []
659 else:
659 else:
660 buf.write("+++ b/%s\t(revision %s)\n" % (
660 buf.write("+++ b/%s\t(revision %s)\n" % (
661 tgt_path, self.tgt_rev))
661 tgt_path, self.tgt_rev))
662 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
662 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
663
663
664 if not self.binary_content:
664 if not self.binary_content:
665 udiff = svn_diff.unified_diff(
665 udiff = svn_diff.unified_diff(
666 src_lines, tgt_lines, context=self.context,
666 src_lines, tgt_lines, context=self.context,
667 ignore_blank_lines=self.ignore_whitespace,
667 ignore_blank_lines=self.ignore_whitespace,
668 ignore_case=False,
668 ignore_case=False,
669 ignore_space_changes=self.ignore_whitespace)
669 ignore_space_changes=self.ignore_whitespace)
670 buf.writelines(udiff)
670 buf.writelines(udiff)
671
671
672 def _get_mime_type(self, path):
672 def _get_mime_type(self, path):
673 try:
673 try:
674 mime_type = svn.fs.node_prop(
674 mime_type = svn.fs.node_prop(
675 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
675 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
676 except svn.core.SubversionException:
676 except svn.core.SubversionException:
677 mime_type = svn.fs.node_prop(
677 mime_type = svn.fs.node_prop(
678 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
678 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
679 return mime_type
679 return mime_type
680
680
681 def _svn_readlines(self, fs_root, node_path):
681 def _svn_readlines(self, fs_root, node_path):
682 if self.binary_content:
682 if self.binary_content:
683 return []
683 return []
684 node_kind = svn.fs.check_path(fs_root, node_path)
684 node_kind = svn.fs.check_path(fs_root, node_path)
685 if node_kind not in (
685 if node_kind not in (
686 svn.core.svn_node_file, svn.core.svn_node_symlink):
686 svn.core.svn_node_file, svn.core.svn_node_symlink):
687 return []
687 return []
688 content = svn.core.Stream(
688 content = svn.core.Stream(
689 svn.fs.file_contents(fs_root, node_path)).read()
689 svn.fs.file_contents(fs_root, node_path)).read()
690 return content.splitlines(True)
690 return content.splitlines(True)
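# Illustrative sketch (editorial, not part of the original commit): how SvnDiffer
# is typically driven, mirroring SvnRemote.diff above. `repo` is assumed to be an
# already opened svn.repos handle and the revisions plain integers; the helper
# name is made up for illustration.
def _example_generate_patch(repo, rev1, rev2, path1=None, path2=None):
    differ = SvnDiffer(repo, rev1, path1, rev2, path2,
                       ignore_whitespace=False, context=3)
    return differ.generate_diff()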
691
691
692
692
693 class DiffChangeEditor(svn.delta.Editor):
693 class DiffChangeEditor(svn.delta.Editor):
694 """
694 """
695 Records changes between two given revisions
695 Records changes between two given revisions
696 """
696 """
697
697
698 def __init__(self):
698 def __init__(self):
699 self.changes = []
699 self.changes = []
700
700
701 def delete_entry(self, path, revision, parent_baton, pool=None):
701 def delete_entry(self, path, revision, parent_baton, pool=None):
702 self.changes.append((path, None, 'delete'))
702 self.changes.append((path, None, 'delete'))
703
703
704 def add_file(
704 def add_file(
705 self, path, parent_baton, copyfrom_path, copyfrom_revision,
705 self, path, parent_baton, copyfrom_path, copyfrom_revision,
706 file_pool=None):
706 file_pool=None):
707 self.changes.append((path, 'file', 'add'))
707 self.changes.append((path, 'file', 'add'))
708
708
709 def open_file(self, path, parent_baton, base_revision, file_pool=None):
709 def open_file(self, path, parent_baton, base_revision, file_pool=None):
710 self.changes.append((path, 'file', 'change'))
710 self.changes.append((path, 'file', 'change'))
711
711
712
712
713 def authorization_callback_allow_all(root, path, pool):
713 def authorization_callback_allow_all(root, path, pool):
714 return True
714 return True
715
715
716
716
717 class TxnNodeProcessor(object):
717 class TxnNodeProcessor(object):
718 """
718 """
719 Utility to process the change of one node within a transaction root.
719 Utility to process the change of one node within a transaction root.
720
720
721 It encapsulates the knowledge of how to add, update or remove
721 It encapsulates the knowledge of how to add, update or remove
722 a node for a given transaction root. The purpose is to support the method
722 a node for a given transaction root. The purpose is to support the method
723 `SvnRemote.commit`.
723 `SvnRemote.commit`.
724 """
724 """
725
725
726 def __init__(self, node, txn_root):
726 def __init__(self, node, txn_root):
727 assert isinstance(node['path'], str)
727 assert isinstance(node['path'], str)
728
728
729 self.node = node
729 self.node = node
730 self.txn_root = txn_root
730 self.txn_root = txn_root
731
731
732 def update(self):
732 def update(self):
733 self._ensure_parent_dirs()
733 self._ensure_parent_dirs()
734 self._add_file_if_node_does_not_exist()
734 self._add_file_if_node_does_not_exist()
735 self._update_file_content()
735 self._update_file_content()
736 self._update_file_properties()
736 self._update_file_properties()
737
737
738 def remove(self):
738 def remove(self):
739 svn.fs.delete(self.txn_root, self.node['path'])
739 svn.fs.delete(self.txn_root, self.node['path'])
740 # TODO: Clean up directory if empty
740 # TODO: Clean up directory if empty
741
741
742 def _ensure_parent_dirs(self):
742 def _ensure_parent_dirs(self):
743 curdir = vcspath.dirname(self.node['path'])
743 curdir = vcspath.dirname(self.node['path'])
744 dirs_to_create = []
744 dirs_to_create = []
745 while not self._svn_path_exists(curdir):
745 while not self._svn_path_exists(curdir):
746 dirs_to_create.append(curdir)
746 dirs_to_create.append(curdir)
747 curdir = vcspath.dirname(curdir)
747 curdir = vcspath.dirname(curdir)
748
748
749 for curdir in reversed(dirs_to_create):
749 for curdir in reversed(dirs_to_create):
750 log.debug('Creating missing directory "%s"', curdir)
750 log.debug('Creating missing directory "%s"', curdir)
751 svn.fs.make_dir(self.txn_root, curdir)
751 svn.fs.make_dir(self.txn_root, curdir)
752
752
753 def _svn_path_exists(self, path):
753 def _svn_path_exists(self, path):
754 path_status = svn.fs.check_path(self.txn_root, path)
754 path_status = svn.fs.check_path(self.txn_root, path)
755 return path_status != svn.core.svn_node_none
755 return path_status != svn.core.svn_node_none
756
756
757 def _add_file_if_node_does_not_exist(self):
757 def _add_file_if_node_does_not_exist(self):
758 kind = svn.fs.check_path(self.txn_root, self.node['path'])
758 kind = svn.fs.check_path(self.txn_root, self.node['path'])
759 if kind == svn.core.svn_node_none:
759 if kind == svn.core.svn_node_none:
760 svn.fs.make_file(self.txn_root, self.node['path'])
760 svn.fs.make_file(self.txn_root, self.node['path'])
761
761
762 def _update_file_content(self):
762 def _update_file_content(self):
763 assert isinstance(self.node['content'], str)
763 assert isinstance(self.node['content'], str)
764 handler, baton = svn.fs.apply_textdelta(
764 handler, baton = svn.fs.apply_textdelta(
765 self.txn_root, self.node['path'], None, None)
765 self.txn_root, self.node['path'], None, None)
766 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
766 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
767
767
768 def _update_file_properties(self):
768 def _update_file_properties(self):
769 properties = self.node.get('properties', {})
769 properties = self.node.get('properties', {})
770 for key, value in properties.iteritems():
770 for key, value in properties.iteritems():
771 svn.fs.change_node_prop(
771 svn.fs.change_node_prop(
772 self.txn_root, self.node['path'], key, value)
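# Editorial note (not from the original source): each node dict handled by
# TxnNodeProcessor is expected to look roughly like
#   {'path': 'docs/readme.txt', 'content': 'new text\n', 'properties': {...}}
# where 'properties' is optional; see SvnRemote.commit above for the caller.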
772 self.txn_root, self.node['path'], key, value)
773
773
774
774
775 def apr_time_t(timestamp):
775 def apr_time_t(timestamp):
776 """
776 """
777 Convert a Python timestamp into APR timestamp type apr_time_t
777 Convert a Python timestamp into APR timestamp type apr_time_t
778 """
778 """
779 return timestamp * 1E6
779 return timestamp * 1E6
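# Worked example (editorial): a Unix timestamp of 1577836800 (2020-01-01 UTC)
# becomes 1577836800000000.0 APR microseconds; note the 1E6 literal makes the
# result a float rather than an integer.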
780
780
781
781
782 def svn_opt_revision_value_t(num):
782 def svn_opt_revision_value_t(num):
783 """
783 """
784 Put `num` into a `svn_opt_revision_value_t` structure.
784 Put `num` into a `svn_opt_revision_value_t` structure.
785 """
785 """
786 value = svn.core.svn_opt_revision_value_t()
786 value = svn.core.svn_opt_revision_value_t()
787 value.number = num
787 value.number = num
788 revision = svn.core.svn_opt_revision_t()
788 revision = svn.core.svn_opt_revision_t()
789 revision.kind = svn.core.svn_opt_revision_number
789 revision.kind = svn.core.svn_opt_revision_number
790 revision.value = value
790 revision.value = value
791 return revision
791 return revision
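# Illustrative check (editorial, not part of the commit): the helper above wraps
# a plain revision number so it can be passed where the bindings expect an
# svn_opt_revision_t, e.g. as the peg/start/end revisions in file_annotate.
# rev = svn_opt_revision_value_t(42)
# assert rev.kind == svn.core.svn_opt_revision_number
# assert rev.value.number == 42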
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19
19
20 import pytest
20 import pytest
21
21
22
22
23 def pytest_addoption(parser):
23 def pytest_addoption(parser):
24 parser.addoption(
24 parser.addoption(
25 '--repeat', type=int, default=100,
25 '--repeat', type=int, default=100,
26 help="Number of repetitions in performance tests.")
26 help="Number of repetitions in performance tests.")
27
27
28
28
29 @pytest.fixture(scope='session')
29 @pytest.fixture(scope='session')
30 def repeat(request):
30 def repeat(request):
31 """
31 """
32 The number of repetitions is based on this fixture.
32 The number of repetitions is based on this fixture.
33
33
34 Slower calls may divide it by 10 or 100. It is chosen so that the
35 tests are not too slow in our default test suite.
36 """
36 """
37 return request.config.getoption('--repeat')
37 return request.config.getoption('--repeat')
38
38
39
39
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print('Using vcsserver port %s' % (port, ))
43 print('Using vcsserver port %s' % (port, ))
44 return port
44 return port
45
45
46
46
47 def get_available_port():
47 def get_available_port():
48 family = socket.AF_INET
48 family = socket.AF_INET
49 socktype = socket.SOCK_STREAM
49 socktype = socket.SOCK_STREAM
50 host = '127.0.0.1'
50 host = '127.0.0.1'
51
51
52 mysocket = socket.socket(family, socktype)
52 mysocket = socket.socket(family, socktype)
53 mysocket.bind((host, 0))
53 mysocket.bind((host, 0))
54 port = mysocket.getsockname()[1]
54 port = mysocket.getsockname()[1]
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
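# Illustrative usage (editorial, not part of the original conftest; the test name
# is made up): a test can simply take the session-scoped fixture. Note the port
# is only known to have been free at the moment get_available_port() probed it,
# so binding it again can race with other processes.
def test_reported_port_can_be_bound(vcsserver_port):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('127.0.0.1', vcsserver_port))  # may race if the port was reused
    sock.close()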
@@ -1,86 +1,86 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21
22 import configobj
22 import configobj
23
23
24
24
25 class ContextINI(object):
25 class ContextINI(object):
26 """
26 """
27 Allows creating a new test.ini file as a copy of an existing one with edited
28 data. If the existing file is not present, a new one is created. Example usage::
29
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 print 'vcsserver --config=%s' % new_test_ini_path
32 """
32 """
33
33
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 destroy=True):
35 destroy=True):
36 self.ini_file_path = ini_file_path
36 self.ini_file_path = ini_file_path
37 self.ini_params = ini_params
37 self.ini_params = ini_params
38 self.new_path = None
38 self.new_path = None
39 self.new_path_prefix = new_file_prefix or 'test'
39 self.new_path_prefix = new_file_prefix or 'test'
40 self.destroy = destroy
40 self.destroy = destroy
41
41
42 def __enter__(self):
42 def __enter__(self):
43 _, pref = tempfile.mkstemp()
43 _, pref = tempfile.mkstemp()
44 loc = tempfile.gettempdir()
44 loc = tempfile.gettempdir()
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 pref, self.new_path_prefix, self.ini_file_path))
46 pref, self.new_path_prefix, self.ini_file_path))
47
47
48 # copy ini file and modify according to the params, if we re-use a file
48 # copy ini file and modify according to the params, if we re-use a file
49 if os.path.isfile(self.ini_file_path):
49 if os.path.isfile(self.ini_file_path):
50 shutil.copy(self.ini_file_path, self.new_path)
50 shutil.copy(self.ini_file_path, self.new_path)
51 else:
51 else:
52 # create new dump file for configObj to write to.
52 # create new dump file for configObj to write to.
53 with open(self.new_path, 'wb'):
53 with open(self.new_path, 'wb'):
54 pass
54 pass
55
55
56 config = configobj.ConfigObj(
56 config = configobj.ConfigObj(
57 self.new_path, file_error=True, write_empty_values=True)
57 self.new_path, file_error=True, write_empty_values=True)
58
58
59 for data in self.ini_params:
59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
60 section, ini_params = data.items()[0]
61 key, val = ini_params.items()[0]
61 key, val = ini_params.items()[0]
62 if section not in config:
62 if section not in config:
63 config[section] = {}
63 config[section] = {}
64 config[section][key] = val
64 config[section][key] = val
65
65
66 config.write()
66 config.write()
67 return self.new_path
67 return self.new_path
68
68
69 def __exit__(self, exc_type, exc_val, exc_tb):
69 def __exit__(self, exc_type, exc_val, exc_tb):
70 if self.destroy:
70 if self.destroy:
71 os.remove(self.new_path)
71 os.remove(self.new_path)
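# Illustrative usage (editorial; the command below is made up): creates a
# temporary copy of the ini file with the overrides applied and deletes it on
# exit, since `destroy` defaults to True.
# with ContextINI('vcsserver.ini', [{'server:main': {'port': '9900'}}]) as path:
#     subprocess.check_call(['vcsserver', '--config=%s' % path])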
72
72
73
73
74 def no_newline_id_generator(test_name):
74 def no_newline_id_generator(test_name):
75 """
75 """
76 Generates a test name without spaces or newline characters. Used for
77 nicer output of test progress.
78 """
78 """
79 org_name = test_name
79 org_name = test_name
80 test_name = str(test_name)\
80 test_name = str(test_name)\
81 .replace('\n', '_N') \
81 .replace('\n', '_N') \
82 .replace('\r', '_N') \
82 .replace('\r', '_N') \
83 .replace('\t', '_T') \
83 .replace('\t', '_T') \
84 .replace(' ', '_S')
84 .replace(' ', '_S')
85
85
86 return test_name or 'test-with-empty-name'
86 return test_name or 'test-with-empty-name'
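# Illustrative sketch (editorial, not in the original module): the generator
# above can serve as the `ids` callback of pytest.mark.parametrize so that
# parameter values containing whitespace still yield readable test ids.
import pytest

@pytest.mark.parametrize('sample', ['line one\nline two', 'tab\there'],
                         ids=no_newline_id_generator)
def test_sample_is_text(sample):
    assert isinstance(sample, str)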
@@ -1,160 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.pull(
81 self.remote_git.pull(
82 wire={}, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102
102
103 class TestReraiseSafeExceptions(object):
103 class TestReraiseSafeExceptions(object):
104
104
105 def test_method_decorated_with_reraise_safe_exceptions(self):
105 def test_method_decorated_with_reraise_safe_exceptions(self):
106 factory = Mock()
106 factory = Mock()
107 git_remote = git.GitRemote(factory)
107 git_remote = git.GitRemote(factory)
108
108
109 def fake_function():
109 def fake_function():
110 return None
110 return None
111
111
112 decorator = git.reraise_safe_exceptions(fake_function)
112 decorator = git.reraise_safe_exceptions(fake_function)
113
113
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 for method_name, method in methods:
115 for method_name, method in methods:
116 if not method_name.startswith('_'):
116 if not method_name.startswith('_'):
117 assert method.im_func.__code__ == decorator.__code__
117 assert method.im_func.__code__ == decorator.__code__
118
118
119 @pytest.mark.parametrize('side_effect, expected_type', [
119 @pytest.mark.parametrize('side_effect, expected_type', [
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
124 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.HangupException(), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
126 ])
126 ])
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
128 @git.reraise_safe_exceptions
128 @git.reraise_safe_exceptions
129 def fake_method():
129 def fake_method():
130 raise side_effect
130 raise side_effect
131
131
132 with pytest.raises(Exception) as exc_info:
132 with pytest.raises(Exception) as exc_info:
133 fake_method()
133 fake_method()
134 assert type(exc_info.value) == Exception
134 assert type(exc_info.value) == Exception
135 assert exc_info.value._vcs_kind == expected_type
135 assert exc_info.value._vcs_kind == expected_type
136
136
137
137
138 class TestDulwichRepoWrapper(object):
138 class TestDulwichRepoWrapper(object):
139 def test_calls_close_on_delete(self):
139 def test_calls_close_on_delete(self):
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
144 del repo
144 del repo
145 close_mock.assert_called_once_with()
145 close_mock.assert_called_once_with()
146
146
147
147
148 class TestGitFactory(object):
148 class TestGitFactory(object):
149 def test_create_repo_returns_dulwich_wrapper(self):
149 def test_create_repo_returns_dulwich_wrapper(self):
150
150
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
152 mock.side_effect = {'repo_objects': ''}
152 mock.side_effect = {'repo_objects': ''}
153 factory = git.GitFactory()
153 factory = git.GitFactory()
154 wire = {
154 wire = {
155 'path': '/tmp/abcde'
155 'path': '/tmp/abcde'
156 }
156 }
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
158 with isdir_patcher:
158 with isdir_patcher:
159 result = factory._create_repo(wire, True)
159 result = factory._create_repo(wire, True)
160 assert isinstance(result, git.Repo)
160 assert isinstance(result, git.Repo)
@@ -1,108 +1,108 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
29 class TestDiff(object):
29 class TestDiff(object):
30 def test_raising_safe_exception_when_lookup_failed(self):
30 def test_raising_safe_exception_when_lookup_failed(self):
31
31
32 factory = Mock()
32 factory = Mock()
33 hg_remote = hg.HgRemote(factory)
33 hg_remote = hg.HgRemote(factory)
34 with patch('mercurial.patch.diff') as diff_mock:
34 with patch('mercurial.patch.diff') as diff_mock:
35 diff_mock.side_effect = LookupError(
35 diff_mock.side_effect = LookupError(
36 'deadbeef', 'index', 'message')
36 'deadbeef', 'index', 'message')
37 with pytest.raises(Exception) as exc_info:
37 with pytest.raises(Exception) as exc_info:
38 hg_remote.diff(
38 hg_remote.diff(
39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 file_filter=None, opt_git=True, opt_ignorews=True,
40 file_filter=None, opt_git=True, opt_ignorews=True,
41 context=3)
41 context=3)
42 assert type(exc_info.value) == Exception
42 assert type(exc_info.value) == Exception
43 assert exc_info.value._vcs_kind == 'lookup'
43 assert exc_info.value._vcs_kind == 'lookup'
44
44
45
45
46 class TestReraiseSafeExceptions(object):
46 class TestReraiseSafeExceptions(object):
47 def test_method_decorated_with_reraise_safe_exceptions(self):
47 def test_method_decorated_with_reraise_safe_exceptions(self):
48 factory = Mock()
48 factory = Mock()
49 hg_remote = hg.HgRemote(factory)
49 hg_remote = hg.HgRemote(factory)
50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
51 decorator = hg.reraise_safe_exceptions(None)
51 decorator = hg.reraise_safe_exceptions(None)
52 for method_name, method in methods:
52 for method_name, method in methods:
53 if not method_name.startswith('_'):
53 if not method_name.startswith('_'):
54 assert method.im_func.__code__ == decorator.__code__
54 assert method.im_func.__code__ == decorator.__code__
55
55
56 @pytest.mark.parametrize('side_effect, expected_type', [
56 @pytest.mark.parametrize('side_effect, expected_type', [
57 (hgcompat.Abort(), 'abort'),
57 (hgcompat.Abort(), 'abort'),
58 (hgcompat.InterventionRequired(), 'abort'),
58 (hgcompat.InterventionRequired(), 'abort'),
59 (hgcompat.RepoLookupError(), 'lookup'),
59 (hgcompat.RepoLookupError(), 'lookup'),
60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61 (hgcompat.RepoError(), 'error'),
61 (hgcompat.RepoError(), 'error'),
62 (hgcompat.RequirementError(), 'requirement'),
62 (hgcompat.RequirementError(), 'requirement'),
63 ])
63 ])
64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
65 @hg.reraise_safe_exceptions
65 @hg.reraise_safe_exceptions
66 def fake_method():
66 def fake_method():
67 raise side_effect
67 raise side_effect
68
68
69 with pytest.raises(Exception) as exc_info:
69 with pytest.raises(Exception) as exc_info:
70 fake_method()
70 fake_method()
71 assert type(exc_info.value) == Exception
71 assert type(exc_info.value) == Exception
72 assert exc_info.value._vcs_kind == expected_type
72 assert exc_info.value._vcs_kind == expected_type
73
73
74 def test_keeps_original_traceback(self):
74 def test_keeps_original_traceback(self):
75 @hg.reraise_safe_exceptions
75 @hg.reraise_safe_exceptions
76 def fake_method():
76 def fake_method():
77 try:
77 try:
78 raise hgcompat.Abort()
78 raise hgcompat.Abort()
79 except:
79 except:
80 self.original_traceback = traceback.format_tb(
80 self.original_traceback = traceback.format_tb(
81 sys.exc_info()[2])
81 sys.exc_info()[2])
82 raise
82 raise
83
83
84 try:
84 try:
85 fake_method()
85 fake_method()
86 except Exception:
86 except Exception:
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
88
88
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 assert new_traceback_tail == self.original_traceback
90 assert new_traceback_tail == self.original_traceback
91
91
92 def test_maps_unknow_exceptions_to_unhandled(self):
92 def test_maps_unknow_exceptions_to_unhandled(self):
93 @hg.reraise_safe_exceptions
93 @hg.reraise_safe_exceptions
94 def stub_method():
94 def stub_method():
95 raise ValueError('stub')
95 raise ValueError('stub')
96
96
97 with pytest.raises(Exception) as exc_info:
97 with pytest.raises(Exception) as exc_info:
98 stub_method()
98 stub_method()
99 assert exc_info.value._vcs_kind == 'unhandled'
99 assert exc_info.value._vcs_kind == 'unhandled'
100
100
101 def test_does_not_map_known_exceptions(self):
101 def test_does_not_map_known_exceptions(self):
102 @hg.reraise_safe_exceptions
102 @hg.reraise_safe_exceptions
103 def stub_method():
103 def stub_method():
104 raise exceptions.LookupException()('stub')
104 raise exceptions.LookupException()('stub')
105
105
106 with pytest.raises(Exception) as exc_info:
106 with pytest.raises(Exception) as exc_info:
107 stub_method()
107 stub_method()
108 assert exc_info.value._vcs_kind == 'lookup'
108 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,124 +1,124 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 orig_capabilities):
36 orig_capabilities):
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
38 hgcompat.largefiles.proto, stub_extensions)
38 hgcompat.largefiles.proto, stub_extensions)
39
39
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
41
41
42 stub_extensions.assert_called_once_with(stub_ui)
42 stub_extensions.assert_called_once_with(stub_ui)
43 assert LARGEFILES_CAPABILITY not in caps
43 assert LARGEFILES_CAPABILITY not in caps
44
44
45
45
46 def test_dynamic_capabilities_ignores_updated_capabilities(
46 def test_dynamic_capabilities_ignores_updated_capabilities(
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 orig_capabilities):
48 orig_capabilities):
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
51 hgcompat.largefiles.proto, stub_extensions)
51 hgcompat.largefiles.proto, stub_extensions)
52
52
53 # This happens when the extension is loaded for the first time, important
53 # This happens when the extension is loaded for the first time, important
54 # to ensure that an updated function is correctly picked up.
54 # to ensure that an updated function is correctly picked up.
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
56 side_effect=Exception('Must not be called'))
56 side_effect=Exception('Must not be called'))
57
57
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
59
59
60
60
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 orig_capabilities):
63 orig_capabilities):
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
65
65
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
67 hgcompat.largefiles.proto, stub_extensions)
67 hgcompat.largefiles.proto, stub_extensions)
68
68
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
70
70
71 stub_extensions.assert_called_once_with(stub_ui)
71 stub_extensions.assert_called_once_with(stub_ui)
72 assert LARGEFILES_CAPABILITY in caps
72 assert LARGEFILES_CAPABILITY in caps
73
73
74
74
75 def test_hgsubversion_import():
75 def test_hgsubversion_import():
76 from hgsubversion import svnrepo
76 from hgsubversion import svnrepo
77 assert svnrepo
77 assert svnrepo
78
78
79
79
80 @pytest.fixture
80 @pytest.fixture
81 def patched_capabilities(request):
81 def patched_capabilities(request):
82 """
82 """
83 Patch in `capabilitiesorig` and restore both capability functions.
83 Patch in `capabilitiesorig` and restore both capability functions.
84 """
84 """
85 lfproto = hgcompat.largefiles.proto
85 lfproto = hgcompat.largefiles.proto
86 orig_capabilities = lfproto._capabilities
86 orig_capabilities = lfproto._capabilities
87
87
88 @request.addfinalizer
88 @request.addfinalizer
89 def restore():
89 def restore():
90 lfproto._capabilities = orig_capabilities
90 lfproto._capabilities = orig_capabilities
91
91
92
92
93 @pytest.fixture
93 @pytest.fixture
94 def stub_repo(stub_ui):
94 def stub_repo(stub_ui):
95 repo = mock.Mock()
95 repo = mock.Mock()
96 repo.ui = stub_ui
96 repo.ui = stub_ui
97 return repo
97 return repo
98
98
99
99
100 @pytest.fixture
100 @pytest.fixture
101 def stub_proto(stub_ui):
101 def stub_proto(stub_ui):
102 proto = mock.Mock()
102 proto = mock.Mock()
103 proto.ui = stub_ui
103 proto.ui = stub_ui
104 return proto
104 return proto
105
105
106
106
107 @pytest.fixture
107 @pytest.fixture
108 def orig_capabilities():
108 def orig_capabilities():
109 from mercurial.wireprotov1server import wireprotocaps
109 from mercurial.wireprotov1server import wireprotocaps
110
110
111 def _capabilities(repo, proto):
111 def _capabilities(repo, proto):
112 return wireprotocaps
112 return wireprotocaps
113 return _capabilities
113 return _capabilities
114
114
115
115
116 @pytest.fixture
116 @pytest.fixture
117 def stub_ui():
117 def stub_ui():
118 return hgcompat.ui.ui()
118 return hgcompat.ui.ui()
119
119
120
120
121 @pytest.fixture
121 @pytest.fixture
122 def stub_extensions():
122 def stub_extensions():
123 extensions = mock.Mock(return_value=tuple())
123 extensions = mock.Mock(return_value=tuple())
124 return extensions
124 return extensions
@@ -1,241 +1,241 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import threading
20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
21 from BaseHTTPServer import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
22 from SocketServer import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
27 import simplejson as json
28
28
29 from vcsserver import hooks
29 from vcsserver import hooks
30
30
31
31
32 def get_hg_ui(extras=None):
32 def get_hg_ui(extras=None):
33 """Create a Config object with a valid RC_SCM_DATA entry."""
33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 extras = extras or {}
34 extras = extras or {}
35 required_extras = {
35 required_extras = {
36 'username': '',
36 'username': '',
37 'repository': '',
37 'repository': '',
38 'locked_by': '',
38 'locked_by': '',
39 'scm': '',
39 'scm': '',
40 'make_lock': '',
40 'make_lock': '',
41 'action': '',
41 'action': '',
42 'ip': '',
42 'ip': '',
43 'hooks_uri': 'fake_hooks_uri',
43 'hooks_uri': 'fake_hooks_uri',
44 }
44 }
45 required_extras.update(extras)
45 required_extras.update(extras)
46 hg_ui = mercurial.ui.ui()
46 hg_ui = mercurial.ui.ui()
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48
48
49 return hg_ui
49 return hg_ui
50
50
51
51
52 def test_git_pre_receive_is_disabled():
52 def test_git_pre_receive_is_disabled():
53 extras = {'hooks': ['pull']}
53 extras = {'hooks': ['pull']}
54 response = hooks.git_pre_receive(None, None,
54 response = hooks.git_pre_receive(None, None,
55 {'RC_SCM_DATA': json.dumps(extras)})
55 {'RC_SCM_DATA': json.dumps(extras)})
56
56
57 assert response == 0
57 assert response == 0
58
58
59
59
60 def test_git_post_receive_is_disabled():
60 def test_git_post_receive_is_disabled():
61 extras = {'hooks': ['pull']}
61 extras = {'hooks': ['pull']}
62 response = hooks.git_post_receive(None, '',
62 response = hooks.git_post_receive(None, '',
63 {'RC_SCM_DATA': json.dumps(extras)})
63 {'RC_SCM_DATA': json.dumps(extras)})
64
64
65 assert response == 0
65 assert response == 0
66
66
67
67
68 def test_git_post_receive_calls_repo_size():
68 def test_git_post_receive_calls_repo_size():
69 extras = {'hooks': ['push', 'repo_size']}
69 extras = {'hooks': ['push', 'repo_size']}
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 hooks.git_post_receive(
71 hooks.git_post_receive(
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
73 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 expected_calls = [
75 expected_calls = [
76 mock.call('repo_size', extras, mock.ANY),
76 mock.call('repo_size', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
78 ]
78 ]
79 assert call_hook_mock.call_args_list == expected_calls
79 assert call_hook_mock.call_args_list == expected_calls
80
80
81
81
82 def test_git_post_receive_does_not_call_disabled_repo_size():
82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 extras = {'hooks': ['push']}
83 extras = {'hooks': ['push']}
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 hooks.git_post_receive(
85 hooks.git_post_receive(
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
87 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 expected_calls = [
89 expected_calls = [
90 mock.call('post_push', extras, mock.ANY)
90 mock.call('post_push', extras, mock.ANY)
91 ]
91 ]
92 assert call_hook_mock.call_args_list == expected_calls
92 assert call_hook_mock.call_args_list == expected_calls
93
93
94
94
95 def test_repo_size_exception_does_not_affect_git_post_receive():
95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 extras = {'hooks': ['push', 'repo_size']}
96 extras = {'hooks': ['push', 'repo_size']}
97 status = 0
97 status = 0
98
98
99 def side_effect(name, *args, **kwargs):
99 def side_effect(name, *args, **kwargs):
100 if name == 'repo_size':
100 if name == 'repo_size':
101 raise Exception('Fake exception')
101 raise Exception('Fake exception')
102 else:
102 else:
103 return status
103 return status
104
104
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 call_hook_mock.side_effect = side_effect
106 call_hook_mock.side_effect = side_effect
107 result = hooks.git_post_receive(
107 result = hooks.git_post_receive(
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 assert result == status
109 assert result == status
110
110
111
111
112 def test_git_pre_pull_is_disabled():
112 def test_git_pre_pull_is_disabled():
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114
114
115
115
116 def test_git_post_pull_is_disabled():
116 def test_git_post_pull_is_disabled():
117 assert (
117 assert (
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119
119
120
120
121 class TestGetHooksClient(object):
121 class TestGetHooksClient(object):
122
122
123 def test_returns_http_client_when_protocol_matches(self):
123 def test_returns_http_client_when_protocol_matches(self):
124 hooks_uri = 'localhost:8000'
124 hooks_uri = 'localhost:8000'
125 result = hooks._get_hooks_client({
125 result = hooks._get_hooks_client({
126 'hooks_uri': hooks_uri,
126 'hooks_uri': hooks_uri,
127 'hooks_protocol': 'http'
127 'hooks_protocol': 'http'
128 })
128 })
129 assert isinstance(result, hooks.HooksHttpClient)
129 assert isinstance(result, hooks.HooksHttpClient)
130 assert result.hooks_uri == hooks_uri
130 assert result.hooks_uri == hooks_uri
131
131
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 fake_module = mock.Mock()
133 fake_module = mock.Mock()
134 import_patcher = mock.patch.object(
134 import_patcher = mock.patch.object(
135 hooks.importlib, 'import_module', return_value=fake_module)
135 hooks.importlib, 'import_module', return_value=fake_module)
136 fake_module_name = 'fake.module'
136 fake_module_name = 'fake.module'
137 with import_patcher as import_mock:
137 with import_patcher as import_mock:
138 result = hooks._get_hooks_client(
138 result = hooks._get_hooks_client(
139 {'hooks_module': fake_module_name})
139 {'hooks_module': fake_module_name})
140
140
141 import_mock.assert_called_once_with(fake_module_name)
141 import_mock.assert_called_once_with(fake_module_name)
142 assert isinstance(result, hooks.HooksDummyClient)
142 assert isinstance(result, hooks.HooksDummyClient)
143 assert result._hooks_module == fake_module
143 assert result._hooks_module == fake_module
144
144
145
145
146 class TestHooksHttpClient(object):
146 class TestHooksHttpClient(object):
147 def test_init_sets_hooks_uri(self):
147 def test_init_sets_hooks_uri(self):
148 uri = 'localhost:3000'
148 uri = 'localhost:3000'
149 client = hooks.HooksHttpClient(uri)
149 client = hooks.HooksHttpClient(uri)
150 assert client.hooks_uri == uri
150 assert client.hooks_uri == uri
151
151
152 def test_serialize_returns_json_string(self):
152 def test_serialize_returns_json_string(self):
153 client = hooks.HooksHttpClient('localhost:3000')
153 client = hooks.HooksHttpClient('localhost:3000')
154 hook_name = 'test'
154 hook_name = 'test'
155 extras = {
155 extras = {
156 'first': 1,
156 'first': 1,
157 'second': 'two'
157 'second': 'two'
158 }
158 }
159 result = client._serialize(hook_name, extras)
159 result = client._serialize(hook_name, extras)
160 expected_result = json.dumps({
160 expected_result = json.dumps({
161 'method': hook_name,
161 'method': hook_name,
162 'extras': extras
162 'extras': extras
163 })
163 })
164 assert result == expected_result
164 assert result == expected_result
165
165
166 def test_call_queries_http_server(self, http_mirror):
166 def test_call_queries_http_server(self, http_mirror):
167 client = hooks.HooksHttpClient(http_mirror.uri)
167 client = hooks.HooksHttpClient(http_mirror.uri)
168 hook_name = 'test'
168 hook_name = 'test'
169 extras = {
169 extras = {
170 'first': 1,
170 'first': 1,
171 'second': 'two'
171 'second': 'two'
172 }
172 }
173 result = client(hook_name, extras)
173 result = client(hook_name, extras)
174 expected_result = {
174 expected_result = {
175 'method': hook_name,
175 'method': hook_name,
176 'extras': extras
176 'extras': extras
177 }
177 }
178 assert result == expected_result
178 assert result == expected_result
179
179
180
180
181 class TestHooksDummyClient(object):
181 class TestHooksDummyClient(object):
182 def test_init_imports_hooks_module(self):
182 def test_init_imports_hooks_module(self):
183 hooks_module_name = 'rhodecode.fake.module'
183 hooks_module_name = 'rhodecode.fake.module'
184 hooks_module = mock.MagicMock()
184 hooks_module = mock.MagicMock()
185
185
186 import_patcher = mock.patch.object(
186 import_patcher = mock.patch.object(
187 hooks.importlib, 'import_module', return_value=hooks_module)
187 hooks.importlib, 'import_module', return_value=hooks_module)
188 with import_patcher as import_mock:
188 with import_patcher as import_mock:
189 client = hooks.HooksDummyClient(hooks_module_name)
189 client = hooks.HooksDummyClient(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
191 assert client._hooks_module == hooks_module
191 assert client._hooks_module == hooks_module
192
192
193 def test_call_returns_hook_result(self):
193 def test_call_returns_hook_result(self):
194 hooks_module_name = 'rhodecode.fake.module'
194 hooks_module_name = 'rhodecode.fake.module'
195 hooks_module = mock.MagicMock()
195 hooks_module = mock.MagicMock()
196 import_patcher = mock.patch.object(
196 import_patcher = mock.patch.object(
197 hooks.importlib, 'import_module', return_value=hooks_module)
197 hooks.importlib, 'import_module', return_value=hooks_module)
198 with import_patcher:
198 with import_patcher:
199 client = hooks.HooksDummyClient(hooks_module_name)
199 client = hooks.HooksDummyClient(hooks_module_name)
200
200
201 result = client('post_push', {})
201 result = client('post_push', {})
202 hooks_module.Hooks.assert_called_once_with()
202 hooks_module.Hooks.assert_called_once_with()
203 assert result == hooks_module.Hooks().__enter__().post_push()
203 assert result == hooks_module.Hooks().__enter__().post_push()
204
204
205
205
206 @pytest.fixture
206 @pytest.fixture
207 def http_mirror(request):
207 def http_mirror(request):
208 server = MirrorHttpServer()
208 server = MirrorHttpServer()
209 request.addfinalizer(server.stop)
209 request.addfinalizer(server.stop)
210 return server
210 return server
211
211
212
212
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 def do_POST(self):
214 def do_POST(self):
215 length = int(self.headers['Content-Length'])
215 length = int(self.headers['Content-Length'])
216 body = self.rfile.read(length).decode('utf-8')
216 body = self.rfile.read(length).decode('utf-8')
217 self.send_response(200)
217 self.send_response(200)
218 self.end_headers()
218 self.end_headers()
219 self.wfile.write(body)
219 self.wfile.write(body)
220
220
221
221
222 class MirrorHttpServer(object):
222 class MirrorHttpServer(object):
223 ip_address = '127.0.0.1'
223 ip_address = '127.0.0.1'
224 port = 0
224 port = 0
225
225
226 def __init__(self):
226 def __init__(self):
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 _, self.port = self._daemon.server_address
228 _, self.port = self._daemon.server_address
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 self._thread.daemon = True
230 self._thread.daemon = True
231 self._thread.start()
231 self._thread.start()
232
232
233 def stop(self):
233 def stop(self):
234 self._daemon.shutdown()
234 self._daemon.shutdown()
235 self._thread.join()
235 self._thread.join()
236 self._daemon = None
236 self._daemon = None
237 self._thread = None
237 self._thread = None
238
238
239 @property
239 @property
240 def uri(self):
240 def uri(self):
241 return '{}:{}'.format(self.ip_address, self.port)
241 return '{}:{}'.format(self.ip_address, self.port)
@@ -1,206 +1,206 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import stat
20 import stat
21 import pytest
21 import pytest
22 import vcsserver
22 import vcsserver
23 import tempfile
23 import tempfile
24 from vcsserver import hook_utils
24 from vcsserver import hook_utils
25 from vcsserver.tests.fixture import no_newline_id_generator
25 from vcsserver.tests.fixture import no_newline_id_generator
26 from vcsserver.utils import AttributeDict
26 from vcsserver.utils import AttributeDict
27
27
28
28
29 class TestCheckRhodecodeHook(object):
29 class TestCheckRhodecodeHook(object):
30
30
31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 with open(hook, 'wb') as f:
33 with open(hook, 'wb') as f:
34 f.write('dummy test')
34 f.write('dummy test')
35 result = hook_utils.check_rhodecode_hook(hook)
35 result = hook_utils.check_rhodecode_hook(hook)
36 assert result is False
36 assert result is False
37
37
38 def test_returns_true_when_no_hook_file_found(self, tmpdir):
38 def test_returns_true_when_no_hook_file_found(self, tmpdir):
39 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
39 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
40 result = hook_utils.check_rhodecode_hook(hook)
40 result = hook_utils.check_rhodecode_hook(hook)
41 assert result
41 assert result
42
42
43 @pytest.mark.parametrize("file_content, expected_result", [
43 @pytest.mark.parametrize("file_content, expected_result", [
44 ("RC_HOOK_VER = '3.3.3'\n", True),
44 ("RC_HOOK_VER = '3.3.3'\n", True),
45 ("RC_HOOK = '3.3.3'\n", False),
45 ("RC_HOOK = '3.3.3'\n", False),
46 ], ids=no_newline_id_generator)
46 ], ids=no_newline_id_generator)
47 def test_signatures(self, file_content, expected_result, tmpdir):
47 def test_signatures(self, file_content, expected_result, tmpdir):
48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 with open(hook, 'wb') as f:
49 with open(hook, 'wb') as f:
50 f.write(file_content)
50 f.write(file_content)
51
51
52 result = hook_utils.check_rhodecode_hook(hook)
52 result = hook_utils.check_rhodecode_hook(hook)
53
53
54 assert result is expected_result
54 assert result is expected_result
55
55
56
56
57 class BaseInstallHooks(object):
57 class BaseInstallHooks(object):
58 HOOK_FILES = ()
58 HOOK_FILES = ()
59
59
60 def _check_hook_file_mode(self, file_path):
60 def _check_hook_file_mode(self, file_path):
61 assert os.path.exists(file_path), 'path %s missing' % file_path
61 assert os.path.exists(file_path), 'path %s missing' % file_path
62 stat_info = os.stat(file_path)
62 stat_info = os.stat(file_path)
63
63
64 file_mode = stat.S_IMODE(stat_info.st_mode)
64 file_mode = stat.S_IMODE(stat_info.st_mode)
65 expected_mode = int('755', 8)
65 expected_mode = int('755', 8)
66 assert expected_mode == file_mode
66 assert expected_mode == file_mode
67
67
68 def _check_hook_file_content(self, file_path, executable):
68 def _check_hook_file_content(self, file_path, executable):
69 executable = executable or sys.executable
69 executable = executable or sys.executable
70 with open(file_path, 'rt') as hook_file:
70 with open(file_path, 'rt') as hook_file:
71 content = hook_file.read()
71 content = hook_file.read()
72
72
73 expected_env = '#!{}'.format(executable)
73 expected_env = '#!{}'.format(executable)
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 vcsserver.__version__)
75 vcsserver.__version__)
76 assert content.strip().startswith(expected_env)
76 assert content.strip().startswith(expected_env)
77 assert expected_rc_version in content
77 assert expected_rc_version in content
78
78
79 def _create_fake_hook(self, file_path, content):
79 def _create_fake_hook(self, file_path, content):
80 with open(file_path, 'w') as hook_file:
80 with open(file_path, 'w') as hook_file:
81 hook_file.write(content)
81 hook_file.write(content)
82
82
83 def create_dummy_repo(self, repo_type):
83 def create_dummy_repo(self, repo_type):
84 tmpdir = tempfile.mkdtemp()
84 tmpdir = tempfile.mkdtemp()
85 repo = AttributeDict()
85 repo = AttributeDict()
86 if repo_type == 'git':
86 if repo_type == 'git':
87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 os.makedirs(repo.path)
88 os.makedirs(repo.path)
89 os.makedirs(os.path.join(repo.path, 'hooks'))
89 os.makedirs(os.path.join(repo.path, 'hooks'))
90 repo.bare = True
90 repo.bare = True
91
91
92 elif repo_type == 'svn':
92 elif repo_type == 'svn':
93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 os.makedirs(repo.path)
94 os.makedirs(repo.path)
95 os.makedirs(os.path.join(repo.path, 'hooks'))
95 os.makedirs(os.path.join(repo.path, 'hooks'))
96
96
97 return repo
97 return repo
98
98
99 def check_hooks(self, repo_path, repo_bare=True):
99 def check_hooks(self, repo_path, repo_bare=True):
100 for file_name in self.HOOK_FILES:
100 for file_name in self.HOOK_FILES:
101 if repo_bare:
101 if repo_bare:
102 file_path = os.path.join(repo_path, 'hooks', file_name)
102 file_path = os.path.join(repo_path, 'hooks', file_name)
103 else:
103 else:
104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 self._check_hook_file_mode(file_path)
105 self._check_hook_file_mode(file_path)
106 self._check_hook_file_content(file_path, sys.executable)
106 self._check_hook_file_content(file_path, sys.executable)
107
107
108
108
109 class TestInstallGitHooks(BaseInstallHooks):
109 class TestInstallGitHooks(BaseInstallHooks):
110 HOOK_FILES = ('pre-receive', 'post-receive')
110 HOOK_FILES = ('pre-receive', 'post-receive')
111
111
112 def test_hooks_are_installed(self):
112 def test_hooks_are_installed(self):
113 repo = self.create_dummy_repo('git')
113 repo = self.create_dummy_repo('git')
114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 assert result
115 assert result
116 self.check_hooks(repo.path, repo.bare)
116 self.check_hooks(repo.path, repo.bare)
117
117
118 def test_hooks_are_replaced(self):
118 def test_hooks_are_replaced(self):
119 repo = self.create_dummy_repo('git')
119 repo = self.create_dummy_repo('git')
120 hooks_path = os.path.join(repo.path, 'hooks')
120 hooks_path = os.path.join(repo.path, 'hooks')
121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 self._create_fake_hook(
122 self._create_fake_hook(
123 file_path, content="RC_HOOK_VER = 'abcde'\n")
123 file_path, content="RC_HOOK_VER = 'abcde'\n")
124
124
125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 assert result
126 assert result
127 self.check_hooks(repo.path, repo.bare)
127 self.check_hooks(repo.path, repo.bare)
128
128
129 def test_non_rc_hooks_are_not_replaced(self):
129 def test_non_rc_hooks_are_not_replaced(self):
130 repo = self.create_dummy_repo('git')
130 repo = self.create_dummy_repo('git')
131 hooks_path = os.path.join(repo.path, 'hooks')
131 hooks_path = os.path.join(repo.path, 'hooks')
132 non_rc_content = 'echo "non rc hook"\n'
132 non_rc_content = 'echo "non rc hook"\n'
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 self._create_fake_hook(
134 self._create_fake_hook(
135 file_path, content=non_rc_content)
135 file_path, content=non_rc_content)
136
136
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 assert result
138 assert result
139
139
140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 with open(file_path, 'rt') as hook_file:
141 with open(file_path, 'rt') as hook_file:
142 content = hook_file.read()
142 content = hook_file.read()
143 assert content == non_rc_content
143 assert content == non_rc_content
144
144
145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 repo = self.create_dummy_repo('git')
146 repo = self.create_dummy_repo('git')
147 hooks_path = os.path.join(repo.path, 'hooks')
147 hooks_path = os.path.join(repo.path, 'hooks')
148 non_rc_content = 'echo "non rc hook"\n'
148 non_rc_content = 'echo "non rc hook"\n'
149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 self._create_fake_hook(
150 self._create_fake_hook(
151 file_path, content=non_rc_content)
151 file_path, content=non_rc_content)
152
152
153 result = hook_utils.install_git_hooks(
153 result = hook_utils.install_git_hooks(
154 repo.path, repo.bare, force_create=True)
154 repo.path, repo.bare, force_create=True)
155 assert result
155 assert result
156 self.check_hooks(repo.path, repo.bare)
156 self.check_hooks(repo.path, repo.bare)
157
157
158
158
159 class TestInstallSvnHooks(BaseInstallHooks):
159 class TestInstallSvnHooks(BaseInstallHooks):
160 HOOK_FILES = ('pre-commit', 'post-commit')
160 HOOK_FILES = ('pre-commit', 'post-commit')
161
161
162 def test_hooks_are_installed(self):
162 def test_hooks_are_installed(self):
163 repo = self.create_dummy_repo('svn')
163 repo = self.create_dummy_repo('svn')
164 result = hook_utils.install_svn_hooks(repo.path)
164 result = hook_utils.install_svn_hooks(repo.path)
165 assert result
165 assert result
166 self.check_hooks(repo.path)
166 self.check_hooks(repo.path)
167
167
168 def test_hooks_are_replaced(self):
168 def test_hooks_are_replaced(self):
169 repo = self.create_dummy_repo('svn')
169 repo = self.create_dummy_repo('svn')
170 hooks_path = os.path.join(repo.path, 'hooks')
170 hooks_path = os.path.join(repo.path, 'hooks')
171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 self._create_fake_hook(
172 self._create_fake_hook(
173 file_path, content="RC_HOOK_VER = 'abcde'\n")
173 file_path, content="RC_HOOK_VER = 'abcde'\n")
174
174
175 result = hook_utils.install_svn_hooks(repo.path)
175 result = hook_utils.install_svn_hooks(repo.path)
176 assert result
176 assert result
177 self.check_hooks(repo.path)
177 self.check_hooks(repo.path)
178
178
179 def test_non_rc_hooks_are_not_replaced(self):
179 def test_non_rc_hooks_are_not_replaced(self):
180 repo = self.create_dummy_repo('svn')
180 repo = self.create_dummy_repo('svn')
181 hooks_path = os.path.join(repo.path, 'hooks')
181 hooks_path = os.path.join(repo.path, 'hooks')
182 non_rc_content = 'echo "non rc hook"\n'
182 non_rc_content = 'echo "non rc hook"\n'
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 self._create_fake_hook(
184 self._create_fake_hook(
185 file_path, content=non_rc_content)
185 file_path, content=non_rc_content)
186
186
187 result = hook_utils.install_svn_hooks(repo.path)
187 result = hook_utils.install_svn_hooks(repo.path)
188 assert result
188 assert result
189
189
190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 with open(file_path, 'rt') as hook_file:
191 with open(file_path, 'rt') as hook_file:
192 content = hook_file.read()
192 content = hook_file.read()
193 assert content == non_rc_content
193 assert content == non_rc_content
194
194
195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 repo = self.create_dummy_repo('svn')
196 repo = self.create_dummy_repo('svn')
197 hooks_path = os.path.join(repo.path, 'hooks')
197 hooks_path = os.path.join(repo.path, 'hooks')
198 non_rc_content = 'echo "non rc hook"\n'
198 non_rc_content = 'echo "non rc hook"\n'
199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 self._create_fake_hook(
200 self._create_fake_hook(
201 file_path, content=non_rc_content)
201 file_path, content=non_rc_content)
202
202
203 result = hook_utils.install_svn_hooks(
203 result = hook_utils.install_svn_hooks(
204 repo.path, force_create=True)
204 repo.path, force_create=True)
205 assert result
205 assert result
206 self.check_hooks(repo.path, )
206 self.check_hooks(repo.path, )
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import http_main
21 from vcsserver import http_main
22 from vcsserver.base import obfuscate_qs
22 from vcsserver.base import obfuscate_qs
23
23
24
24
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 http_main.main({})
28 http_main.main({})
29 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
30
30
31
31
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 @mock.patch(
34 @mock.patch(
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 http_main.main({})
38 http_main.main({})
39
39
40
40
41 @pytest.mark.parametrize('given, expected', [
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
47 ('', ''),
48 (None, None),
48 (None, None),
49 ('foo=bar', 'foo=bar'),
49 ('foo=bar', 'foo=bar'),
50 ('auth_token=secret', 'auth_token=*****'),
50 ('auth_token=secret', 'auth_token=*****'),
51 ('auth_token=secret&api_key=secret2',
51 ('auth_token=secret&api_key=secret2',
52 'auth_token=*****&api_key=*****'),
52 'auth_token=*****&api_key=*****'),
53 ('auth_token=secret&api_key=secret2&param=value',
53 ('auth_token=secret&api_key=secret2&param=value',
54 'auth_token=*****&api_key=*****&param=value'),
54 'auth_token=*****&api_key=*****&param=value'),
55 ])
55 ])
56 def test_obfuscate_qs(given, expected):
56 def test_obfuscate_qs(given, expected):
57 assert expected == obfuscate_qs(given)
57 assert expected == obfuscate_qs(given)
@@ -1,249 +1,249 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19
19
20 import dulwich.protocol
20 import dulwich.protocol
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import webob
23 import webob
24 import webtest
24 import webtest
25
25
26 from vcsserver import hooks, pygrack
26 from vcsserver import hooks, pygrack
27
27
28 # pylint: disable=redefined-outer-name,protected-access
28 # pylint: disable=redefined-outer-name,protected-access
29
29
30
30
31 @pytest.fixture()
31 @pytest.fixture()
32 def pygrack_instance(tmpdir):
32 def pygrack_instance(tmpdir):
33 """
33 """
34 Creates a pygrack app instance.
34 Creates a pygrack app instance.
35
35
36 Right now, it does not much helpful regarding the passed directory.
36 Right now, it does not much helpful regarding the passed directory.
37 It just contains the required folders to pass the signature test.
37 It just contains the required folders to pass the signature test.
38 """
38 """
39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 tmpdir.mkdir(dir_name)
40 tmpdir.mkdir(dir_name)
41
41
42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43
43
44
44
45 @pytest.fixture()
45 @pytest.fixture()
46 def pygrack_app(pygrack_instance):
46 def pygrack_app(pygrack_instance):
47 """
47 """
48 Creates a pygrack app wrapped in webtest.TestApp.
48 Creates a pygrack app wrapped in webtest.TestApp.
49 """
49 """
50 return webtest.TestApp(pygrack_instance)
50 return webtest.TestApp(pygrack_instance)
51
51
52
52
53 def test_invalid_service_info_refs_returns_403(pygrack_app):
53 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 expect_errors=True)
55 expect_errors=True)
56
56
57 assert response.status_int == 403
57 assert response.status_int == 403
58
58
59
59
60 def test_invalid_endpoint_returns_403(pygrack_app):
60 def test_invalid_endpoint_returns_403(pygrack_app):
61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62
62
63 assert response.status_int == 403
63 assert response.status_int == 403
64
64
65
65
66 @pytest.mark.parametrize('sideband', [
66 @pytest.mark.parametrize('sideband', [
67 'side-band-64k',
67 'side-band-64k',
68 'side-band',
68 'side-band',
69 'side-band no-progress',
69 'side-band no-progress',
70 ])
70 ])
71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 request = ''.join([
72 request = ''.join([
73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 'multi_ack %s ofs-delta\n' % sideband,
74 'multi_ack %s ofs-delta\n' % sideband,
75 '0000',
75 '0000',
76 '0009done\n',
76 '0009done\n',
77 ])
77 ])
78 with mock.patch('vcsserver.hooks.git_pre_pull',
78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 return_value=hooks.HookResponse(1, 'foo')):
79 return_value=hooks.HookResponse(1, 'foo')):
80 response = pygrack_app.post(
80 response = pygrack_app.post(
81 '/git-upload-pack', params=request,
81 '/git-upload-pack', params=request,
82 content_type='application/x-git-upload-pack')
82 content_type='application/x-git-upload-pack')
83
83
84 data = io.BytesIO(response.body)
84 data = io.BytesIO(response.body)
85 proto = dulwich.protocol.Protocol(data.read, None)
85 proto = dulwich.protocol.Protocol(data.read, None)
86 packets = list(proto.read_pkt_seq())
86 packets = list(proto.read_pkt_seq())
87
87
88 expected_packets = [
88 expected_packets = [
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
91 ]
91 ]
92 assert packets == expected_packets
92 assert packets == expected_packets
93
93
94
94
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 request = ''.join([
96 request = ''.join([
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 'multi_ack ofs-delta\n'
98 'multi_ack ofs-delta\n'
99 '0000',
99 '0000',
100 '0009done\n',
100 '0009done\n',
101 ])
101 ])
102 with mock.patch('vcsserver.hooks.git_pre_pull',
102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 return_value=hooks.HookResponse(1, 'foo')):
103 return_value=hooks.HookResponse(1, 'foo')):
104 response = pygrack_app.post(
104 response = pygrack_app.post(
105 '/git-upload-pack', params=request,
105 '/git-upload-pack', params=request,
106 content_type='application/x-git-upload-pack')
106 content_type='application/x-git-upload-pack')
107
107
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109
109
110
110
111 def test_pull_has_hook_messages(pygrack_app):
111 def test_pull_has_hook_messages(pygrack_app):
112 request = ''.join([
112 request = ''.join([
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 'multi_ack side-band-64k ofs-delta\n'
114 'multi_ack side-band-64k ofs-delta\n'
115 '0000',
115 '0000',
116 '0009done\n',
116 '0009done\n',
117 ])
117 ])
118 with mock.patch('vcsserver.hooks.git_pre_pull',
118 with mock.patch('vcsserver.hooks.git_pre_pull',
119 return_value=hooks.HookResponse(0, 'foo')):
119 return_value=hooks.HookResponse(0, 'foo')):
120 with mock.patch('vcsserver.hooks.git_post_pull',
120 with mock.patch('vcsserver.hooks.git_post_pull',
121 return_value=hooks.HookResponse(1, 'bar')):
121 return_value=hooks.HookResponse(1, 'bar')):
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 return_value=['0008NAK\n0009subp\n0000']):
123 return_value=['0008NAK\n0009subp\n0000']):
124 response = pygrack_app.post(
124 response = pygrack_app.post(
125 '/git-upload-pack', params=request,
125 '/git-upload-pack', params=request,
126 content_type='application/x-git-upload-pack')
126 content_type='application/x-git-upload-pack')
127
127
128 data = io.BytesIO(response.body)
128 data = io.BytesIO(response.body)
129 proto = dulwich.protocol.Protocol(data.read, None)
129 proto = dulwich.protocol.Protocol(data.read, None)
130 packets = list(proto.read_pkt_seq())
130 packets = list(proto.read_pkt_seq())
131
131
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133
133
134
134
135 def test_get_want_capabilities(pygrack_instance):
135 def test_get_want_capabilities(pygrack_instance):
136 data = io.BytesIO(
136 data = io.BytesIO(
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
139
139
140 request = webob.Request({
140 request = webob.Request({
141 'wsgi.input': data,
141 'wsgi.input': data,
142 'REQUEST_METHOD': 'POST',
142 'REQUEST_METHOD': 'POST',
143 'webob.is_body_seekable': True
143 'webob.is_body_seekable': True
144 })
144 })
145
145
146 capabilities = pygrack_instance._get_want_capabilities(request)
146 capabilities = pygrack_instance._get_want_capabilities(request)
147
147
148 assert capabilities == frozenset(
148 assert capabilities == frozenset(
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 assert data.tell() == 0
150 assert data.tell() == 0
151
151
152
152
153 @pytest.mark.parametrize('data,capabilities,expected', [
153 @pytest.mark.parametrize('data,capabilities,expected', [
154 ('foo', [], []),
154 ('foo', [], []),
155 ('', ['side-band-64k'], []),
155 ('', ['side-band-64k'], []),
156 ('', ['side-band'], []),
156 ('', ['side-band'], []),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
163 ], ids=[
163 ], ids=[
164 'foo-empty',
164 'foo-empty',
165 'empty-64k', 'empty',
165 'empty-64k', 'empty',
166 'foo-64k', 'foo',
166 'foo-64k', 'foo',
167 'f-1000-64k', 'f-1000',
167 'f-1000-64k', 'f-1000',
168 'f-65520-64k', 'f-65520'])
168 'f-65520-64k', 'f-65520'])
169 def test_get_messages(pygrack_instance, data, capabilities, expected):
169 def test_get_messages(pygrack_instance, data, capabilities, expected):
170 messages = pygrack_instance._get_messages(data, capabilities)
170 messages = pygrack_instance._get_messages(data, capabilities)
171
171
172 assert messages == expected
172 assert messages == expected
173
173
174
174
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 # Unexpected response
176 # Unexpected response
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 # No sideband
178 # No sideband
179 ('no-sideband', [], 'foo', 'bar'),
179 ('no-sideband', [], 'foo', 'bar'),
180 # No messages
180 # No messages
181 ('no-messages', ['side-band-64k'], '', ''),
181 ('no-messages', ['side-band-64k'], '', ''),
182 ])
182 ])
183 def test_inject_messages_to_response_nothing_to_do(
183 def test_inject_messages_to_response_nothing_to_do(
184 pygrack_instance, response, capabilities, pre_pull_messages,
184 pygrack_instance, response, capabilities, pre_pull_messages,
185 post_pull_messages):
185 post_pull_messages):
186 new_response = pygrack_instance._inject_messages_to_response(
186 new_response = pygrack_instance._inject_messages_to_response(
187 response, capabilities, pre_pull_messages, post_pull_messages)
187 response, capabilities, pre_pull_messages, post_pull_messages)
188
188
189 assert new_response == response
189 assert new_response == response
190
190
191
191
192 @pytest.mark.parametrize('capabilities', [
192 @pytest.mark.parametrize('capabilities', [
193 ['side-band'],
193 ['side-band'],
194 ['side-band-64k'],
194 ['side-band-64k'],
195 ])
195 ])
196 def test_inject_messages_to_response_single_element(pygrack_instance,
196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 capabilities):
197 capabilities):
198 response = ['0008NAK\n0009subp\n0000']
198 response = ['0008NAK\n0009subp\n0000']
199 new_response = pygrack_instance._inject_messages_to_response(
199 new_response = pygrack_instance._inject_messages_to_response(
200 response, capabilities, 'foo', 'bar')
200 response, capabilities, 'foo', 'bar')
201
201
202 expected_response = [
202 expected_response = [
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
204
204
205 assert new_response == expected_response
205 assert new_response == expected_response
206
206
207
207
208 @pytest.mark.parametrize('capabilities', [
208 @pytest.mark.parametrize('capabilities', [
209 ['side-band'],
209 ['side-band'],
210 ['side-band-64k'],
210 ['side-band-64k'],
211 ])
211 ])
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 capabilities):
213 capabilities):
214 response = [
214 response = [
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 new_response = pygrack_instance._inject_messages_to_response(
216 new_response = pygrack_instance._inject_messages_to_response(
217 response, capabilities, 'foo', 'bar')
217 response, capabilities, 'foo', 'bar')
218
218
219 expected_response = [
219 expected_response = [
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 '000asubp4\n', '0008\x02bar', '0000'
221 '000asubp4\n', '0008\x02bar', '0000'
222 ]
222 ]
223
223
224 assert new_response == expected_response
224 assert new_response == expected_response
225
225
226
226
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
229
229
230 assert response == [pygrack.GitRepository.EMPTY_PACK]
230 assert response == [pygrack.GitRepository.EMPTY_PACK]
231
231
232
232
233 @pytest.mark.parametrize('capabilities', [
233 @pytest.mark.parametrize('capabilities', [
234 ['side-band'],
234 ['side-band'],
235 ['side-band-64k'],
235 ['side-band-64k'],
236 ['side-band-64k', 'no-progress'],
236 ['side-band-64k', 'no-progress'],
237 ])
237 ])
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 response = pygrack_instance._build_failed_pre_pull_response(
239 response = pygrack_instance._build_failed_pre_pull_response(
240 capabilities, 'foo')
240 capabilities, 'foo')
241
241
242 expected_response = [
242 expected_response = [
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 pygrack.GitRepository.EMPTY_PACK),
245 pygrack.GitRepository.EMPTY_PACK),
246 '0000',
246 '0000',
247 ]
247 ]
248
248
249 assert response == expected_response
249 assert response == expected_response
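
The packet strings asserted in the pygrack tests above follow git's pkt-line side-band framing: a four-hex-digit length that counts itself, one channel byte ('\x02' carries progress messages), then the payload, with whole packets capped at 1000 bytes for plain side-band and 65520 bytes for side-band-64k. A minimal sketch of that framing, consistent with the parametrized expectations above (the helper name is illustrative, not a vcsserver API):

    def frame_sideband(data, capabilities, channel='\x02'):
        # Each packet is a '%04x' length (counting the 4 length digits and
        # the channel byte), the channel byte, then the payload chunk.
        if not data or not {'side-band', 'side-band-64k'} & set(capabilities):
            return []
        max_packet = 65520 if 'side-band-64k' in capabilities else 1000
        max_payload = max_packet - 5
        chunks = (data[i:i + max_payload]
                  for i in range(0, len(data), max_payload))
        return ['%04x%s%s' % (len(chunk) + 5, channel, chunk) for chunk in chunks]

    assert frame_sideband('foo', ['side-band-64k']) == ['0008\x02foo']
    assert frame_sideband('f' * 1000, ['side-band']) == ['03e8\x02' + 'f' * 995, '000a\x02fffff']
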
@@ -1,86 +1,86 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mercurial.hg
20 import mercurial.hg
21 import mercurial.ui
21 import mercurial.ui
22 import mercurial.error
22 import mercurial.error
23 import mock
23 import mock
24 import pytest
24 import pytest
25 import webtest
25 import webtest
26
26
27 from vcsserver import scm_app
27 from vcsserver import scm_app
28
28
29
29
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 app = webtest.TestApp(scm_app.HgWeb(repo))
32 app = webtest.TestApp(scm_app.HgWeb(repo))
33
33
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35
35
36 assert response.status_int == 400
36 assert response.status_int == 400
37
37
38
38
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 config = (
41 config = (
42 ('paths', 'default', ''),
42 ('paths', 'default', ''),
43 )
43 )
44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 hgweb_mock.side_effect = mercurial.error.RequirementError()
45 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 with pytest.raises(Exception):
46 with pytest.raises(Exception):
47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48
48
49
49
50 def test_git_returns_not_found(tmpdir):
50 def test_git_returns_not_found(tmpdir):
51 app = webtest.TestApp(
51 app = webtest.TestApp(
52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53
53
54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 expect_errors=True)
55 expect_errors=True)
56
56
57 assert response.status_int == 404
57 assert response.status_int == 404
58
58
59
59
60 def test_git(tmpdir):
60 def test_git(tmpdir):
61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 tmpdir.mkdir(dir_name)
62 tmpdir.mkdir(dir_name)
63
63
64 app = webtest.TestApp(
64 app = webtest.TestApp(
65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66
66
67 # We set service to git-upload-packs to trigger a 403
67 # We set service to git-upload-packs to trigger a 403
68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 expect_errors=True)
69 expect_errors=True)
70
70
71 assert response.status_int == 403
71 assert response.status_int == 403
72
72
73
73
74 def test_git_fallbacks_to_git_folder(tmpdir):
74 def test_git_fallbacks_to_git_folder(tmpdir):
75 tmpdir.mkdir('.git')
75 tmpdir.mkdir('.git')
76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 tmpdir.mkdir(os.path.join('.git', dir_name))
77 tmpdir.mkdir(os.path.join('.git', dir_name))
78
78
79 app = webtest.TestApp(
79 app = webtest.TestApp(
80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81
81
82 # We set service to git-upload-packs to trigger a 403
82 # We set service to git-upload-packs to trigger a 403
83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 expect_errors=True)
84 expect_errors=True)
85
85
86 assert response.status_int == 403
86 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 from vcsserver.server import VcsServer
23 from vcsserver.server import VcsServer
24
24
25
25
26 def test_provides_the_pid(server):
26 def test_provides_the_pid(server):
27 pid = server.get_pid()
27 pid = server.get_pid()
28 assert pid == os.getpid()
28 assert pid == os.getpid()
29
29
30
30
31 def test_allows_to_trigger_the_garbage_collector(server):
31 def test_allows_to_trigger_the_garbage_collector(server):
32 with mock.patch('gc.collect') as collect:
32 with mock.patch('gc.collect') as collect:
33 server.run_gc()
33 server.run_gc()
34 assert collect.called
34 assert collect.called
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def server():
38 def server():
39 return VcsServer()
39 return VcsServer()
@@ -1,155 +1,155 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21
21
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25
25
26
26
27 class KindaFilelike(object): # pragma: no cover
27 class KindaFilelike(object): # pragma: no cover
28
28
29 def __init__(self, data, size):
29 def __init__(self, data, size):
30 chunks = size / len(data)
30 chunks = size / len(data)
31
31
32 self.stream = self._get_stream(data, chunks)
32 self.stream = self._get_stream(data, chunks)
33
33
34 def _get_stream(self, data, chunks):
34 def _get_stream(self, data, chunks):
35 for x in xrange(chunks):
35 for x in xrange(chunks):
36 yield data
36 yield data
37
37
38 def read(self, n):
38 def read(self, n):
39
39
40 buffer_stream = ''
40 buffer_stream = ''
41 for chunk in self.stream:
41 for chunk in self.stream:
42 buffer_stream += chunk
42 buffer_stream += chunk
43 if len(buffer_stream) >= n:
43 if len(buffer_stream) >= n:
44 break
44 break
45
45
46 # self.stream = self.bytes[n:]
46 # self.stream = self.bytes[n:]
47 return buffer_stream
47 return buffer_stream
48
48
49
49
50 @pytest.fixture(scope='module')
50 @pytest.fixture(scope='module')
51 def environ():
51 def environ():
52 """Delete coverage variables, as they make the tests fail."""
52 """Delete coverage variables, as they make the tests fail."""
53 env = dict(os.environ)
53 env = dict(os.environ)
54 for key in env.keys():
54 for key in env.keys():
55 if key.startswith('COV_CORE_'):
55 if key.startswith('COV_CORE_'):
56 del env[key]
56 del env[key]
57
57
58 return env
58 return env
59
59
60
60
61 def _get_python_args(script):
61 def _get_python_args(script):
62 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
62 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
63
63
64
64
65 def test_raise_exception_on_non_zero_return_code(environ):
65 def test_raise_exception_on_non_zero_return_code(environ):
66 args = _get_python_args('sys.exit(1)')
66 args = _get_python_args('sys.exit(1)')
67 with pytest.raises(EnvironmentError):
67 with pytest.raises(EnvironmentError):
68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
69
69
70
70
71 def test_does_not_fail_on_non_zero_return_code(environ):
71 def test_does_not_fail_on_non_zero_return_code(environ):
72 args = _get_python_args('sys.exit(1)')
72 args = _get_python_args('sys.exit(1)')
73 output = ''.join(
73 output = ''.join(
74 subprocessio.SubprocessIOChunker(
74 subprocessio.SubprocessIOChunker(
75 args, shell=False, fail_on_return_code=False, env=environ
75 args, shell=False, fail_on_return_code=False, env=environ
76 )
76 )
77 )
77 )
78
78
79 assert output == ''
79 assert output == ''
80
80
81
81
82 def test_raise_exception_on_stderr(environ):
82 def test_raise_exception_on_stderr(environ):
83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
84 with pytest.raises(EnvironmentError) as excinfo:
84 with pytest.raises(EnvironmentError) as excinfo:
85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
86
86
87 assert 'exited due to an error:\nX' in str(excinfo.value)
87 assert 'exited due to an error:\nX' in str(excinfo.value)
88
88
89
89
90 def test_does_not_fail_on_stderr(environ):
90 def test_does_not_fail_on_stderr(environ):
91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
92 output = ''.join(
92 output = ''.join(
93 subprocessio.SubprocessIOChunker(
93 subprocessio.SubprocessIOChunker(
94 args, shell=False, fail_on_stderr=False, env=environ
94 args, shell=False, fail_on_stderr=False, env=environ
95 )
95 )
96 )
96 )
97
97
98 assert output == ''
98 assert output == ''
99
99
100
100
101 @pytest.mark.parametrize('size', [1, 10 ** 5])
101 @pytest.mark.parametrize('size', [1, 10 ** 5])
102 def test_output_with_no_input(size, environ):
102 def test_output_with_no_input(size, environ):
103 print(type(environ))
103 print(type(environ))
104 data = 'X'
104 data = 'X'
105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
107
107
108 assert output == data * size
108 assert output == data * size
109
109
110
110
111 @pytest.mark.parametrize('size', [1, 10 ** 5])
111 @pytest.mark.parametrize('size', [1, 10 ** 5])
112 def test_output_with_no_input_does_not_fail(size, environ):
112 def test_output_with_no_input_does_not_fail(size, environ):
113 data = 'X'
113 data = 'X'
114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
115 output = ''.join(
115 output = ''.join(
116 subprocessio.SubprocessIOChunker(
116 subprocessio.SubprocessIOChunker(
117 args, shell=False, fail_on_return_code=False, env=environ
117 args, shell=False, fail_on_return_code=False, env=environ
118 )
118 )
119 )
119 )
120
120
121 print("{} {}".format(len(data * size), len(output)))
121 print("{} {}".format(len(data * size), len(output)))
122 assert output == data * size
122 assert output == data * size
123
123
124
124
125 @pytest.mark.parametrize('size', [1, 10 ** 5])
125 @pytest.mark.parametrize('size', [1, 10 ** 5])
126 def test_output_with_input(size, environ):
126 def test_output_with_input(size, environ):
127 data_len = size
127 data_len = size
128 inputstream = KindaFilelike('X', size)
128 inputstream = KindaFilelike('X', size)
129
129
130 # This acts like the cat command.
130 # This acts like the cat command.
131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 output = ''.join(
132 output = ''.join(
133 subprocessio.SubprocessIOChunker(
133 subprocessio.SubprocessIOChunker(
134 args, shell=False, inputstream=inputstream, env=environ
134 args, shell=False, inputstream=inputstream, env=environ
135 )
135 )
136 )
136 )
137
137
138 assert len(output) == data_len
138 assert len(output) == data_len
139
139
140
140
141 @pytest.mark.parametrize('size', [1, 10 ** 5])
141 @pytest.mark.parametrize('size', [1, 10 ** 5])
142 def test_output_with_input_skipping_iterator(size, environ):
142 def test_output_with_input_skipping_iterator(size, environ):
143 data_len = size
143 data_len = size
144 inputstream = KindaFilelike('X', size)
144 inputstream = KindaFilelike('X', size)
145
145
146 # This acts like the cat command.
146 # This acts like the cat command.
147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
148
148
149 # Note: assigning the chunker makes sure that it is not deleted too early
149 # Note: assigning the chunker makes sure that it is not deleted too early
150 chunker = subprocessio.SubprocessIOChunker(
150 chunker = subprocessio.SubprocessIOChunker(
151 args, shell=False, inputstream=inputstream, env=environ
151 args, shell=False, inputstream=inputstream, env=environ
152 )
152 )
153 output = ''.join(chunker.output)
153 output = ''.join(chunker.output)
154
154
155 assert len(output) == data_len
155 assert len(output) == data_len
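
As these tests exercise it, SubprocessIOChunker starts a command and is iterated to stream the child's stdout; joining the iterator buffers the full output, an inputstream can feed the child's stdin, and by default any stderr output or a non-zero exit status raises EnvironmentError. A short usage sketch built only from the keyword arguments seen above (the command itself is illustrative):

    import os
    import sys

    from vcsserver import subprocessio

    # Run a child process and collect its stdout; with the default flags,
    # stderr output or a non-zero exit status raises EnvironmentError.
    args = [sys.executable, '-c', 'import sys; sys.stdout.write("hello")']
    output = ''.join(
        subprocessio.SubprocessIOChunker(args, shell=False, env=dict(os.environ)))
    assert output == 'hello'
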
@@ -1,87 +1,87 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import mock
19 import mock
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23
23
24 class MockPopen(object):
24 class MockPopen(object):
25 def __init__(self, stderr):
25 def __init__(self, stderr):
26 self.stdout = io.BytesIO('')
26 self.stdout = io.BytesIO('')
27 self.stderr = io.BytesIO(stderr)
27 self.stderr = io.BytesIO(stderr)
28 self.returncode = 1
28 self.returncode = 1
29
29
30 def wait(self):
30 def wait(self):
31 pass
31 pass
32
32
33
33
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 'svnrdump: E230001: Unable to connect to a repository at URL url',
35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 ])
37 ])
38
38
39
39
40 @pytest.mark.parametrize('stderr,expected_reason', [
40 @pytest.mark.parametrize('stderr,expected_reason', [
41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
42 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 @pytest.mark.xfail(sys.platform == "cygwin",
44 @pytest.mark.xfail(sys.platform == "cygwin",
45 reason="SVN not packaged for Cygwin")
45 reason="SVN not packaged for Cygwin")
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 from vcsserver import svn
47 from vcsserver import svn
48 factory = mock.Mock()
48 factory = mock.Mock()
49 factory.repo = mock.Mock(return_value=mock.Mock())
49 factory.repo = mock.Mock(return_value=mock.Mock())
50
50
51 remote = svn.SvnRemote(factory)
51 remote = svn.SvnRemote(factory)
52 remote.is_path_valid_repository = lambda wire, path: True
52 remote.is_path_valid_repository = lambda wire, path: True
53
53
54 with mock.patch('subprocess.Popen',
54 with mock.patch('subprocess.Popen',
55 return_value=MockPopen(stderr)):
55 return_value=MockPopen(stderr)):
56 with pytest.raises(Exception) as excinfo:
56 with pytest.raises(Exception) as excinfo:
57 remote.import_remote_repository({'path': 'path'}, 'url')
57 remote.import_remote_repository({'path': 'path'}, 'url')
58
58
59 expected_error_args = (
59 expected_error_args = (
60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
61
61
62 assert excinfo.value.args == expected_error_args
62 assert excinfo.value.args == expected_error_args
63
63
64
64
65 def test_svn_libraries_can_be_imported():
65 def test_svn_libraries_can_be_imported():
66 import svn
66 import svn
67 import svn.client
67 import svn.client
68 assert svn.client is not None
68 assert svn.client is not None
69
69
70
70
71 @pytest.mark.parametrize('example_url, parts', [
71 @pytest.mark.parametrize('example_url, parts', [
72 ('http://server.com', (None, None, 'http://server.com')),
72 ('http://server.com', (None, None, 'http://server.com')),
73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 ('<script>', (None, None, '<script>')),
75 ('<script>', (None, None, '<script>')),
76 ('http://', (None, None, 'http://')),
76 ('http://', (None, None, 'http://')),
77 ])
77 ])
78 def test_username_password_extraction_from_url(example_url, parts):
78 def test_username_password_extraction_from_url(example_url, parts):
79 from vcsserver import svn
79 from vcsserver import svn
80
80
81 factory = mock.Mock()
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
83
84 remote = svn.SvnRemote(factory)
84 remote = svn.SvnRemote(factory)
85 remote.is_path_valid_repository = lambda wire, path: True
85 remote.is_path_valid_repository = lambda wire, path: True
86
86
87 assert remote.get_url_and_credentials(example_url) == parts
87 assert remote.get_url_and_credentials(example_url) == parts
@@ -1,96 +1,96 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import wsgiref.simple_server
18 import wsgiref.simple_server
19 import wsgiref.validate
19 import wsgiref.validate
20
20
21 from vcsserver import wsgi_app_caller
21 from vcsserver import wsgi_app_caller
22
22
23
23
24 # pylint: disable=protected-access,too-many-public-methods
24 # pylint: disable=protected-access,too-many-public-methods
25
25
26
26
27 @wsgiref.validate.validator
27 @wsgiref.validate.validator
28 def demo_app(environ, start_response):
28 def demo_app(environ, start_response):
29 """WSGI app used for testing."""
29 """WSGI app used for testing."""
30 data = [
30 data = [
31 'Hello World!\n',
31 'Hello World!\n',
32 'input_data=%s\n' % environ['wsgi.input'].read(),
32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 ]
33 ]
34 for key, value in sorted(environ.items()):
34 for key, value in sorted(environ.items()):
35 data.append('%s=%s\n' % (key, value))
35 data.append('%s=%s\n' % (key, value))
36
36
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write('Old school write method\n')
38 write('Old school write method\n')
39 write('***********************\n')
39 write('***********************\n')
40 return data
40 return data
41
41
42
42
43 BASE_ENVIRON = {
43 BASE_ENVIRON = {
44 'REQUEST_METHOD': 'GET',
44 'REQUEST_METHOD': 'GET',
45 'SERVER_NAME': 'localhost',
45 'SERVER_NAME': 'localhost',
46 'SERVER_PORT': '80',
46 'SERVER_PORT': '80',
47 'SCRIPT_NAME': '',
47 'SCRIPT_NAME': '',
48 'PATH_INFO': '/',
48 'PATH_INFO': '/',
49 'QUERY_STRING': '',
49 'QUERY_STRING': '',
50 'foo.var': 'bla',
50 'foo.var': 'bla',
51 }
51 }
52
52
53
53
54 def test_complete_environ():
54 def test_complete_environ():
55 environ = dict(BASE_ENVIRON)
55 environ = dict(BASE_ENVIRON)
56 data = "data"
56 data = "data"
57 wsgi_app_caller._complete_environ(environ, data)
57 wsgi_app_caller._complete_environ(environ, data)
58 wsgiref.validate.check_environ(environ)
58 wsgiref.validate.check_environ(environ)
59
59
60 assert data == environ['wsgi.input'].read()
60 assert data == environ['wsgi.input'].read()
61
61
62
62
63 def test_start_response():
63 def test_start_response():
64 start_response = wsgi_app_caller._StartResponse()
64 start_response = wsgi_app_caller._StartResponse()
65 status = '200 OK'
65 status = '200 OK'
66 headers = [('Content-Type', 'text/plain')]
66 headers = [('Content-Type', 'text/plain')]
67 start_response(status, headers)
67 start_response(status, headers)
68
68
69 assert status == start_response.status
69 assert status == start_response.status
70 assert headers == start_response.headers
70 assert headers == start_response.headers
71
71
72
72
73 def test_start_response_with_error():
73 def test_start_response_with_error():
74 start_response = wsgi_app_caller._StartResponse()
74 start_response = wsgi_app_caller._StartResponse()
75 status = '500 Internal Server Error'
75 status = '500 Internal Server Error'
76 headers = [('Content-Type', 'text/plain')]
76 headers = [('Content-Type', 'text/plain')]
77 start_response(status, headers, (None, None, None))
77 start_response(status, headers, (None, None, None))
78
78
79 assert status == start_response.status
79 assert status == start_response.status
80 assert headers == start_response.headers
80 assert headers == start_response.headers
81
81
82
82
83 def test_wsgi_app_caller():
83 def test_wsgi_app_caller():
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 environ = dict(BASE_ENVIRON)
85 environ = dict(BASE_ENVIRON)
86 input_data = 'some text'
86 input_data = 'some text'
87 responses, status, headers = caller.handle(environ, input_data)
87 responses, status, headers = caller.handle(environ, input_data)
88 response = ''.join(responses)
88 response = ''.join(responses)
89
89
90 assert status == '200 OK'
90 assert status == '200 OK'
91 assert headers == [('Content-Type', 'text/plain')]
91 assert headers == [('Content-Type', 'text/plain')]
92 assert response.startswith(
92 assert response.startswith(
93 'Old school write method\n***********************\n')
93 'Old school write method\n***********************\n')
94 assert 'Hello World!\n' in response
94 assert 'Hello World!\n' in response
95 assert 'foo.var=bla\n' in response
95 assert 'foo.var=bla\n' in response
96 assert 'input_data=%s\n' % input_data in response
96 assert 'input_data=%s\n' % input_data in response
@@ -1,19 +1,19 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -1,64 +1,64 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import logging
19 import logging
20
20
21 import vcsserver
21 import vcsserver
22 from vcsserver.utils import safe_str
22 from vcsserver.utils import safe_str
23
23
24
24
25 log = logging.getLogger(__name__)
25 log = logging.getLogger(__name__)
26
26
27
27
28 def get_access_path(request):
28 def get_access_path(request):
29 environ = request.environ
29 environ = request.environ
30 return environ.get('PATH_INFO')
30 return environ.get('PATH_INFO')
31
31
32
32
33 def get_user_agent(environ):
33 def get_user_agent(environ):
34 return environ.get('HTTP_USER_AGENT')
34 return environ.get('HTTP_USER_AGENT')
35
35
36
36
37 class RequestWrapperTween(object):
37 class RequestWrapperTween(object):
38 def __init__(self, handler, registry):
38 def __init__(self, handler, registry):
39 self.handler = handler
39 self.handler = handler
40 self.registry = registry
40 self.registry = registry
41
41
42 # one-time configuration code goes here
42 # one-time configuration code goes here
43
43
44 def __call__(self, request):
44 def __call__(self, request):
45 start = time.time()
45 start = time.time()
46 try:
46 try:
47 response = self.handler(request)
47 response = self.handler(request)
48 finally:
48 finally:
49 end = time.time()
49 end = time.time()
50 total = end - start
50 total = end - start
51 count = request.request_count()
51 count = request.request_count()
52 _ver_ = vcsserver.__version__
52 _ver_ = vcsserver.__version__
53 log.info(
53 log.info(
54 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
54 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
55 count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
55 count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
56 safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
56 safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
57
57
58 return response
58 return response
59
59
60
60
61 def includeme(config):
61 def includeme(config):
62 config.add_tween(
62 config.add_tween(
63 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
63 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
64 )
64 )
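
RequestWrapperTween is wired in through the includeme() hook above; an application would normally enable it with config.include, roughly as in the sketch below (standard pyramid.config.Configurator API, not vcsserver's actual application factory):

    from pyramid.config import Configurator

    def make_app(global_config, **settings):
        config = Configurator(settings=settings)
        # Runs includeme() above, which registers the request-timing tween.
        config.include('vcsserver.tweens.request_wrapper')
        return config.make_wsgi_app()
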
@@ -1,110 +1,110 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18 import hashlib
19
19
20 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
21
21
22
22
23 def safe_int(val, default=None):
23 def safe_int(val, default=None):
24 """
24 """
25 Returns int() of val; if val is not convertible to int, use the
25 Returns int() of val; if val is not convertible to int, use the
26 default instead
26 default instead
27
27
28 :param val:
28 :param val:
29 :param default:
29 :param default:
30 """
30 """
31
31
32 try:
32 try:
33 val = int(val)
33 val = int(val)
34 except (ValueError, TypeError):
34 except (ValueError, TypeError):
35 val = default
35 val = default
36
36
37 return val
37 return val
38
38
39
39
40 def safe_str(unicode_, to_encoding=None):
40 def safe_str(unicode_, to_encoding=None):
41 """
41 """
42 safe str function. Does a few tricks to turn unicode_ into a string
42 safe str function. Does a few tricks to turn unicode_ into a string
43
43
44 :param unicode_: unicode to encode
44 :param unicode_: unicode to encode
45 :param to_encoding: encoding to encode to, UTF-8 by default
45 :param to_encoding: encoding to encode to, UTF-8 by default
46 :rtype: str
46 :rtype: str
47 :returns: str object
47 :returns: str object
48 """
48 """
49 to_encoding = to_encoding or ['utf8']
49 to_encoding = to_encoding or ['utf8']
50 # if it's not basestr cast to str
50 # if it's not basestr cast to str
51 if not isinstance(unicode_, basestring):
51 if not isinstance(unicode_, basestring):
52 return str(unicode_)
52 return str(unicode_)
53
53
54 if isinstance(unicode_, str):
54 if isinstance(unicode_, str):
55 return unicode_
55 return unicode_
56
56
57 if not isinstance(to_encoding, (list, tuple)):
57 if not isinstance(to_encoding, (list, tuple)):
58 to_encoding = [to_encoding]
58 to_encoding = [to_encoding]
59
59
60 for enc in to_encoding:
60 for enc in to_encoding:
61 try:
61 try:
62 return unicode_.encode(enc)
62 return unicode_.encode(enc)
63 except UnicodeEncodeError:
63 except UnicodeEncodeError:
64 pass
64 pass
65
65
66 return unicode_.encode(to_encoding[0], 'replace')
66 return unicode_.encode(to_encoding[0], 'replace')
67
67
68
68
69 def safe_unicode(str_, from_encoding=None):
69 def safe_unicode(str_, from_encoding=None):
70 """
70 """
71 safe unicode function. Does a few tricks to turn str_ into unicode
71 safe unicode function. Does a few tricks to turn str_ into unicode
72
72
73 :param str_: string to decode
73 :param str_: string to decode
74 :param from_encoding: encoding to decode from, UTF-8 by default
74 :param from_encoding: encoding to decode from, UTF-8 by default
75 :rtype: unicode
75 :rtype: unicode
76 :returns: unicode object
76 :returns: unicode object
77 """
77 """
78 from_encoding = from_encoding or ['utf8']
78 from_encoding = from_encoding or ['utf8']
79
79
80 if isinstance(str_, unicode):
80 if isinstance(str_, unicode):
81 return str_
81 return str_
82
82
83 if not isinstance(from_encoding, (list, tuple)):
83 if not isinstance(from_encoding, (list, tuple)):
84 from_encoding = [from_encoding]
84 from_encoding = [from_encoding]
85
85
86 try:
86 try:
87 return unicode(str_)
87 return unicode(str_)
88 except UnicodeDecodeError:
88 except UnicodeDecodeError:
89 pass
89 pass
90
90
91 for enc in from_encoding:
91 for enc in from_encoding:
92 try:
92 try:
93 return unicode(str_, enc)
93 return unicode(str_, enc)
94 except UnicodeDecodeError:
94 except UnicodeDecodeError:
95 pass
95 pass
96
96
97 return unicode(str_, from_encoding[0], 'replace')
97 return unicode(str_, from_encoding[0], 'replace')
98
98
99
99
100 class AttributeDict(dict):
100 class AttributeDict(dict):
101 def __getattr__(self, attr):
101 def __getattr__(self, attr):
102 return self.get(attr, None)
102 return self.get(attr, None)
103 __setattr__ = dict.__setitem__
103 __setattr__ = dict.__setitem__
104 __delattr__ = dict.__delitem__
104 __delattr__ = dict.__delitem__
105
105
106
106
107 def sha1(val):
107 def sha1(val):
108 return hashlib.sha1(val).hexdigest()
108 return hashlib.sha1(val).hexdigest()
109
109
110
110
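
The helpers above are Python 2 string conveniences: safe_str encodes unicode to a byte string (UTF-8 first, 'replace' as a last resort), safe_unicode decodes in the other direction, AttributeDict exposes keys as attributes and returns None for missing ones, and sha1 returns a hex digest. A few illustrative calls whose results follow directly from the code above:

    from vcsserver.utils import AttributeDict, safe_str, safe_unicode, sha1

    assert safe_str(u'caf\xe9') == 'caf\xc3\xa9'      # unicode -> UTF-8 bytes
    assert safe_unicode('caf\xc3\xa9') == u'caf\xe9'  # bytes -> unicode
    assert safe_str(42) == '42'                       # non-strings go through str()

    settings = AttributeDict(locale='en')
    assert settings.locale == 'en'
    assert settings.missing is None                   # absent keys yield None

    assert len(sha1('payload')) == 40                 # hex SHA-1 digest
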
@@ -1,32 +1,32 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 class RemoteBase(object):
19 class RemoteBase(object):
20 EMPTY_COMMIT = '0' * 40
20 EMPTY_COMMIT = '0' * 40
21
21
22 @property
22 @property
23 def region(self):
23 def region(self):
24 return self._factory._cache_region
24 return self._factory._cache_region
25
25
26 def _cache_on(self, wire):
26 def _cache_on(self, wire):
27 context = wire.get('context', '')
27 context = wire.get('context', '')
28 context_uid = '{}'.format(context)
28 context_uid = '{}'.format(context)
29 repo_id = wire.get('repo_id', '')
29 repo_id = wire.get('repo_id', '')
30 cache = wire.get('cache', True)
30 cache = wire.get('cache', True)
31 cache_on = context and cache
31 cache_on = context and cache
32 return cache_on, context_uid, repo_id
32 return cache_on, context_uid, repo_id
@@ -1,116 +1,116 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Extract the responses of a WSGI app."""
18 """Extract the responses of a WSGI app."""
19
19
20 __all__ = ('WSGIAppCaller',)
20 __all__ = ('WSGIAppCaller',)
21
21
22 import io
22 import io
23 import logging
23 import logging
24 import os
24 import os
25
25
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29 DEV_NULL = open(os.devnull)
29 DEV_NULL = open(os.devnull)
30
30
31
31
32 def _complete_environ(environ, input_data):
32 def _complete_environ(environ, input_data):
33 """Update the missing wsgi.* variables of a WSGI environment.
33 """Update the missing wsgi.* variables of a WSGI environment.
34
34
35 :param environ: WSGI environment to update
35 :param environ: WSGI environment to update
36 :type environ: dict
36 :type environ: dict
37 :param input_data: data to be read by the app
37 :param input_data: data to be read by the app
38 :type input_data: str
38 :type input_data: str
39 """
39 """
40 environ.update({
40 environ.update({
41 'wsgi.version': (1, 0),
41 'wsgi.version': (1, 0),
42 'wsgi.url_scheme': 'http',
42 'wsgi.url_scheme': 'http',
43 'wsgi.multithread': True,
43 'wsgi.multithread': True,
44 'wsgi.multiprocess': True,
44 'wsgi.multiprocess': True,
45 'wsgi.run_once': False,
45 'wsgi.run_once': False,
46 'wsgi.input': io.BytesIO(input_data),
46 'wsgi.input': io.BytesIO(input_data),
47 'wsgi.errors': DEV_NULL,
47 'wsgi.errors': DEV_NULL,
48 })
48 })
49
49
50
50
51 # pylint: disable=too-few-public-methods
51 # pylint: disable=too-few-public-methods
52 class _StartResponse(object):
52 class _StartResponse(object):
53 """Save the arguments of a start_response call."""
53 """Save the arguments of a start_response call."""
54
54
55 __slots__ = ['status', 'headers', 'content']
55 __slots__ = ['status', 'headers', 'content']
56
56
57 def __init__(self):
57 def __init__(self):
58 self.status = None
58 self.status = None
59 self.headers = None
59 self.headers = None
60 self.content = []
60 self.content = []
61
61
62 def __call__(self, status, headers, exc_info=None):
62 def __call__(self, status, headers, exc_info=None):
63 # TODO(skreft): do something meaningful with the exc_info
63 # TODO(skreft): do something meaningful with the exc_info
64 exc_info = None # avoid dangling circular reference
64 exc_info = None # avoid dangling circular reference
65 self.status = status
65 self.status = status
66 self.headers = headers
66 self.headers = headers
67
67
68 return self.write
68 return self.write
69
69
70 def write(self, content):
70 def write(self, content):
71 """Write method returning when calling this object.
71 """Write method returning when calling this object.
72
72
73 All the data written is then available in content.
73 All the data written is then available in content.
74 """
74 """
75 self.content.append(content)
75 self.content.append(content)
76
76
77
77
78 class WSGIAppCaller(object):
78 class WSGIAppCaller(object):
79 """Calls a WSGI app."""
79 """Calls a WSGI app."""
80
80
81 def __init__(self, app):
81 def __init__(self, app):
82 """
82 """
83 :param app: WSGI app to call
83 :param app: WSGI app to call
84 """
84 """
85 self.app = app
85 self.app = app
86
86
87 def handle(self, environ, input_data):
87 def handle(self, environ, input_data):
88 """Process a request with the WSGI app.
88 """Process a request with the WSGI app.
89
89
90 The returned data of the app is fully consumed into a list.
90 The returned data of the app is fully consumed into a list.
91
91
92 :param environ: WSGI environment to update
92 :param environ: WSGI environment to update
93 :type environ: dict
93 :type environ: dict
94 :param input_data: data to be read by the app
94 :param input_data: data to be read by the app
95 :type input_data: str
95 :type input_data: str
96
96
97 :returns: a tuple with the contents, status and headers
97 :returns: a tuple with the contents, status and headers
98 :rtype: (list<str>, str, list<(str, str)>)
98 :rtype: (list<str>, str, list<(str, str)>)
99 """
99 """
100 _complete_environ(environ, input_data)
100 _complete_environ(environ, input_data)
101 start_response = _StartResponse()
101 start_response = _StartResponse()
102 log.debug("Calling wrapped WSGI application")
102 log.debug("Calling wrapped WSGI application")
103 responses = self.app(environ, start_response)
103 responses = self.app(environ, start_response)
104 responses_list = list(responses)
104 responses_list = list(responses)
105 existing_responses = start_response.content
105 existing_responses = start_response.content
106 if existing_responses:
106 if existing_responses:
107 log.debug(
107 log.debug(
108 "Adding returned response to response written via write()")
108 "Adding returned response to response written via write()")
109 existing_responses.extend(responses_list)
109 existing_responses.extend(responses_list)
110 responses_list = existing_responses
110 responses_list = existing_responses
111 if hasattr(responses, 'close'):
111 if hasattr(responses, 'close'):
112 log.debug("Closing iterator from WSGI application")
112 log.debug("Closing iterator from WSGI application")
113 responses.close()
113 responses.close()
114
114
115 log.debug("Handling of WSGI request done, returning response")
115 log.debug("Handling of WSGI request done, returning response")
116 return responses_list, start_response.status, start_response.headers
116 return responses_list, start_response.status, start_response.headers