exceptions: use new wrappers that store the original exception inside the newly generated exceptions…
marcink
r490:2961b1db default
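The change below replaces each `functools.partial` exception factory with a function that first receives the original exception and returns the real builder, so call sites gain one extra call and the resulting wire exception carries the cause on `_org_exc`. A minimal before/after sketch (the failing `do_vcs_work` call is a placeholder, not part of the diff, and it assumes `vcsserver` is importable):

```python
from vcsserver import exceptions


def do_vcs_work():
    # stand-in for any backend operation that can fail
    raise ValueError("backend failure")


try:
    do_vcs_work()
except Exception as original:
    # old API:  raise exceptions.AbortException(repr(original))
    # new API:  the factory captures `original`, then the returned wrapper
    # builds the plain Exception with `_vcs_kind` and `_org_exc` attached.
    raise exceptions.AbortException(original)(repr(original))
```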
@@ -1,70 +1,106 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 import functools
27 import functools
28 from pyramid.httpexceptions import HTTPLocked
28 from pyramid.httpexceptions import HTTPLocked
29
29
30
30
31 def _make_exception(kind, *args):
31 def _make_exception(kind, org_exc, *args):
32 """
32 """
33 Prepares a base `Exception` instance to be sent over the wire.
33 Prepares a base `Exception` instance to be sent over the wire.
34
34
35 To give our caller a hint what this is about, it will attach an attribute
35 To give our caller a hint what this is about, it will attach an attribute
36 `_vcs_kind` to the exception.
36 `_vcs_kind` to the exception.
37 """
37 """
38 exc = Exception(*args)
38 exc = Exception(*args)
39 exc._vcs_kind = kind
39 exc._vcs_kind = kind
40 exc._org_exc = org_exc
40 return exc
41 return exc
41
42
42
43
43 AbortException = functools.partial(_make_exception, 'abort')
44 def AbortException(org_exc=None):
45 def _make_exception_wrapper(*args):
46 return _make_exception('abort', org_exc, *args)
47 return _make_exception_wrapper
48
44
49
45 ArchiveException = functools.partial(_make_exception, 'archive')
50 def ArchiveException(org_exc=None):
51 def _make_exception_wrapper(*args):
52 return _make_exception('archive', org_exc, *args)
53 return _make_exception_wrapper
54
46
55
47 LookupException = functools.partial(_make_exception, 'lookup')
56 def LookupException(org_exc=None):
57 def _make_exception_wrapper(*args):
58 return _make_exception('lookup', org_exc, *args)
59 return _make_exception_wrapper
60
48
61
49 VcsException = functools.partial(_make_exception, 'error')
62 def VcsException(org_exc=None):
63 def _make_exception_wrapper(*args):
64 return _make_exception('error', org_exc, *args)
65 return _make_exception_wrapper
66
50
67
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
68 def RepositoryLockedException(org_exc=None):
69 def _make_exception_wrapper(*args):
70 return _make_exception('repo_locked', org_exc, *args)
71 return _make_exception_wrapper
72
52
73
53 RequirementException = functools.partial(_make_exception, 'requirement')
74 def RequirementException(org_exc=None):
75 def _make_exception_wrapper(*args):
76 return _make_exception('requirement', org_exc, *args)
77 return _make_exception_wrapper
78
54
79
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
80 def UnhandledException(org_exc=None):
81 def _make_exception_wrapper(*args):
82 return _make_exception('unhandled', org_exc, *args)
83 return _make_exception_wrapper
84
56
85
57 URLError = functools.partial(_make_exception, 'url_error')
86 def URLError(org_exc=None):
87 def _make_exception_wrapper(*args):
88 return _make_exception('url_error', org_exc, *args)
89 return _make_exception_wrapper
58
90
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
91
92 def SubrepoMergeException(org_exc=None):
93 def _make_exception_wrapper(*args):
94 return _make_exception('subrepo_merge_error', org_exc, *args)
95 return _make_exception_wrapper
60
96
61
97
62 class HTTPRepoLocked(HTTPLocked):
98 class HTTPRepoLocked(HTTPLocked):
63 """
99 """
64 Subclass of HTTPLocked response that allows setting the title and status
100 Subclass of HTTPLocked response that allows setting the title and status
65 code via constructor arguments.
101 code via constructor arguments.
66 """
102 """
67 def __init__(self, title, status_code=None, **kwargs):
103 def __init__(self, title, status_code=None, **kwargs):
68 self.code = status_code or HTTPLocked.code
104 self.code = status_code or HTTPLocked.code
69 self.title = title
105 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
106 super(HTTPRepoLocked, self).__init__(**kwargs)
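For consumers nothing changes structurally: plain `Exception` instances still arrive, distinguishable via `_vcs_kind`, and after this commit the original backend error is additionally reachable through `_org_exc` (whether it survives depends on how the transport serializes exceptions). A hedged sketch of client-side handling:

```python
def handle_remote_error(exc):
    # Only the attributes set by _make_exception() are part of the contract;
    # the client never imports vcsserver's exception classes.
    kind = getattr(exc, '_vcs_kind', None)
    original = getattr(exc, '_org_exc', None)  # new in this commit

    if kind == 'lookup':
        print('object not found: %s (caused by %r)' % (exc, original))
    elif kind == 'repo_locked':
        print('repository is locked: %s' % exc)
    else:
        raise exc
```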
@@ -1,671 +1,671 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 from dulwich import index, objects
28 from dulwich import index, objects
29 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.errors import (
30 from dulwich.errors import (
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 MissingCommitError, ObjectMissing, HangupException,
32 MissingCommitError, ObjectMissing, HangupException,
33 UnexpectedCommandError)
33 UnexpectedCommandError)
34 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.server import update_server_info
35 from dulwich.server import update_server_info
36
36
37 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver.utils import safe_str
38 from vcsserver.utils import safe_str
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.hgcompat import (
40 from vcsserver.hgcompat import (
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 from vcsserver.git_lfs.lib import LFSOidStore
42 from vcsserver.git_lfs.lib import LFSOidStore
43
43
44 DIR_STAT = stat.S_IFDIR
44 DIR_STAT = stat.S_IFDIR
45 FILE_MODE = stat.S_IFMT
45 FILE_MODE = stat.S_IFMT
46 GIT_LINK = objects.S_IFGITLINK
46 GIT_LINK = objects.S_IFGITLINK
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 def reraise_safe_exceptions(func):
51 def reraise_safe_exceptions(func):
52 """Converts Dulwich exceptions to something neutral."""
52 """Converts Dulwich exceptions to something neutral."""
53 @wraps(func)
53 @wraps(func)
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 ObjectMissing) as e:
58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e)(e.message)
60 except (HangupException, UnexpectedCommandError) as e:
60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e)(e.message)
62 except Exception as e:
62 except Exception as e:
63 # NOTE(marcink): because of how dulwich handles some exceptions
63 # NOTE(marcink): because of how dulwich handles some exceptions
64 # (KeyError on empty repos), we cannot track this and catch all
64 # (KeyError on empty repos), we cannot track this and catch all
65 # exceptions; these are exceptions from other handlers
65 # exceptions; these are exceptions from other handlers
66 #if not hasattr(e, '_vcs_kind'):
66 #if not hasattr(e, '_vcs_kind'):
67 #log.exception("Unhandled exception in git remote call")
67 #log.exception("Unhandled exception in git remote call")
68 #raise_from_original(exceptions.UnhandledException)
68 #raise_from_original(exceptions.UnhandledException)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class Repo(DulwichRepo):
73 class Repo(DulwichRepo):
74 """
74 """
75 A wrapper for dulwich Repo class.
75 A wrapper for dulwich Repo class.
76
76
77 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
77 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
78 "Too many open files" error. We need to close all opened file descriptors
78 "Too many open files" error. We need to close all opened file descriptors
79 once the repo object is destroyed.
79 once the repo object is destroyed.
80
80
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 to 0.12.0 +
82 to 0.12.0 +
83 """
83 """
84 def __del__(self):
84 def __del__(self):
85 if hasattr(self, 'object_store'):
85 if hasattr(self, 'object_store'):
86 self.close()
86 self.close()
87
87
88
88
89 class GitFactory(RepoFactory):
89 class GitFactory(RepoFactory):
90 repo_type = 'git'
90 repo_type = 'git'
91
91
92 def _create_repo(self, wire, create):
92 def _create_repo(self, wire, create):
93 repo_path = str_to_dulwich(wire['path'])
93 repo_path = str_to_dulwich(wire['path'])
94 return Repo(repo_path)
94 return Repo(repo_path)
95
95
96
96
97 class GitRemote(object):
97 class GitRemote(object):
98
98
99 def __init__(self, factory):
99 def __init__(self, factory):
100 self._factory = factory
100 self._factory = factory
101
101
102 self._bulk_methods = {
102 self._bulk_methods = {
103 "author": self.commit_attribute,
103 "author": self.commit_attribute,
104 "date": self.get_object_attrs,
104 "date": self.get_object_attrs,
105 "message": self.commit_attribute,
105 "message": self.commit_attribute,
106 "parents": self.commit_attribute,
106 "parents": self.commit_attribute,
107 "_commit": self.revision,
107 "_commit": self.revision,
108 }
108 }
109
109
110 def _wire_to_config(self, wire):
110 def _wire_to_config(self, wire):
111 if 'config' in wire:
111 if 'config' in wire:
112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
113 return {}
113 return {}
114
114
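`_wire_to_config` flattens the `(section, option, value)` triples from the wire dict into one mapping keyed as `section_option`; this is how the LFS settings further down are looked up. A tiny illustration with made-up values (the actual section/option split is decided by the caller):

```python
wire = {'config': [('vcs_git_lfs', 'store_location', '/var/lfs-store')]}
flat = dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
# flat == {'vcs_git_lfs_store_location': '/var/lfs-store'}
```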
115 def _assign_ref(self, wire, ref, commit_id):
115 def _assign_ref(self, wire, ref, commit_id):
116 repo = self._factory.repo(wire)
116 repo = self._factory.repo(wire)
117 repo[ref] = commit_id
117 repo[ref] = commit_id
118
118
119 @reraise_safe_exceptions
119 @reraise_safe_exceptions
120 def add_object(self, wire, content):
120 def add_object(self, wire, content):
121 repo = self._factory.repo(wire)
121 repo = self._factory.repo(wire)
122 blob = objects.Blob()
122 blob = objects.Blob()
123 blob.set_raw_string(content)
123 blob.set_raw_string(content)
124 repo.object_store.add_object(blob)
124 repo.object_store.add_object(blob)
125 return blob.id
125 return blob.id
126
126
127 @reraise_safe_exceptions
127 @reraise_safe_exceptions
128 def assert_correct_path(self, wire):
128 def assert_correct_path(self, wire):
129 path = wire.get('path')
129 path = wire.get('path')
130 try:
130 try:
131 self._factory.repo(wire)
131 self._factory.repo(wire)
132 except NotGitRepository as e:
132 except NotGitRepository as e:
133 tb = traceback.format_exc()
133 tb = traceback.format_exc()
134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
135 return False
135 return False
136
136
137 return True
137 return True
138
138
139 @reraise_safe_exceptions
139 @reraise_safe_exceptions
140 def bare(self, wire):
140 def bare(self, wire):
141 repo = self._factory.repo(wire)
141 repo = self._factory.repo(wire)
142 return repo.bare
142 return repo.bare
143
143
144 @reraise_safe_exceptions
144 @reraise_safe_exceptions
145 def blob_as_pretty_string(self, wire, sha):
145 def blob_as_pretty_string(self, wire, sha):
146 repo = self._factory.repo(wire)
146 repo = self._factory.repo(wire)
147 return repo[sha].as_pretty_string()
147 return repo[sha].as_pretty_string()
148
148
149 @reraise_safe_exceptions
149 @reraise_safe_exceptions
150 def blob_raw_length(self, wire, sha):
150 def blob_raw_length(self, wire, sha):
151 repo = self._factory.repo(wire)
151 repo = self._factory.repo(wire)
152 blob = repo[sha]
152 blob = repo[sha]
153 return blob.raw_length()
153 return blob.raw_length()
154
154
155 def _parse_lfs_pointer(self, raw_content):
155 def _parse_lfs_pointer(self, raw_content):
156
156
157 spec_string = 'version https://git-lfs.github.com/spec'
157 spec_string = 'version https://git-lfs.github.com/spec'
158 if raw_content and raw_content.startswith(spec_string):
158 if raw_content and raw_content.startswith(spec_string):
159 pattern = re.compile(r"""
159 pattern = re.compile(r"""
160 (?:\n)?
160 (?:\n)?
161 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
162 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
163 ^size[ ](?P<oid_size>[0-9]+)\n
163 ^size[ ](?P<oid_size>[0-9]+)\n
164 (?:\n)?
164 (?:\n)?
165 """, re.VERBOSE | re.MULTILINE)
165 """, re.VERBOSE | re.MULTILINE)
166 match = pattern.match(raw_content)
166 match = pattern.match(raw_content)
167 if match:
167 if match:
168 return match.groupdict()
168 return match.groupdict()
169
169
170 return {}
170 return {}
171
171
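`_parse_lfs_pointer` only considers content that starts with the LFS spec marker and then extracts the version, oid and size via the verbose regex above; anything else yields an empty dict. With example values, a typical pointer parses roughly like this:

```python
raw_content = (
    'version https://git-lfs.github.com/spec/v1\n'
    'oid sha256:' + 'a' * 64 + '\n'
    'size 12345\n'
)
# GitRemote._parse_lfs_pointer(raw_content) would return
# {'spec_ver': 'v1', 'oid_hash': 'aaa...a' (64 chars), 'oid_size': '12345'}
```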
172 @reraise_safe_exceptions
172 @reraise_safe_exceptions
173 def is_large_file(self, wire, sha):
173 def is_large_file(self, wire, sha):
174 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
175 blob = repo[sha]
175 blob = repo[sha]
176 return self._parse_lfs_pointer(blob.as_raw_string())
176 return self._parse_lfs_pointer(blob.as_raw_string())
177
177
178 @reraise_safe_exceptions
178 @reraise_safe_exceptions
179 def in_largefiles_store(self, wire, oid):
179 def in_largefiles_store(self, wire, oid):
180 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
181 conf = self._wire_to_config(wire)
181 conf = self._wire_to_config(wire)
182
182
183 store_location = conf.get('vcs_git_lfs_store_location')
183 store_location = conf.get('vcs_git_lfs_store_location')
184 if store_location:
184 if store_location:
185 repo_name = repo.path
185 repo_name = repo.path
186 store = LFSOidStore(
186 store = LFSOidStore(
187 oid=oid, repo=repo_name, store_location=store_location)
187 oid=oid, repo=repo_name, store_location=store_location)
188 return store.has_oid()
188 return store.has_oid()
189
189
190 return False
190 return False
191
191
192 @reraise_safe_exceptions
192 @reraise_safe_exceptions
193 def store_path(self, wire, oid):
193 def store_path(self, wire, oid):
194 repo = self._factory.repo(wire)
194 repo = self._factory.repo(wire)
195 conf = self._wire_to_config(wire)
195 conf = self._wire_to_config(wire)
196
196
197 store_location = conf.get('vcs_git_lfs_store_location')
197 store_location = conf.get('vcs_git_lfs_store_location')
198 if store_location:
198 if store_location:
199 repo_name = repo.path
199 repo_name = repo.path
200 store = LFSOidStore(
200 store = LFSOidStore(
201 oid=oid, repo=repo_name, store_location=store_location)
201 oid=oid, repo=repo_name, store_location=store_location)
202 return store.oid_path
202 return store.oid_path
203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
204
204
205 @reraise_safe_exceptions
205 @reraise_safe_exceptions
206 def bulk_request(self, wire, rev, pre_load):
206 def bulk_request(self, wire, rev, pre_load):
207 result = {}
207 result = {}
208 for attr in pre_load:
208 for attr in pre_load:
209 try:
209 try:
210 method = self._bulk_methods[attr]
210 method = self._bulk_methods[attr]
211 args = [wire, rev]
211 args = [wire, rev]
212 if attr == "date":
212 if attr == "date":
213 args.extend(["commit_time", "commit_timezone"])
213 args.extend(["commit_time", "commit_timezone"])
214 elif attr in ["author", "message", "parents"]:
214 elif attr in ["author", "message", "parents"]:
215 args.append(attr)
215 args.append(attr)
216 result[attr] = method(*args)
216 result[attr] = method(*args)
217 except KeyError:
217 except KeyError as e:
218 raise exceptions.VcsException(
218 raise exceptions.VcsException(e)(
219 "Unknown bulk attribute: %s" % attr)
219 "Unknown bulk attribute: %s" % attr)
220 return result
220 return result
221
221
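`bulk_request` fans a single call out over `_bulk_methods`, adding extra positional arguments for some attributes; unknown names now raise a `VcsException` that also carries the triggering `KeyError`. A short summary of the dispatch, using the names from the code above:

```python
pre_load = ['author', 'message', 'date']
# 'author'  -> commit_attribute(wire, rev, 'author')
# 'message' -> commit_attribute(wire, rev, 'message')
# 'date'    -> get_object_attrs(wire, rev, 'commit_time', 'commit_timezone')
# anything else -> exceptions.VcsException(e)("Unknown bulk attribute: ...")
```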
222 def _build_opener(self, url):
222 def _build_opener(self, url):
223 handlers = []
223 handlers = []
224 url_obj = url_parser(url)
224 url_obj = url_parser(url)
225 _, authinfo = url_obj.authinfo()
225 _, authinfo = url_obj.authinfo()
226
226
227 if authinfo:
227 if authinfo:
228 # create a password manager
228 # create a password manager
229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
230 passmgr.add_password(*authinfo)
230 passmgr.add_password(*authinfo)
231
231
232 handlers.extend((httpbasicauthhandler(passmgr),
232 handlers.extend((httpbasicauthhandler(passmgr),
233 httpdigestauthhandler(passmgr)))
233 httpdigestauthhandler(passmgr)))
234
234
235 return urllib2.build_opener(*handlers)
235 return urllib2.build_opener(*handlers)
236
236
237 @reraise_safe_exceptions
237 @reraise_safe_exceptions
238 def check_url(self, url, config):
238 def check_url(self, url, config):
239 url_obj = url_parser(url)
239 url_obj = url_parser(url)
240 test_uri, _ = url_obj.authinfo()
240 test_uri, _ = url_obj.authinfo()
241 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
242 url_obj.query = obfuscate_qs(url_obj.query)
242 url_obj.query = obfuscate_qs(url_obj.query)
243 cleaned_uri = str(url_obj)
243 cleaned_uri = str(url_obj)
244 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
245
245
246 if not test_uri.endswith('info/refs'):
246 if not test_uri.endswith('info/refs'):
247 test_uri = test_uri.rstrip('/') + '/info/refs'
247 test_uri = test_uri.rstrip('/') + '/info/refs'
248
248
249 o = self._build_opener(url)
249 o = self._build_opener(url)
250 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
251
251
252 q = {"service": 'git-upload-pack'}
252 q = {"service": 'git-upload-pack'}
253 qs = '?%s' % urllib.urlencode(q)
253 qs = '?%s' % urllib.urlencode(q)
254 cu = "%s%s" % (test_uri, qs)
254 cu = "%s%s" % (test_uri, qs)
255 req = urllib2.Request(cu, None, {})
255 req = urllib2.Request(cu, None, {})
256
256
257 try:
257 try:
258 log.debug("Trying to open URL %s", cleaned_uri)
258 log.debug("Trying to open URL %s", cleaned_uri)
259 resp = o.open(req)
259 resp = o.open(req)
260 if resp.code != 200:
260 if resp.code != 200:
261 raise exceptions.URLError('Return Code is not 200')
261 raise exceptions.URLError()('Return Code is not 200')
262 except Exception as e:
262 except Exception as e:
263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
264 # means it cannot be cloned
264 # means it cannot be cloned
265 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
266
266
267 # now detect if it's a proper git repo
267 # now detect if it's a proper git repo
268 gitdata = resp.read()
268 gitdata = resp.read()
269 if 'service=git-upload-pack' in gitdata:
269 if 'service=git-upload-pack' in gitdata:
270 pass
270 pass
271 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
272 # old style git can return some other format !
272 # old style git can return some other format !
273 pass
273 pass
274 else:
274 else:
275 raise exceptions.URLError(
275 raise exceptions.URLError()(
276 "url [%s] does not look like an git" % (cleaned_uri,))
276 "url [%s] does not look like an git" % (cleaned_uri,))
277
277
278 return True
278 return True
279
279
280 @reraise_safe_exceptions
280 @reraise_safe_exceptions
281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
282 remote_refs = self.fetch(wire, url, apply_refs=False)
282 remote_refs = self.fetch(wire, url, apply_refs=False)
283 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
284 if isinstance(valid_refs, list):
284 if isinstance(valid_refs, list):
285 valid_refs = tuple(valid_refs)
285 valid_refs = tuple(valid_refs)
286
286
287 for k in remote_refs:
287 for k in remote_refs:
288 # only parse heads/tags and skip so called deferred tags
288 # only parse heads/tags and skip so called deferred tags
289 if k.startswith(valid_refs) and not k.endswith(deferred):
289 if k.startswith(valid_refs) and not k.endswith(deferred):
290 repo[k] = remote_refs[k]
290 repo[k] = remote_refs[k]
291
291
292 if update_after_clone:
292 if update_after_clone:
293 # we want to checkout HEAD
293 # we want to checkout HEAD
294 repo["HEAD"] = remote_refs["HEAD"]
294 repo["HEAD"] = remote_refs["HEAD"]
295 index.build_index_from_tree(repo.path, repo.index_path(),
295 index.build_index_from_tree(repo.path, repo.index_path(),
296 repo.object_store, repo["HEAD"].tree)
296 repo.object_store, repo["HEAD"].tree)
297
297
298 # TODO: this is quite complex, check if that can be simplified
298 # TODO: this is quite complex, check if that can be simplified
299 @reraise_safe_exceptions
299 @reraise_safe_exceptions
300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
301 repo = self._factory.repo(wire)
301 repo = self._factory.repo(wire)
302 object_store = repo.object_store
302 object_store = repo.object_store
303
303
304 # Create tree and populates it with blobs
304 # Create tree and populates it with blobs
305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
306
306
307 for node in updated:
307 for node in updated:
308 # Compute subdirs if needed
308 # Compute subdirs if needed
309 dirpath, nodename = vcspath.split(node['path'])
309 dirpath, nodename = vcspath.split(node['path'])
310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
311 parent = commit_tree
311 parent = commit_tree
312 ancestors = [('', parent)]
312 ancestors = [('', parent)]
313
313
314 # Tries to dig for the deepest existing tree
314 # Tries to dig for the deepest existing tree
315 while dirnames:
315 while dirnames:
316 curdir = dirnames.pop(0)
316 curdir = dirnames.pop(0)
317 try:
317 try:
318 dir_id = parent[curdir][1]
318 dir_id = parent[curdir][1]
319 except KeyError:
319 except KeyError:
320 # put curdir back into dirnames and stops
320 # put curdir back into dirnames and stops
321 dirnames.insert(0, curdir)
321 dirnames.insert(0, curdir)
322 break
322 break
323 else:
323 else:
324 # If found, updates parent
324 # If found, updates parent
325 parent = repo[dir_id]
325 parent = repo[dir_id]
326 ancestors.append((curdir, parent))
326 ancestors.append((curdir, parent))
327 # Now parent is deepest existing tree and we need to create
327 # Now parent is deepest existing tree and we need to create
328 # subtrees for dirnames (in reverse order)
328 # subtrees for dirnames (in reverse order)
329 # [this only applies for nodes from added]
329 # [this only applies for nodes from added]
330 new_trees = []
330 new_trees = []
331
331
332 blob = objects.Blob.from_string(node['content'])
332 blob = objects.Blob.from_string(node['content'])
333
333
334 if dirnames:
334 if dirnames:
335 # If there are trees which should be created we need to build
335 # If there are trees which should be created we need to build
336 # them now (in reverse order)
336 # them now (in reverse order)
337 reversed_dirnames = list(reversed(dirnames))
337 reversed_dirnames = list(reversed(dirnames))
338 curtree = objects.Tree()
338 curtree = objects.Tree()
339 curtree[node['node_path']] = node['mode'], blob.id
339 curtree[node['node_path']] = node['mode'], blob.id
340 new_trees.append(curtree)
340 new_trees.append(curtree)
341 for dirname in reversed_dirnames[:-1]:
341 for dirname in reversed_dirnames[:-1]:
342 newtree = objects.Tree()
342 newtree = objects.Tree()
343 newtree[dirname] = (DIR_STAT, curtree.id)
343 newtree[dirname] = (DIR_STAT, curtree.id)
344 new_trees.append(newtree)
344 new_trees.append(newtree)
345 curtree = newtree
345 curtree = newtree
346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
347 else:
347 else:
348 parent.add(
348 parent.add(
349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
350
350
351 new_trees.append(parent)
351 new_trees.append(parent)
352 # Update ancestors
352 # Update ancestors
353 reversed_ancestors = reversed(
353 reversed_ancestors = reversed(
354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
355 for parent, tree, path in reversed_ancestors:
355 for parent, tree, path in reversed_ancestors:
356 parent[path] = (DIR_STAT, tree.id)
356 parent[path] = (DIR_STAT, tree.id)
357 object_store.add_object(tree)
357 object_store.add_object(tree)
358
358
359 object_store.add_object(blob)
359 object_store.add_object(blob)
360 for tree in new_trees:
360 for tree in new_trees:
361 object_store.add_object(tree)
361 object_store.add_object(tree)
362
362
363 for node_path in removed:
363 for node_path in removed:
364 paths = node_path.split('/')
364 paths = node_path.split('/')
365 tree = commit_tree
365 tree = commit_tree
366 trees = [tree]
366 trees = [tree]
367 # Traverse deep into the forest...
367 # Traverse deep into the forest...
368 for path in paths:
368 for path in paths:
369 try:
369 try:
370 obj = repo[tree[path][1]]
370 obj = repo[tree[path][1]]
371 if isinstance(obj, objects.Tree):
371 if isinstance(obj, objects.Tree):
372 trees.append(obj)
372 trees.append(obj)
373 tree = obj
373 tree = obj
374 except KeyError:
374 except KeyError:
375 break
375 break
376 # Cut down the blob and all rotten trees on the way back...
376 # Cut down the blob and all rotten trees on the way back...
377 for path, tree in reversed(zip(paths, trees)):
377 for path, tree in reversed(zip(paths, trees)):
378 del tree[path]
378 del tree[path]
379 if tree:
379 if tree:
380 # This tree still has elements - don't remove it or any
380 # This tree still has elements - don't remove it or any
381 # of its parents
381 # of its parents
382 break
382 break
383
383
384 object_store.add_object(commit_tree)
384 object_store.add_object(commit_tree)
385
385
386 # Create commit
386 # Create commit
387 commit = objects.Commit()
387 commit = objects.Commit()
388 commit.tree = commit_tree.id
388 commit.tree = commit_tree.id
389 for k, v in commit_data.iteritems():
389 for k, v in commit_data.iteritems():
390 setattr(commit, k, v)
390 setattr(commit, k, v)
391 object_store.add_object(commit)
391 object_store.add_object(commit)
392
392
393 ref = 'refs/heads/%s' % branch
393 ref = 'refs/heads/%s' % branch
394 repo.refs[ref] = commit.id
394 repo.refs[ref] = commit.id
395
395
396 return commit.id
396 return commit.id
397
397
398 @reraise_safe_exceptions
398 @reraise_safe_exceptions
399 def fetch(self, wire, url, apply_refs=True, refs=None):
399 def fetch(self, wire, url, apply_refs=True, refs=None):
400 if url != 'default' and '://' not in url:
400 if url != 'default' and '://' not in url:
401 client = LocalGitClient(url)
401 client = LocalGitClient(url)
402 else:
402 else:
403 url_obj = url_parser(url)
403 url_obj = url_parser(url)
404 o = self._build_opener(url)
404 o = self._build_opener(url)
405 url, _ = url_obj.authinfo()
405 url, _ = url_obj.authinfo()
406 client = HttpGitClient(base_url=url, opener=o)
406 client = HttpGitClient(base_url=url, opener=o)
407 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
408
408
409 determine_wants = repo.object_store.determine_wants_all
409 determine_wants = repo.object_store.determine_wants_all
410 if refs:
410 if refs:
411 def determine_wants_requested(references):
411 def determine_wants_requested(references):
412 return [references[r] for r in references if r in refs]
412 return [references[r] for r in references if r in refs]
413 determine_wants = determine_wants_requested
413 determine_wants = determine_wants_requested
414
414
415 try:
415 try:
416 remote_refs = client.fetch(
416 remote_refs = client.fetch(
417 path=url, target=repo, determine_wants=determine_wants)
417 path=url, target=repo, determine_wants=determine_wants)
418 except NotGitRepository as e:
418 except NotGitRepository as e:
419 log.warning(
419 log.warning(
420 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 'Trying to fetch from "%s" failed, not a Git repository.', url)
421 # Exception can contain unicode which we convert
421 # Exception can contain unicode which we convert
422 raise exceptions.AbortException(repr(e))
422 raise exceptions.AbortException(e)(repr(e))
423
423
424 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 # mikhail: client.fetch() returns all the remote refs, but fetches only
425 # refs filtered by `determine_wants` function. We need to filter result
425 # refs filtered by `determine_wants` function. We need to filter result
426 # as well
426 # as well
427 if refs:
427 if refs:
428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
429
429
430 if apply_refs:
430 if apply_refs:
431 # TODO: johbo: Needs proper test coverage with a git repository
431 # TODO: johbo: Needs proper test coverage with a git repository
432 # that contains a tag object, so that we would end up with
432 # that contains a tag object, so that we would end up with
433 # a peeled ref at this point.
433 # a peeled ref at this point.
434 PEELED_REF_MARKER = '^{}'
434 PEELED_REF_MARKER = '^{}'
435 for k in remote_refs:
435 for k in remote_refs:
436 if k.endswith(PEELED_REF_MARKER):
436 if k.endswith(PEELED_REF_MARKER):
437 log.info("Skipping peeled reference %s", k)
437 log.info("Skipping peeled reference %s", k)
438 continue
438 continue
439 repo[k] = remote_refs[k]
439 repo[k] = remote_refs[k]
440
440
441 if refs:
441 if refs:
442 # mikhail: explicitly set the head to the last ref.
442 # mikhail: explicitly set the head to the last ref.
443 repo['HEAD'] = remote_refs[refs[-1]]
443 repo['HEAD'] = remote_refs[refs[-1]]
444
444
445 # TODO: mikhail: should we return remote_refs here to be
445 # TODO: mikhail: should we return remote_refs here to be
446 # consistent?
446 # consistent?
447 else:
447 else:
448 return remote_refs
448 return remote_refs
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def sync_push(self, wire, url, refs=None):
451 def sync_push(self, wire, url, refs=None):
452 if self.check_url(url, wire):
452 if self.check_url(url, wire):
453 repo = self._factory.repo(wire)
453 repo = self._factory.repo(wire)
454 self.run_git_command(
454 self.run_git_command(
455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
456 _copts=['-c', 'core.askpass=""'],
456 _copts=['-c', 'core.askpass=""'],
457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
458
458
459 @reraise_safe_exceptions
459 @reraise_safe_exceptions
460 def get_remote_refs(self, wire, url):
460 def get_remote_refs(self, wire, url):
461 repo = Repo(url)
461 repo = Repo(url)
462 return repo.get_refs()
462 return repo.get_refs()
463
463
464 @reraise_safe_exceptions
464 @reraise_safe_exceptions
465 def get_description(self, wire):
465 def get_description(self, wire):
466 repo = self._factory.repo(wire)
466 repo = self._factory.repo(wire)
467 return repo.get_description()
467 return repo.get_description()
468
468
469 @reraise_safe_exceptions
469 @reraise_safe_exceptions
470 def get_file_history(self, wire, file_path, commit_id, limit):
470 def get_file_history(self, wire, file_path, commit_id, limit):
471 repo = self._factory.repo(wire)
471 repo = self._factory.repo(wire)
472 include = [commit_id]
472 include = [commit_id]
473 paths = [file_path]
473 paths = [file_path]
474
474
475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
476 return [x.commit.id for x in walker]
476 return [x.commit.id for x in walker]
477
477
478 @reraise_safe_exceptions
478 @reraise_safe_exceptions
479 def get_missing_revs(self, wire, rev1, rev2, path2):
479 def get_missing_revs(self, wire, rev1, rev2, path2):
480 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
481 LocalGitClient(thin_packs=False).fetch(path2, repo)
481 LocalGitClient(thin_packs=False).fetch(path2, repo)
482
482
483 wire_remote = wire.copy()
483 wire_remote = wire.copy()
484 wire_remote['path'] = path2
484 wire_remote['path'] = path2
485 repo_remote = self._factory.repo(wire_remote)
485 repo_remote = self._factory.repo(wire_remote)
486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
487
487
488 revs = [
488 revs = [
489 x.commit.id
489 x.commit.id
490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
491 return revs
491 return revs
492
492
493 @reraise_safe_exceptions
493 @reraise_safe_exceptions
494 def get_object(self, wire, sha):
494 def get_object(self, wire, sha):
495 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
496 obj = repo.get_object(sha)
496 obj = repo.get_object(sha)
497 commit_id = obj.id
497 commit_id = obj.id
498
498
499 if isinstance(obj, Tag):
499 if isinstance(obj, Tag):
500 commit_id = obj.object[1]
500 commit_id = obj.object[1]
501
501
502 return {
502 return {
503 'id': obj.id,
503 'id': obj.id,
504 'type': obj.type_name,
504 'type': obj.type_name,
505 'commit_id': commit_id
505 'commit_id': commit_id
506 }
506 }
507
507
508 @reraise_safe_exceptions
508 @reraise_safe_exceptions
509 def get_object_attrs(self, wire, sha, *attrs):
509 def get_object_attrs(self, wire, sha, *attrs):
510 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
511 obj = repo.get_object(sha)
511 obj = repo.get_object(sha)
512 return list(getattr(obj, a) for a in attrs)
512 return list(getattr(obj, a) for a in attrs)
513
513
514 @reraise_safe_exceptions
514 @reraise_safe_exceptions
515 def get_refs(self, wire):
515 def get_refs(self, wire):
516 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
517 result = {}
517 result = {}
518 for ref, sha in repo.refs.as_dict().items():
518 for ref, sha in repo.refs.as_dict().items():
519 peeled_sha = repo.get_peeled(ref)
519 peeled_sha = repo.get_peeled(ref)
520 result[ref] = peeled_sha
520 result[ref] = peeled_sha
521 return result
521 return result
522
522
523 @reraise_safe_exceptions
523 @reraise_safe_exceptions
524 def get_refs_path(self, wire):
524 def get_refs_path(self, wire):
525 repo = self._factory.repo(wire)
525 repo = self._factory.repo(wire)
526 return repo.refs.path
526 return repo.refs.path
527
527
528 @reraise_safe_exceptions
528 @reraise_safe_exceptions
529 def head(self, wire):
529 def head(self, wire):
530 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
531 return repo.head()
531 return repo.head()
532
532
533 @reraise_safe_exceptions
533 @reraise_safe_exceptions
534 def init(self, wire):
534 def init(self, wire):
535 repo_path = str_to_dulwich(wire['path'])
535 repo_path = str_to_dulwich(wire['path'])
536 self.repo = Repo.init(repo_path)
536 self.repo = Repo.init(repo_path)
537
537
538 @reraise_safe_exceptions
538 @reraise_safe_exceptions
539 def init_bare(self, wire):
539 def init_bare(self, wire):
540 repo_path = str_to_dulwich(wire['path'])
540 repo_path = str_to_dulwich(wire['path'])
541 self.repo = Repo.init_bare(repo_path)
541 self.repo = Repo.init_bare(repo_path)
542
542
543 @reraise_safe_exceptions
543 @reraise_safe_exceptions
544 def revision(self, wire, rev):
544 def revision(self, wire, rev):
545 repo = self._factory.repo(wire)
545 repo = self._factory.repo(wire)
546 obj = repo[rev]
546 obj = repo[rev]
547 obj_data = {
547 obj_data = {
548 'id': obj.id,
548 'id': obj.id,
549 }
549 }
550 try:
550 try:
551 obj_data['tree'] = obj.tree
551 obj_data['tree'] = obj.tree
552 except AttributeError:
552 except AttributeError:
553 pass
553 pass
554 return obj_data
554 return obj_data
555
555
556 @reraise_safe_exceptions
556 @reraise_safe_exceptions
557 def commit_attribute(self, wire, rev, attr):
557 def commit_attribute(self, wire, rev, attr):
558 repo = self._factory.repo(wire)
558 repo = self._factory.repo(wire)
559 obj = repo[rev]
559 obj = repo[rev]
560 return getattr(obj, attr)
560 return getattr(obj, attr)
561
561
562 @reraise_safe_exceptions
562 @reraise_safe_exceptions
563 def set_refs(self, wire, key, value):
563 def set_refs(self, wire, key, value):
564 repo = self._factory.repo(wire)
564 repo = self._factory.repo(wire)
565 repo.refs[key] = value
565 repo.refs[key] = value
566
566
567 @reraise_safe_exceptions
567 @reraise_safe_exceptions
568 def remove_ref(self, wire, key):
568 def remove_ref(self, wire, key):
569 repo = self._factory.repo(wire)
569 repo = self._factory.repo(wire)
570 del repo.refs[key]
570 del repo.refs[key]
571
571
572 @reraise_safe_exceptions
572 @reraise_safe_exceptions
573 def tree_changes(self, wire, source_id, target_id):
573 def tree_changes(self, wire, source_id, target_id):
574 repo = self._factory.repo(wire)
574 repo = self._factory.repo(wire)
575 source = repo[source_id].tree if source_id else None
575 source = repo[source_id].tree if source_id else None
576 target = repo[target_id].tree
576 target = repo[target_id].tree
577 result = repo.object_store.tree_changes(source, target)
577 result = repo.object_store.tree_changes(source, target)
578 return list(result)
578 return list(result)
579
579
580 @reraise_safe_exceptions
580 @reraise_safe_exceptions
581 def tree_items(self, wire, tree_id):
581 def tree_items(self, wire, tree_id):
582 repo = self._factory.repo(wire)
582 repo = self._factory.repo(wire)
583 tree = repo[tree_id]
583 tree = repo[tree_id]
584
584
585 result = []
585 result = []
586 for item in tree.iteritems():
586 for item in tree.iteritems():
587 item_sha = item.sha
587 item_sha = item.sha
588 item_mode = item.mode
588 item_mode = item.mode
589
589
590 if FILE_MODE(item_mode) == GIT_LINK:
590 if FILE_MODE(item_mode) == GIT_LINK:
591 item_type = "link"
591 item_type = "link"
592 else:
592 else:
593 item_type = repo[item_sha].type_name
593 item_type = repo[item_sha].type_name
594
594
595 result.append((item.path, item_mode, item_sha, item_type))
595 result.append((item.path, item_mode, item_sha, item_type))
596 return result
596 return result
597
597
598 @reraise_safe_exceptions
598 @reraise_safe_exceptions
599 def update_server_info(self, wire):
599 def update_server_info(self, wire):
600 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
601 update_server_info(repo)
601 update_server_info(repo)
602
602
603 @reraise_safe_exceptions
603 @reraise_safe_exceptions
604 def discover_git_version(self):
604 def discover_git_version(self):
605 stdout, _ = self.run_git_command(
605 stdout, _ = self.run_git_command(
606 {}, ['--version'], _bare=True, _safe=True)
606 {}, ['--version'], _bare=True, _safe=True)
607 prefix = 'git version'
607 prefix = 'git version'
608 if stdout.startswith(prefix):
608 if stdout.startswith(prefix):
609 stdout = stdout[len(prefix):]
609 stdout = stdout[len(prefix):]
610 return stdout.strip()
610 return stdout.strip()
611
611
612 @reraise_safe_exceptions
612 @reraise_safe_exceptions
613 def run_git_command(self, wire, cmd, **opts):
613 def run_git_command(self, wire, cmd, **opts):
614 path = wire.get('path', None)
614 path = wire.get('path', None)
615
615
616 if path and os.path.isdir(path):
616 if path and os.path.isdir(path):
617 opts['cwd'] = path
617 opts['cwd'] = path
618
618
619 if '_bare' in opts:
619 if '_bare' in opts:
620 _copts = []
620 _copts = []
621 del opts['_bare']
621 del opts['_bare']
622 else:
622 else:
623 _copts = ['-c', 'core.quotepath=false', ]
623 _copts = ['-c', 'core.quotepath=false', ]
624 safe_call = False
624 safe_call = False
625 if '_safe' in opts:
625 if '_safe' in opts:
626 # no exc on failure
626 # no exc on failure
627 del opts['_safe']
627 del opts['_safe']
628 safe_call = True
628 safe_call = True
629
629
630 if '_copts' in opts:
630 if '_copts' in opts:
631 _copts.extend(opts['_copts'] or [])
631 _copts.extend(opts['_copts'] or [])
632 del opts['_copts']
632 del opts['_copts']
633
633
634 gitenv = os.environ.copy()
634 gitenv = os.environ.copy()
635 gitenv.update(opts.pop('extra_env', {}))
635 gitenv.update(opts.pop('extra_env', {}))
636 # need to clean/fix GIT_DIR!
636 # need to clean/fix GIT_DIR!
637 if 'GIT_DIR' in gitenv:
637 if 'GIT_DIR' in gitenv:
638 del gitenv['GIT_DIR']
638 del gitenv['GIT_DIR']
639 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
639 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
640 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
640 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
641
641
642 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
642 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
643
643
644 try:
644 try:
645 _opts = {'env': gitenv, 'shell': False}
645 _opts = {'env': gitenv, 'shell': False}
646 _opts.update(opts)
646 _opts.update(opts)
647 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
647 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
648
648
649 return ''.join(p), ''.join(p.error)
649 return ''.join(p), ''.join(p.error)
650 except (EnvironmentError, OSError) as err:
650 except (EnvironmentError, OSError) as err:
651 cmd = ' '.join(cmd) # human friendly CMD
651 cmd = ' '.join(cmd) # human friendly CMD
652 tb_err = ("Couldn't run git command (%s).\n"
652 tb_err = ("Couldn't run git command (%s).\n"
653 "Original error was:%s\n" % (cmd, err))
653 "Original error was:%s\n" % (cmd, err))
654 log.exception(tb_err)
654 log.exception(tb_err)
655 if safe_call:
655 if safe_call:
656 return '', err
656 return '', err
657 else:
657 else:
658 raise exceptions.VcsException(tb_err)
658 raise exceptions.VcsException()(tb_err)
659
659
660 @reraise_safe_exceptions
660 @reraise_safe_exceptions
661 def install_hooks(self, wire, force=False):
661 def install_hooks(self, wire, force=False):
662 from vcsserver.hook_utils import install_git_hooks
662 from vcsserver.hook_utils import install_git_hooks
663 repo = self._factory.repo(wire)
663 repo = self._factory.repo(wire)
664 return install_git_hooks(repo.path, repo.bare, force_create=force)
664 return install_git_hooks(repo.path, repo.bare, force_create=force)
665
665
666
666
667 def str_to_dulwich(value):
667 def str_to_dulwich(value):
668 """
668 """
669 Dulwich 0.10.1a requires `unicode` objects to be passed in.
669 Dulwich 0.10.1a requires `unicode` objects to be passed in.
670 """
670 """
671 return value.decode(settings.WIRE_ENCODING)
671 return value.decode(settings.WIRE_ENCODING)
@@ -1,792 +1,793 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 baseui.setconfig('ui', 'paginate', 'never')
56 baseui.setconfig('ui', 'paginate', 'never')
57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # signal in a non-main thread, thus generating a ValueError.
58 # signal in a non-main thread, thus generating a ValueError.
59 baseui.setconfig('worker', 'numcpus', 1)
59 baseui.setconfig('worker', 'numcpus', 1)
60
60
61 # If there is no config for the largefiles extension, we explicitly disable
61 # If there is no config for the largefiles extension, we explicitly disable
62 # it here. This overrides settings from repositories hgrc file. Recent
62 # it here. This overrides settings from repositories hgrc file. Recent
63 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 # repo.
64 # repo.
65 if not baseui.hasconfig('extensions', 'largefiles'):
65 if not baseui.hasconfig('extensions', 'largefiles'):
66 log.debug('Explicitly disable largefiles extension for repo.')
66 log.debug('Explicitly disable largefiles extension for repo.')
67 baseui.setconfig('extensions', 'largefiles', '!')
67 baseui.setconfig('extensions', 'largefiles', '!')
68
68
69 return baseui
69 return baseui
70
70
71
71
72 def reraise_safe_exceptions(func):
72 def reraise_safe_exceptions(func):
73 """Decorator for converting mercurial exceptions to something neutral."""
73 """Decorator for converting mercurial exceptions to something neutral."""
74 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
75 try:
75 try:
76 return func(*args, **kwargs)
76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
77 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException)
78 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError:
79 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException)
80 raise_from_original(exceptions.LookupException(e))
81 except RequirementError:
81 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException)
82 raise_from_original(exceptions.RequirementException(e))
83 except RepoError:
83 except RepoError as e:
84 raise_from_original(exceptions.VcsException)
84 raise_from_original(exceptions.VcsException(e))
85 except LookupError:
85 except LookupError as e:
86 raise_from_original(exceptions.LookupException)
86 raise_from_original(exceptions.LookupException(e))
87 except Exception as e:
87 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException(e))
91
91 raise
92 raise
92 return wrapper
93 return wrapper
93
94
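With the updated decorator every mercurial error is first handed to the matching factory, which records it, and only then re-raised through `raise_from_original`. Assuming `raise_from_original` (defined in `vcsserver.base`, not shown in this diff) ultimately calls the object it is given to build the outgoing exception, a decorated method behaves roughly like this sketch (module paths and the failing body are assumptions):

```python
from vcsserver.hg import reraise_safe_exceptions  # assumed module path
from vcsserver.hgcompat import RepoLookupError


@reraise_safe_exceptions
def lookup_something(wire, rev):
    # stand-in body: any HgRemote method that hits an unknown revision
    raise RepoLookupError('unknown revision %s' % rev)


try:
    lookup_something({}, 'does-not-exist')
except Exception as exc:
    # expected outcome under the stated assumption about raise_from_original
    assert getattr(exc, '_vcs_kind', None) == 'lookup'
    assert isinstance(getattr(exc, '_org_exc', None), RepoLookupError)
```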
94
95
95 class MercurialFactory(RepoFactory):
96 class MercurialFactory(RepoFactory):
96 repo_type = 'hg'
97 repo_type = 'hg'
97
98
98 def _create_config(self, config, hooks=True):
99 def _create_config(self, config, hooks=True):
99 if not hooks:
100 if not hooks:
100 hooks_to_clean = frozenset((
101 hooks_to_clean = frozenset((
101 'changegroup.repo_size', 'preoutgoing.pre_pull',
102 'changegroup.repo_size', 'preoutgoing.pre_pull',
102 'outgoing.pull_logger', 'prechangegroup.pre_push'))
103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
103 new_config = []
104 new_config = []
104 for section, option, value in config:
105 for section, option, value in config:
105 if section == 'hooks' and option in hooks_to_clean:
106 if section == 'hooks' and option in hooks_to_clean:
106 continue
107 continue
107 new_config.append((section, option, value))
108 new_config.append((section, option, value))
108 config = new_config
109 config = new_config
109
110
110 baseui = make_ui_from_config(config)
111 baseui = make_ui_from_config(config)
111 return baseui
112 return baseui
112
113
113 def _create_repo(self, wire, create):
114 def _create_repo(self, wire, create):
114 baseui = self._create_config(wire["config"])
115 baseui = self._create_config(wire["config"])
115 return localrepository(baseui, wire["path"], create)
116 return localrepository(baseui, wire["path"], create)
116
117
117
118
118 class HgRemote(object):
119 class HgRemote(object):
119
120
120 def __init__(self, factory):
121 def __init__(self, factory):
121 self._factory = factory
122 self._factory = factory
122
123
123 self._bulk_methods = {
124 self._bulk_methods = {
124 "affected_files": self.ctx_files,
125 "affected_files": self.ctx_files,
125 "author": self.ctx_user,
126 "author": self.ctx_user,
126 "branch": self.ctx_branch,
127 "branch": self.ctx_branch,
127 "children": self.ctx_children,
128 "children": self.ctx_children,
128 "date": self.ctx_date,
129 "date": self.ctx_date,
129 "message": self.ctx_description,
130 "message": self.ctx_description,
130 "parents": self.ctx_parents,
131 "parents": self.ctx_parents,
131 "status": self.ctx_status,
132 "status": self.ctx_status,
132 "obsolete": self.ctx_obsolete,
133 "obsolete": self.ctx_obsolete,
133 "phase": self.ctx_phase,
134 "phase": self.ctx_phase,
134 "hidden": self.ctx_hidden,
135 "hidden": self.ctx_hidden,
135 "_file_paths": self.ctx_list,
136 "_file_paths": self.ctx_list,
136 }
137 }
137
138
138 @reraise_safe_exceptions
139 @reraise_safe_exceptions
139 def discover_hg_version(self):
140 def discover_hg_version(self):
140 from mercurial import util
141 from mercurial import util
141 return util.version()
142 return util.version()
142
143
143 @reraise_safe_exceptions
144 @reraise_safe_exceptions
144 def archive_repo(self, archive_path, mtime, file_info, kind):
145 def archive_repo(self, archive_path, mtime, file_info, kind):
145 if kind == "tgz":
146 if kind == "tgz":
146 archiver = archival.tarit(archive_path, mtime, "gz")
147 archiver = archival.tarit(archive_path, mtime, "gz")
147 elif kind == "tbz2":
148 elif kind == "tbz2":
148 archiver = archival.tarit(archive_path, mtime, "bz2")
149 archiver = archival.tarit(archive_path, mtime, "bz2")
149 elif kind == 'zip':
150 elif kind == 'zip':
150 archiver = archival.zipit(archive_path, mtime)
151 archiver = archival.zipit(archive_path, mtime)
151 else:
152 else:
152 raise exceptions.ArchiveException(
153 raise exceptions.ArchiveException()(
153 'Remote does not support: "%s".' % kind)
154 'Remote does not support: "%s".' % kind)
154
155
155 for f_path, f_mode, f_is_link, f_content in file_info:
156 for f_path, f_mode, f_is_link, f_content in file_info:
156 archiver.addfile(f_path, f_mode, f_is_link, f_content)
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
157 archiver.done()
158 archiver.done()
158
159
159 @reraise_safe_exceptions
160 @reraise_safe_exceptions
160 def bookmarks(self, wire):
161 def bookmarks(self, wire):
161 repo = self._factory.repo(wire)
162 repo = self._factory.repo(wire)
162 return dict(repo._bookmarks)
163 return dict(repo._bookmarks)
163
164
164 @reraise_safe_exceptions
165 @reraise_safe_exceptions
165 def branches(self, wire, normal, closed):
166 def branches(self, wire, normal, closed):
166 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
167 iter_branches = repo.branchmap().iterbranches()
168 iter_branches = repo.branchmap().iterbranches()
168 bt = {}
169 bt = {}
169 for branch_name, _heads, tip, is_closed in iter_branches:
170 for branch_name, _heads, tip, is_closed in iter_branches:
170 if normal and not is_closed:
171 if normal and not is_closed:
171 bt[branch_name] = tip
172 bt[branch_name] = tip
172 if closed and is_closed:
173 if closed and is_closed:
173 bt[branch_name] = tip
174 bt[branch_name] = tip
174
175
175 return bt
176 return bt
176
177
177 @reraise_safe_exceptions
178 @reraise_safe_exceptions
178 def bulk_request(self, wire, rev, pre_load):
179 def bulk_request(self, wire, rev, pre_load):
179 result = {}
180 result = {}
180 for attr in pre_load:
181 for attr in pre_load:
181 try:
182 try:
182 method = self._bulk_methods[attr]
183 method = self._bulk_methods[attr]
183 result[attr] = method(wire, rev)
184 result[attr] = method(wire, rev)
184 except KeyError:
185 except KeyError as e:
185 raise exceptions.VcsException(
186 raise exceptions.VcsException(e)(
186 'Unknown bulk attribute: "%s"' % attr)
187 'Unknown bulk attribute: "%s"' % attr)
187 return result
188 return result
188
189
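A hypothetical use of `bulk_request`, assuming an already wired-up HgRemote instance; the attribute names come from the `_bulk_methods` mapping above, and anything not listed there raises the wrapped VcsException:

    def load_commit_metadata(remote, wire, rev):
        # `remote` is assumed to be an HgRemote instance and `wire` the usual
        # connection dict; only attributes known to _bulk_methods are allowed.
        return remote.bulk_request(
            wire, rev, pre_load=['author', 'date', 'message', 'parents'])
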
189 @reraise_safe_exceptions
190 @reraise_safe_exceptions
190 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
191 baseui = self._factory._create_config(wire["config"], hooks=hooks)
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
192 clone(baseui, source, dest, noupdate=not update_after_clone)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
193
194
194 @reraise_safe_exceptions
195 @reraise_safe_exceptions
195 def commitctx(
196 def commitctx(
196 self, wire, message, parents, commit_time, commit_timezone,
197 self, wire, message, parents, commit_time, commit_timezone,
197 user, files, extra, removed, updated):
198 user, files, extra, removed, updated):
198
199
199 def _filectxfn(_repo, memctx, path):
200 def _filectxfn(_repo, memctx, path):
200 """
201 """
201 Marks the given path as added/changed/removed in the given _repo. This is
202 Marks the given path as added/changed/removed in the given _repo. This is
202 used by Mercurial's internal commit function.
203 used by Mercurial's internal commit function.
203 """
204 """
204
205
205 # check if this path is removed
206 # check if this path is removed
206 if path in removed:
207 if path in removed:
207 # returning None is a way to mark node for removal
208 # returning None is a way to mark node for removal
208 return None
209 return None
209
210
210 # check if this path is added
211 # check if this path is added
211 for node in updated:
212 for node in updated:
212 if node['path'] == path:
213 if node['path'] == path:
213 return memfilectx(
214 return memfilectx(
214 _repo,
215 _repo,
215 changectx=memctx,
216 changectx=memctx,
216 path=node['path'],
217 path=node['path'],
217 data=node['content'],
218 data=node['content'],
218 islink=False,
219 islink=False,
219 isexec=bool(node['mode'] & stat.S_IXUSR),
220 isexec=bool(node['mode'] & stat.S_IXUSR),
220 copied=False)
221 copied=False)
221
222
222 raise exceptions.AbortException(
223 raise exceptions.AbortException()(
223 "Given path haven't been marked as added, "
224 "Given path haven't been marked as added, "
224 "changed or removed (%s)" % path)
225 "changed or removed (%s)" % path)
225
226
226 repo = self._factory.repo(wire)
227 repo = self._factory.repo(wire)
227
228
228 commit_ctx = memctx(
229 commit_ctx = memctx(
229 repo=repo,
230 repo=repo,
230 parents=parents,
231 parents=parents,
231 text=message,
232 text=message,
232 files=files,
233 files=files,
233 filectxfn=_filectxfn,
234 filectxfn=_filectxfn,
234 user=user,
235 user=user,
235 date=(commit_time, commit_timezone),
236 date=(commit_time, commit_timezone),
236 extra=extra)
237 extra=extra)
237
238
238 n = repo.commitctx(commit_ctx)
239 n = repo.commitctx(commit_ctx)
239 new_id = hex(n)
240 new_id = hex(n)
240
241
241 return new_id
242 return new_id
242
243
243 @reraise_safe_exceptions
244 @reraise_safe_exceptions
244 def ctx_branch(self, wire, revision):
245 def ctx_branch(self, wire, revision):
245 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
246 ctx = repo[revision]
247 ctx = repo[revision]
247 return ctx.branch()
248 return ctx.branch()
248
249
249 @reraise_safe_exceptions
250 @reraise_safe_exceptions
250 def ctx_children(self, wire, revision):
251 def ctx_children(self, wire, revision):
251 repo = self._factory.repo(wire)
252 repo = self._factory.repo(wire)
252 ctx = repo[revision]
253 ctx = repo[revision]
253 return [child.rev() for child in ctx.children()]
254 return [child.rev() for child in ctx.children()]
254
255
255 @reraise_safe_exceptions
256 @reraise_safe_exceptions
256 def ctx_date(self, wire, revision):
257 def ctx_date(self, wire, revision):
257 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
258 ctx = repo[revision]
259 ctx = repo[revision]
259 return ctx.date()
260 return ctx.date()
260
261
261 @reraise_safe_exceptions
262 @reraise_safe_exceptions
262 def ctx_description(self, wire, revision):
263 def ctx_description(self, wire, revision):
263 repo = self._factory.repo(wire)
264 repo = self._factory.repo(wire)
264 ctx = repo[revision]
265 ctx = repo[revision]
265 return ctx.description()
266 return ctx.description()
266
267
267 @reraise_safe_exceptions
268 @reraise_safe_exceptions
268 def ctx_diff(
269 def ctx_diff(
269 self, wire, revision, git=True, ignore_whitespace=True, context=3):
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
270 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
271 ctx = repo[revision]
272 ctx = repo[revision]
272 result = ctx.diff(
273 result = ctx.diff(
273 git=git, ignore_whitespace=ignore_whitespace, context=context)
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
274 return list(result)
275 return list(result)
275
276
276 @reraise_safe_exceptions
277 @reraise_safe_exceptions
277 def ctx_files(self, wire, revision):
278 def ctx_files(self, wire, revision):
278 repo = self._factory.repo(wire)
279 repo = self._factory.repo(wire)
279 ctx = repo[revision]
280 ctx = repo[revision]
280 return ctx.files()
281 return ctx.files()
281
282
282 @reraise_safe_exceptions
283 @reraise_safe_exceptions
283 def ctx_list(self, path, revision):
284 def ctx_list(self, path, revision):
284 repo = self._factory.repo(path)
285 repo = self._factory.repo(path)
285 ctx = repo[revision]
286 ctx = repo[revision]
286 return list(ctx)
287 return list(ctx)
287
288
288 @reraise_safe_exceptions
289 @reraise_safe_exceptions
289 def ctx_parents(self, wire, revision):
290 def ctx_parents(self, wire, revision):
290 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
291 ctx = repo[revision]
292 ctx = repo[revision]
292 return [parent.rev() for parent in ctx.parents()]
293 return [parent.rev() for parent in ctx.parents()]
293
294
294 @reraise_safe_exceptions
295 @reraise_safe_exceptions
295 def ctx_phase(self, wire, revision):
296 def ctx_phase(self, wire, revision):
296 repo = self._factory.repo(wire)
297 repo = self._factory.repo(wire)
297 ctx = repo[revision]
298 ctx = repo[revision]
298 # public=0, draft=1, secret=3
299 # public=0, draft=1, secret=3
299 return ctx.phase()
300 return ctx.phase()
300
301
301 @reraise_safe_exceptions
302 @reraise_safe_exceptions
302 def ctx_obsolete(self, wire, revision):
303 def ctx_obsolete(self, wire, revision):
303 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
304 ctx = repo[revision]
305 ctx = repo[revision]
305 return ctx.obsolete()
306 return ctx.obsolete()
306
307
307 @reraise_safe_exceptions
308 @reraise_safe_exceptions
308 def ctx_hidden(self, wire, revision):
309 def ctx_hidden(self, wire, revision):
309 repo = self._factory.repo(wire)
310 repo = self._factory.repo(wire)
310 ctx = repo[revision]
311 ctx = repo[revision]
311 return ctx.hidden()
312 return ctx.hidden()
312
313
313 @reraise_safe_exceptions
314 @reraise_safe_exceptions
314 def ctx_substate(self, wire, revision):
315 def ctx_substate(self, wire, revision):
315 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
316 ctx = repo[revision]
317 ctx = repo[revision]
317 return ctx.substate
318 return ctx.substate
318
319
319 @reraise_safe_exceptions
320 @reraise_safe_exceptions
320 def ctx_status(self, wire, revision):
321 def ctx_status(self, wire, revision):
321 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
322 ctx = repo[revision]
323 ctx = repo[revision]
323 status = repo[ctx.p1().node()].status(other=ctx.node())
324 status = repo[ctx.p1().node()].status(other=ctx.node())
324 # object of status (odd, custom named tuple in mercurial) is not
325 # object of status (odd, custom named tuple in mercurial) is not
325 # correctly serializable, so we make it a list, as the underlying
326 # correctly serializable, so we make it a list, as the underlying
326 # API expects this to be a list
327 # API expects this to be a list
327 return list(status)
328 return list(status)
328
329
329 @reraise_safe_exceptions
330 @reraise_safe_exceptions
330 def ctx_user(self, wire, revision):
331 def ctx_user(self, wire, revision):
331 repo = self._factory.repo(wire)
332 repo = self._factory.repo(wire)
332 ctx = repo[revision]
333 ctx = repo[revision]
333 return ctx.user()
334 return ctx.user()
334
335
335 @reraise_safe_exceptions
336 @reraise_safe_exceptions
336 def check_url(self, url, config):
337 def check_url(self, url, config):
337 _proto = None
338 _proto = None
338 if '+' in url[:url.find('://')]:
339 if '+' in url[:url.find('://')]:
339 _proto = url[0:url.find('+')]
340 _proto = url[0:url.find('+')]
340 url = url[url.find('+') + 1:]
341 url = url[url.find('+') + 1:]
341 handlers = []
342 handlers = []
342 url_obj = url_parser(url)
343 url_obj = url_parser(url)
343 test_uri, authinfo = url_obj.authinfo()
344 test_uri, authinfo = url_obj.authinfo()
344 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
345 url_obj.query = obfuscate_qs(url_obj.query)
346 url_obj.query = obfuscate_qs(url_obj.query)
346
347
347 cleaned_uri = str(url_obj)
348 cleaned_uri = str(url_obj)
348 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
349
350
350 if authinfo:
351 if authinfo:
351 # create a password manager
352 # create a password manager
352 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
353 passmgr.add_password(*authinfo)
354 passmgr.add_password(*authinfo)
354
355
355 handlers.extend((httpbasicauthhandler(passmgr),
356 handlers.extend((httpbasicauthhandler(passmgr),
356 httpdigestauthhandler(passmgr)))
357 httpdigestauthhandler(passmgr)))
357
358
358 o = urllib2.build_opener(*handlers)
359 o = urllib2.build_opener(*handlers)
359 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
360 ('Accept', 'application/mercurial-0.1')]
361 ('Accept', 'application/mercurial-0.1')]
361
362
362 q = {"cmd": 'between'}
363 q = {"cmd": 'between'}
363 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
364 qs = '?%s' % urllib.urlencode(q)
365 qs = '?%s' % urllib.urlencode(q)
365 cu = "%s%s" % (test_uri, qs)
366 cu = "%s%s" % (test_uri, qs)
366 req = urllib2.Request(cu, None, {})
367 req = urllib2.Request(cu, None, {})
367
368
368 try:
369 try:
369 log.debug("Trying to open URL %s", cleaned_uri)
370 log.debug("Trying to open URL %s", cleaned_uri)
370 resp = o.open(req)
371 resp = o.open(req)
371 if resp.code != 200:
372 if resp.code != 200:
372 raise exceptions.URLError('Return Code is not 200')
373 raise exceptions.URLError()('Return Code is not 200')
373 except Exception as e:
374 except Exception as e:
374 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 # means it cannot be cloned
376 # means it cannot be cloned
376 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
377
378
378 # now check if it's a proper hg repo, but don't do it for svn
379 # now check if it's a proper hg repo, but don't do it for svn
379 try:
380 try:
380 if _proto == 'svn':
381 if _proto == 'svn':
381 pass
382 pass
382 else:
383 else:
383 # check for pure hg repos
384 # check for pure hg repos
384 log.debug(
385 log.debug(
385 "Verifying if URL is a Mercurial repository: %s",
386 "Verifying if URL is a Mercurial repository: %s",
386 cleaned_uri)
387 cleaned_uri)
387 ui = make_ui_from_config(config)
388 ui = make_ui_from_config(config)
388 peer_checker = makepeer(ui, url)
389 peer_checker = makepeer(ui, url)
389 peer_checker.lookup('tip')
390 peer_checker.lookup('tip')
390 except Exception as e:
391 except Exception as e:
391 log.warning("URL is not a valid Mercurial repository: %s",
392 log.warning("URL is not a valid Mercurial repository: %s",
392 cleaned_uri)
393 cleaned_uri)
393 raise exceptions.URLError(
394 raise exceptions.URLError(e)(
394 "url [%s] does not look like an hg repo org_exc: %s"
395 "url [%s] does not look like an hg repo org_exc: %s"
395 % (cleaned_uri, e))
396 % (cleaned_uri, e))
396
397
397 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
398 return True
399 return True
399
400
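The probe built above boils down to a single URL against Mercurial's HTTP protocol. A small sketch that mirrors the same construction (the host and repository in the comment are placeholders):

    import urllib

    def build_between_probe(test_uri):
        # a 'between' query over a null-to-null revision pair is used here
        # purely as a cheap reachability check, matching the code above
        q = {'cmd': 'between', 'pairs': '%s-%s' % ('0' * 40, '0' * 40)}
        return '%s?%s' % (test_uri, urllib.urlencode(q))

    # build_between_probe('https://host.example/repo')
    # -> 'https://host.example/repo?cmd=between&pairs=000...0-000...0'
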
400 @reraise_safe_exceptions
401 @reraise_safe_exceptions
401 def diff(
402 def diff(
402 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
403 context):
404 context):
404 repo = self._factory.repo(wire)
405 repo = self._factory.repo(wire)
405
406
406 if file_filter:
407 if file_filter:
407 match_filter = match(file_filter[0], '', [file_filter[1]])
408 match_filter = match(file_filter[0], '', [file_filter[1]])
408 else:
409 else:
409 match_filter = file_filter
410 match_filter = file_filter
410 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
411
412
412 try:
413 try:
413 return "".join(patch.diff(
414 return "".join(patch.diff(
414 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
415 except RepoLookupError:
416 except RepoLookupError as e:
416 raise exceptions.LookupException()
417 raise exceptions.LookupException(e)()
417
418
418 @reraise_safe_exceptions
419 @reraise_safe_exceptions
419 def file_history(self, wire, revision, path, limit):
420 def file_history(self, wire, revision, path, limit):
420 repo = self._factory.repo(wire)
421 repo = self._factory.repo(wire)
421
422
422 ctx = repo[revision]
423 ctx = repo[revision]
423 fctx = ctx.filectx(path)
424 fctx = ctx.filectx(path)
424
425
425 def history_iter():
426 def history_iter():
426 limit_rev = fctx.rev()
427 limit_rev = fctx.rev()
427 for obj in reversed(list(fctx.filelog())):
428 for obj in reversed(list(fctx.filelog())):
428 obj = fctx.filectx(obj)
429 obj = fctx.filectx(obj)
429 if limit_rev >= obj.rev():
430 if limit_rev >= obj.rev():
430 yield obj
431 yield obj
431
432
432 history = []
433 history = []
433 for cnt, obj in enumerate(history_iter()):
434 for cnt, obj in enumerate(history_iter()):
434 if limit and cnt >= limit:
435 if limit and cnt >= limit:
435 break
436 break
436 history.append(hex(obj.node()))
437 history.append(hex(obj.node()))
437
438
438 return [x for x in history]
439 return [x for x in history]
439
440
440 @reraise_safe_exceptions
441 @reraise_safe_exceptions
441 def file_history_untill(self, wire, revision, path, limit):
442 def file_history_untill(self, wire, revision, path, limit):
442 repo = self._factory.repo(wire)
443 repo = self._factory.repo(wire)
443 ctx = repo[revision]
444 ctx = repo[revision]
444 fctx = ctx.filectx(path)
445 fctx = ctx.filectx(path)
445
446
446 file_log = list(fctx.filelog())
447 file_log = list(fctx.filelog())
447 if limit:
448 if limit:
448 # Limit to the last n items
449 # Limit to the last n items
449 file_log = file_log[-limit:]
450 file_log = file_log[-limit:]
450
451
451 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
452
453
453 @reraise_safe_exceptions
454 @reraise_safe_exceptions
454 def fctx_annotate(self, wire, revision, path):
455 def fctx_annotate(self, wire, revision, path):
455 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
456 ctx = repo[revision]
457 ctx = repo[revision]
457 fctx = ctx.filectx(path)
458 fctx = ctx.filectx(path)
458
459
459 result = []
460 result = []
460 for i, annotate_obj in enumerate(fctx.annotate(), 1):
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
461 ln_no = i
462 ln_no = i
462 sha = hex(annotate_obj.fctx.node())
463 sha = hex(annotate_obj.fctx.node())
463 content = annotate_obj.text
464 content = annotate_obj.text
464 result.append((ln_no, sha, content))
465 result.append((ln_no, sha, content))
465 return result
466 return result
466
467
467 @reraise_safe_exceptions
468 @reraise_safe_exceptions
468 def fctx_data(self, wire, revision, path):
469 def fctx_data(self, wire, revision, path):
469 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
470 ctx = repo[revision]
471 ctx = repo[revision]
471 fctx = ctx.filectx(path)
472 fctx = ctx.filectx(path)
472 return fctx.data()
473 return fctx.data()
473
474
474 @reraise_safe_exceptions
475 @reraise_safe_exceptions
475 def fctx_flags(self, wire, revision, path):
476 def fctx_flags(self, wire, revision, path):
476 repo = self._factory.repo(wire)
477 repo = self._factory.repo(wire)
477 ctx = repo[revision]
478 ctx = repo[revision]
478 fctx = ctx.filectx(path)
479 fctx = ctx.filectx(path)
479 return fctx.flags()
480 return fctx.flags()
480
481
481 @reraise_safe_exceptions
482 @reraise_safe_exceptions
482 def fctx_size(self, wire, revision, path):
483 def fctx_size(self, wire, revision, path):
483 repo = self._factory.repo(wire)
484 repo = self._factory.repo(wire)
484 ctx = repo[revision]
485 ctx = repo[revision]
485 fctx = ctx.filectx(path)
486 fctx = ctx.filectx(path)
486 return fctx.size()
487 return fctx.size()
487
488
488 @reraise_safe_exceptions
489 @reraise_safe_exceptions
489 def get_all_commit_ids(self, wire, name):
490 def get_all_commit_ids(self, wire, name):
490 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
491 revs = repo.filtered(name).changelog.index
492 revs = repo.filtered(name).changelog.index
492 return map(lambda x: hex(x[7]), revs)[:-1]
493 return map(lambda x: hex(x[7]), revs)[:-1]
493
494
494 @reraise_safe_exceptions
495 @reraise_safe_exceptions
495 def get_config_value(self, wire, section, name, untrusted=False):
496 def get_config_value(self, wire, section, name, untrusted=False):
496 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
497 return repo.ui.config(section, name, untrusted=untrusted)
498 return repo.ui.config(section, name, untrusted=untrusted)
498
499
499 @reraise_safe_exceptions
500 @reraise_safe_exceptions
500 def get_config_bool(self, wire, section, name, untrusted=False):
501 def get_config_bool(self, wire, section, name, untrusted=False):
501 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
502 return repo.ui.configbool(section, name, untrusted=untrusted)
503 return repo.ui.configbool(section, name, untrusted=untrusted)
503
504
504 @reraise_safe_exceptions
505 @reraise_safe_exceptions
505 def get_config_list(self, wire, section, name, untrusted=False):
506 def get_config_list(self, wire, section, name, untrusted=False):
506 repo = self._factory.repo(wire)
507 repo = self._factory.repo(wire)
507 return repo.ui.configlist(section, name, untrusted=untrusted)
508 return repo.ui.configlist(section, name, untrusted=untrusted)
508
509
509 @reraise_safe_exceptions
510 @reraise_safe_exceptions
510 def is_large_file(self, wire, path):
511 def is_large_file(self, wire, path):
511 return largefiles.lfutil.isstandin(path)
512 return largefiles.lfutil.isstandin(path)
512
513
513 @reraise_safe_exceptions
514 @reraise_safe_exceptions
514 def in_largefiles_store(self, wire, sha):
515 def in_largefiles_store(self, wire, sha):
515 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
516 return largefiles.lfutil.instore(repo, sha)
517 return largefiles.lfutil.instore(repo, sha)
517
518
518 @reraise_safe_exceptions
519 @reraise_safe_exceptions
519 def in_user_cache(self, wire, sha):
520 def in_user_cache(self, wire, sha):
520 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
521 return largefiles.lfutil.inusercache(repo.ui, sha)
522 return largefiles.lfutil.inusercache(repo.ui, sha)
522
523
523 @reraise_safe_exceptions
524 @reraise_safe_exceptions
524 def store_path(self, wire, sha):
525 def store_path(self, wire, sha):
525 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
526 return largefiles.lfutil.storepath(repo, sha)
527 return largefiles.lfutil.storepath(repo, sha)
527
528
528 @reraise_safe_exceptions
529 @reraise_safe_exceptions
529 def link(self, wire, sha, path):
530 def link(self, wire, sha, path):
530 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
531 largefiles.lfutil.link(
532 largefiles.lfutil.link(
532 largefiles.lfutil.usercachepath(repo.ui, sha), path)
533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
533
534
534 @reraise_safe_exceptions
535 @reraise_safe_exceptions
535 def localrepository(self, wire, create=False):
536 def localrepository(self, wire, create=False):
536 self._factory.repo(wire, create=create)
537 self._factory.repo(wire, create=create)
537
538
538 @reraise_safe_exceptions
539 @reraise_safe_exceptions
539 def lookup(self, wire, revision, both):
540 def lookup(self, wire, revision, both):
540
541
541 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
542
543
543 if isinstance(revision, int):
544 if isinstance(revision, int):
544 # NOTE(marcink):
545 # NOTE(marcink):
545 # since Mercurial doesn't support indexes properly
546 # since Mercurial doesn't support indexes properly
546 # we need to shift accordingly by one to get the proper index, e.g.
547 # we need to shift accordingly by one to get the proper index, e.g.
547 # repo[-1] => repo[-2]
548 # repo[-1] => repo[-2]
548 # repo[0] => repo[-1]
549 # repo[0] => repo[-1]
549 # repo[1] => repo[2]; we also never call repo[0] because
550 # repo[1] => repo[2]; we also never call repo[0] because
550 # it's actually the second commit
551 # it's actually the second commit
551 if revision <= 0:
552 if revision <= 0:
552 revision = revision + -1
553 revision = revision + -1
553 else:
554 else:
554 revision = revision + 1
555 revision = revision + 1
555
556
556 try:
557 try:
557 ctx = repo[revision]
558 ctx = repo[revision]
558 except RepoLookupError:
559 except RepoLookupError as e:
559 raise exceptions.LookupException(revision)
560 raise exceptions.LookupException(e)(revision)
560 except LookupError as e:
561 except LookupError as e:
561 raise exceptions.LookupException(e.name)
562 raise exceptions.LookupException(e)(e.name)
562
563
563 if not both:
564 if not both:
564 return ctx.hex()
565 return ctx.hex()
565
566
566 ctx = repo[ctx.hex()]
567 ctx = repo[ctx.hex()]
567 return ctx.hex(), ctx.rev()
568 return ctx.hex(), ctx.rev()
568
569
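The shift documented in the note above can be restated as a tiny pure function, shown here only to make the mapping explicit:

    def shift_mercurial_index(revision):
        # mirrors the adjustment applied in lookup() above
        if revision <= 0:
            return revision - 1
        return revision + 1

    assert shift_mercurial_index(-1) == -2
    assert shift_mercurial_index(0) == -1
    assert shift_mercurial_index(1) == 2
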
569 @reraise_safe_exceptions
570 @reraise_safe_exceptions
570 def pull(self, wire, url, commit_ids=None):
571 def pull(self, wire, url, commit_ids=None):
571 repo = self._factory.repo(wire)
572 repo = self._factory.repo(wire)
572 # Disable any prompts for this repo
573 # Disable any prompts for this repo
573 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
574
575
575 remote = peer(repo, {}, url)
576 remote = peer(repo, {}, url)
576 # Disable any prompts for this remote
577 # Disable any prompts for this remote
577 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
578
579
579 if commit_ids:
580 if commit_ids:
580 commit_ids = [bin(commit_id) for commit_id in commit_ids]
581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
581
582
582 return exchange.pull(
583 return exchange.pull(
583 repo, remote, heads=commit_ids, force=None).cgresult
584 repo, remote, heads=commit_ids, force=None).cgresult
584
585
585 @reraise_safe_exceptions
586 @reraise_safe_exceptions
586 def sync_push(self, wire, url):
587 def sync_push(self, wire, url):
587 if self.check_url(url, wire['config']):
588 if self.check_url(url, wire['config']):
588 repo = self._factory.repo(wire)
589 repo = self._factory.repo(wire)
589
590
590 # Disable any prompts for this repo
591 # Disable any prompts for this repo
591 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
592 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
592
593
593 bookmarks = dict(repo._bookmarks).keys()
594 bookmarks = dict(repo._bookmarks).keys()
594 remote = peer(repo, {}, url)
595 remote = peer(repo, {}, url)
595 # Disable any prompts for this remote
596 # Disable any prompts for this remote
596 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
597 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
597
598
598 return exchange.push(
599 return exchange.push(
599 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
600 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
600
601
601 @reraise_safe_exceptions
602 @reraise_safe_exceptions
602 def revision(self, wire, rev):
603 def revision(self, wire, rev):
603 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
604 ctx = repo[rev]
605 ctx = repo[rev]
605 return ctx.rev()
606 return ctx.rev()
606
607
607 @reraise_safe_exceptions
608 @reraise_safe_exceptions
608 def rev_range(self, wire, filter):
609 def rev_range(self, wire, filter):
609 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
610 revisions = [rev for rev in revrange(repo, filter)]
611 revisions = [rev for rev in revrange(repo, filter)]
611 return revisions
612 return revisions
612
613
613 @reraise_safe_exceptions
614 @reraise_safe_exceptions
614 def rev_range_hash(self, wire, node):
615 def rev_range_hash(self, wire, node):
615 repo = self._factory.repo(wire)
616 repo = self._factory.repo(wire)
616
617
617 def get_revs(repo, rev_opt):
618 def get_revs(repo, rev_opt):
618 if rev_opt:
619 if rev_opt:
619 revs = revrange(repo, rev_opt)
620 revs = revrange(repo, rev_opt)
620 if len(revs) == 0:
621 if len(revs) == 0:
621 return (nullrev, nullrev)
622 return (nullrev, nullrev)
622 return max(revs), min(revs)
623 return max(revs), min(revs)
623 else:
624 else:
624 return len(repo) - 1, 0
625 return len(repo) - 1, 0
625
626
626 stop, start = get_revs(repo, [node + ':'])
627 stop, start = get_revs(repo, [node + ':'])
627 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
628 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
628 return revs
629 return revs
629
630
630 @reraise_safe_exceptions
631 @reraise_safe_exceptions
631 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
632 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
632 other_path = kwargs.pop('other_path', None)
633 other_path = kwargs.pop('other_path', None)
633
634
634 # case when we want to compare two independent repositories
635 # case when we want to compare two independent repositories
635 if other_path and other_path != wire["path"]:
636 if other_path and other_path != wire["path"]:
636 baseui = self._factory._create_config(wire["config"])
637 baseui = self._factory._create_config(wire["config"])
637 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
638 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
638 else:
639 else:
639 repo = self._factory.repo(wire)
640 repo = self._factory.repo(wire)
640 return list(repo.revs(rev_spec, *args))
641 return list(repo.revs(rev_spec, *args))
641
642
642 @reraise_safe_exceptions
643 @reraise_safe_exceptions
643 def strip(self, wire, revision, update, backup):
644 def strip(self, wire, revision, update, backup):
644 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
645 ctx = repo[revision]
646 ctx = repo[revision]
646 hgext_strip(
647 hgext_strip(
647 repo.baseui, repo, ctx.node(), update=update, backup=backup)
648 repo.baseui, repo, ctx.node(), update=update, backup=backup)
648
649
649 @reraise_safe_exceptions
650 @reraise_safe_exceptions
650 def verify(self, wire,):
651 def verify(self, wire,):
651 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
652 baseui = self._factory._create_config(wire['config'])
653 baseui = self._factory._create_config(wire['config'])
653 baseui.setconfig('ui', 'quiet', 'false')
654 baseui.setconfig('ui', 'quiet', 'false')
654 output = io.BytesIO()
655 output = io.BytesIO()
655
656
656 def write(data, **unused_kwargs):
657 def write(data, **unused_kwargs):
657 output.write(data)
658 output.write(data)
658 baseui.write = write
659 baseui.write = write
659
660
660 repo.ui = baseui
661 repo.ui = baseui
661 verify.verify(repo)
662 verify.verify(repo)
662 return output.getvalue()
663 return output.getvalue()
663
664
664 @reraise_safe_exceptions
665 @reraise_safe_exceptions
665 def tag(self, wire, name, revision, message, local, user,
666 def tag(self, wire, name, revision, message, local, user,
666 tag_time, tag_timezone):
667 tag_time, tag_timezone):
667 repo = self._factory.repo(wire)
668 repo = self._factory.repo(wire)
668 ctx = repo[revision]
669 ctx = repo[revision]
669 node = ctx.node()
670 node = ctx.node()
670
671
671 date = (tag_time, tag_timezone)
672 date = (tag_time, tag_timezone)
672 try:
673 try:
673 hg_tag.tag(repo, name, node, message, local, user, date)
674 hg_tag.tag(repo, name, node, message, local, user, date)
674 except Abort as e:
675 except Abort as e:
675 log.exception("Tag operation aborted")
676 log.exception("Tag operation aborted")
676 # Exception can contain unicode which we convert
677 # Exception can contain unicode which we convert
677 raise exceptions.AbortException(repr(e))
678 raise exceptions.AbortException(e)(repr(e))
678
679
679 @reraise_safe_exceptions
680 @reraise_safe_exceptions
680 def tags(self, wire):
681 def tags(self, wire):
681 repo = self._factory.repo(wire)
682 repo = self._factory.repo(wire)
682 return repo.tags()
683 return repo.tags()
683
684
684 @reraise_safe_exceptions
685 @reraise_safe_exceptions
685 def update(self, wire, node=None, clean=False):
686 def update(self, wire, node=None, clean=False):
686 repo = self._factory.repo(wire)
687 repo = self._factory.repo(wire)
687 baseui = self._factory._create_config(wire['config'])
688 baseui = self._factory._create_config(wire['config'])
688 commands.update(baseui, repo, node=node, clean=clean)
689 commands.update(baseui, repo, node=node, clean=clean)
689
690
690 @reraise_safe_exceptions
691 @reraise_safe_exceptions
691 def identify(self, wire):
692 def identify(self, wire):
692 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
693 baseui = self._factory._create_config(wire['config'])
694 baseui = self._factory._create_config(wire['config'])
694 output = io.BytesIO()
695 output = io.BytesIO()
695 baseui.write = output.write
696 baseui.write = output.write
696 # This is required to get a full node id
697 # This is required to get a full node id
697 baseui.debugflag = True
698 baseui.debugflag = True
698 commands.identify(baseui, repo, id=True)
699 commands.identify(baseui, repo, id=True)
699
700
700 return output.getvalue()
701 return output.getvalue()
701
702
702 @reraise_safe_exceptions
703 @reraise_safe_exceptions
703 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
704 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
704 hooks=True):
705 hooks=True):
705 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
706 baseui = self._factory._create_config(wire['config'], hooks=hooks)
707 baseui = self._factory._create_config(wire['config'], hooks=hooks)
707
708
708 # Mercurial internally has a lot of logic that checks ONLY if an
709 # Mercurial internally has a lot of logic that checks ONLY if an
709 # option is defined; we only pass along the options that are set
710 # option is defined; we only pass along the options that are set
710 opts = {}
711 opts = {}
711 if bookmark:
712 if bookmark:
712 opts['bookmark'] = bookmark
713 opts['bookmark'] = bookmark
713 if branch:
714 if branch:
714 opts['branch'] = branch
715 opts['branch'] = branch
715 if revision:
716 if revision:
716 opts['rev'] = revision
717 opts['rev'] = revision
717
718
718 commands.pull(baseui, repo, source, **opts)
719 commands.pull(baseui, repo, source, **opts)
719
720
720 @reraise_safe_exceptions
721 @reraise_safe_exceptions
721 def heads(self, wire, branch=None):
722 def heads(self, wire, branch=None):
722 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
723 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
724 output = io.BytesIO()
725 output = io.BytesIO()
725
726
726 def write(data, **unused_kwargs):
727 def write(data, **unused_kwargs):
727 output.write(data)
728 output.write(data)
728
729
729 baseui.write = write
730 baseui.write = write
730 if branch:
731 if branch:
731 args = [branch]
732 args = [branch]
732 else:
733 else:
733 args = []
734 args = []
734 commands.heads(baseui, repo, template='{node} ', *args)
735 commands.heads(baseui, repo, template='{node} ', *args)
735
736
736 return output.getvalue()
737 return output.getvalue()
737
738
738 @reraise_safe_exceptions
739 @reraise_safe_exceptions
739 def ancestor(self, wire, revision1, revision2):
740 def ancestor(self, wire, revision1, revision2):
740 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
741 changelog = repo.changelog
742 changelog = repo.changelog
742 lookup = repo.lookup
743 lookup = repo.lookup
743 a = changelog.ancestor(lookup(revision1), lookup(revision2))
744 a = changelog.ancestor(lookup(revision1), lookup(revision2))
744 return hex(a)
745 return hex(a)
745
746
746 @reraise_safe_exceptions
747 @reraise_safe_exceptions
747 def push(self, wire, revisions, dest_path, hooks=True,
748 def push(self, wire, revisions, dest_path, hooks=True,
748 push_branches=False):
749 push_branches=False):
749 repo = self._factory.repo(wire)
750 repo = self._factory.repo(wire)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751 commands.push(baseui, repo, dest=dest_path, rev=revisions,
752 commands.push(baseui, repo, dest=dest_path, rev=revisions,
752 new_branch=push_branches)
753 new_branch=push_branches)
753
754
754 @reraise_safe_exceptions
755 @reraise_safe_exceptions
755 def merge(self, wire, revision):
756 def merge(self, wire, revision):
756 repo = self._factory.repo(wire)
757 repo = self._factory.repo(wire)
757 baseui = self._factory._create_config(wire['config'])
758 baseui = self._factory._create_config(wire['config'])
758 repo.ui.setconfig('ui', 'merge', 'internal:dump')
759 repo.ui.setconfig('ui', 'merge', 'internal:dump')
759
760
760 # When sub repositories are used, mercurial prompts the user in
761 # When sub repositories are used, mercurial prompts the user in
761 # case of merge conflicts or different sub repository sources. By
762 # case of merge conflicts or different sub repository sources. By
762 # setting the interactive flag to `False` mercurial doesn't prompt the
763 # setting the interactive flag to `False` mercurial doesn't prompt the
763 # user but instead uses a default value.
764 # user but instead uses a default value.
764 repo.ui.setconfig('ui', 'interactive', False)
765 repo.ui.setconfig('ui', 'interactive', False)
765
766
766 commands.merge(baseui, repo, rev=revision)
767 commands.merge(baseui, repo, rev=revision)
767
768
768 @reraise_safe_exceptions
769 @reraise_safe_exceptions
769 def commit(self, wire, message, username, close_branch=False):
770 def commit(self, wire, message, username, close_branch=False):
770 repo = self._factory.repo(wire)
771 repo = self._factory.repo(wire)
771 baseui = self._factory._create_config(wire['config'])
772 baseui = self._factory._create_config(wire['config'])
772 repo.ui.setconfig('ui', 'username', username)
773 repo.ui.setconfig('ui', 'username', username)
773 commands.commit(baseui, repo, message=message, close_branch=close_branch)
774 commands.commit(baseui, repo, message=message, close_branch=close_branch)
774
775
775 @reraise_safe_exceptions
776 @reraise_safe_exceptions
776 def rebase(self, wire, source=None, dest=None, abort=False):
777 def rebase(self, wire, source=None, dest=None, abort=False):
777 repo = self._factory.repo(wire)
778 repo = self._factory.repo(wire)
778 baseui = self._factory._create_config(wire['config'])
779 baseui = self._factory._create_config(wire['config'])
779 repo.ui.setconfig('ui', 'merge', 'internal:dump')
780 repo.ui.setconfig('ui', 'merge', 'internal:dump')
780 rebase.rebase(
781 rebase.rebase(
781 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
782 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
782
783
783 @reraise_safe_exceptions
784 @reraise_safe_exceptions
784 def bookmark(self, wire, bookmark, revision=None):
785 def bookmark(self, wire, bookmark, revision=None):
785 repo = self._factory.repo(wire)
786 repo = self._factory.repo(wire)
786 baseui = self._factory._create_config(wire['config'])
787 baseui = self._factory._create_config(wire['config'])
787 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
788 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
788
789
789 @reraise_safe_exceptions
790 @reraise_safe_exceptions
790 def install_hooks(self, wire, force=False):
791 def install_hooks(self, wire, force=False):
791 # we don't need any special hooks for Mercurial
792 # we don't need any special hooks for Mercurial
792 pass
793 pass
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto._capabilities = wrapper
39 lfproto._capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto._capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(orig, repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 return calc_capabilities(orig, repo, proto)
56 else:
56 else:
57 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
58 return orig(repo, proto)
58 return orig(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
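The wrapping idea used above (keep a reference to the original callable and decide per call whether to delegate to it or to the patched version) can be sketched generically; the names below are illustrative and not part of vcsserver:

    def make_conditional_wrapper(original, wrapped, is_enabled):
        # call `wrapped(original, ...)` when the feature is enabled,
        # otherwise fall back to the untouched original callable
        def _conditional(*args, **kwargs):
            if is_enabled():
                return wrapped(original, *args, **kwargs)
            return original(*args, **kwargs)
        return _conditional
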
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo
65 from hgcompat import subrepo
66 from exceptions import SubrepoMergeException
66 from exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring this subrepository in the
73 ``ctx`` is the context referring this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False, missing=False):
90 def dirty(self, ignoreupdate=False, missing=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepo.state(self._ctx, self.ui)
100 substate = subrepo.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
@@ -1,572 +1,572 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55 return json.loads(response.read())
55 return json.loads(response.read())
56
56
57 def _serialize(self, hook_name, extras):
57 def _serialize(self, hook_name, extras):
58 data = {
58 data = {
59 'method': hook_name,
59 'method': hook_name,
60 'extras': extras
60 'extras': extras
61 }
61 }
62 return json.dumps(data)
62 return json.dumps(data)
63
63
64
64
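For reference, a sketch of what a hook call would put on the wire when using this client; the hook name and extras payload are examples only, and the response keys correspond to what `_call_hook` and `_handle_exception` consume further below:

    import json

    # request body for a single hook invocation (example values)
    body = json.dumps({'method': 'repo_size', 'extras': {'repository': 'some/repo'}})
    # sent as a POST to '/' on the configured hooks_uri; the JSON response is
    # expected to carry at least 'status' and 'output', plus the optional
    # 'exception', 'exception_args' and 'exception_traceback' keys
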
65 class HooksDummyClient(object):
65 class HooksDummyClient(object):
66 def __init__(self, hooks_module):
66 def __init__(self, hooks_module):
67 self._hooks_module = importlib.import_module(hooks_module)
67 self._hooks_module = importlib.import_module(hooks_module)
68
68
69 def __call__(self, hook_name, extras):
69 def __call__(self, hook_name, extras):
70 with self._hooks_module.Hooks() as hooks:
70 with self._hooks_module.Hooks() as hooks:
71 return getattr(hooks, hook_name)(extras)
71 return getattr(hooks, hook_name)(extras)
72
72
73
73
74 class RemoteMessageWriter(object):
74 class RemoteMessageWriter(object):
75 """Writer base class."""
75 """Writer base class."""
76 def write(self, message):
76 def write(self, message):
77 raise NotImplementedError()
77 raise NotImplementedError()
78
78
79
79
80 class HgMessageWriter(RemoteMessageWriter):
80 class HgMessageWriter(RemoteMessageWriter):
81 """Writer that knows how to send messages to mercurial clients."""
81 """Writer that knows how to send messages to mercurial clients."""
82
82
83 def __init__(self, ui):
83 def __init__(self, ui):
84 self.ui = ui
84 self.ui = ui
85
85
86 def write(self, message):
86 def write(self, message):
87 # TODO: Check why the quiet flag is set by default.
87 # TODO: Check why the quiet flag is set by default.
88 old = self.ui.quiet
88 old = self.ui.quiet
89 self.ui.quiet = False
89 self.ui.quiet = False
90 self.ui.status(message.encode('utf-8'))
90 self.ui.status(message.encode('utf-8'))
91 self.ui.quiet = old
91 self.ui.quiet = old
92
92
93
93
94 class GitMessageWriter(RemoteMessageWriter):
94 class GitMessageWriter(RemoteMessageWriter):
95 """Writer that knows how to send messages to git clients."""
95 """Writer that knows how to send messages to git clients."""
96
96
97 def __init__(self, stdout=None):
97 def __init__(self, stdout=None):
98 self.stdout = stdout or sys.stdout
98 self.stdout = stdout or sys.stdout
99
99
100 def write(self, message):
100 def write(self, message):
101 self.stdout.write(message.encode('utf-8'))
101 self.stdout.write(message.encode('utf-8'))
102
102
103
103
104 class SvnMessageWriter(RemoteMessageWriter):
104 class SvnMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to svn clients."""
105 """Writer that knows how to send messages to svn clients."""
106
106
107 def __init__(self, stderr=None):
107 def __init__(self, stderr=None):
108 # SVN needs data sent to stderr for back-to-client messaging
108 # SVN needs data sent to stderr for back-to-client messaging
109 self.stderr = stderr or sys.stderr
109 self.stderr = stderr or sys.stderr
110
110
111 def write(self, message):
111 def write(self, message):
112 self.stderr.write(message.encode('utf-8'))
112 self.stderr.write(message.encode('utf-8'))
113
113
114
114
115 def _handle_exception(result):
115 def _handle_exception(result):
116 exception_class = result.get('exception')
116 exception_class = result.get('exception')
117 exception_traceback = result.get('exception_traceback')
117 exception_traceback = result.get('exception_traceback')
118
118
119 if exception_traceback:
119 if exception_traceback:
120 log.error('Got traceback from remote call:%s', exception_traceback)
120 log.error('Got traceback from remote call:%s', exception_traceback)
121
121
122 if exception_class == 'HTTPLockedRC':
122 if exception_class == 'HTTPLockedRC':
123 raise exceptions.RepositoryLockedException(*result['exception_args'])
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 elif exception_class == 'RepositoryError':
124 elif exception_class == 'RepositoryError':
125 raise exceptions.VcsException(*result['exception_args'])
125 raise exceptions.VcsException()(*result['exception_args'])
126 elif exception_class:
126 elif exception_class:
127 raise Exception('Got remote exception "%s" with args "%s"' %
127 raise Exception('Got remote exception "%s" with args "%s"' %
128 (exception_class, result['exception_args']))
128 (exception_class, result['exception_args']))
129
129
130
130
131 def _get_hooks_client(extras):
131 def _get_hooks_client(extras):
132 if 'hooks_uri' in extras:
132 if 'hooks_uri' in extras:
133 protocol = extras.get('hooks_protocol')
133 protocol = extras.get('hooks_protocol')
134 return HooksHttpClient(extras['hooks_uri'])
134 return HooksHttpClient(extras['hooks_uri'])
135 else:
135 else:
136 return HooksDummyClient(extras['hooks_module'])
136 return HooksDummyClient(extras['hooks_module'])
137
137
138
138
139 def _call_hook(hook_name, extras, writer):
139 def _call_hook(hook_name, extras, writer):
140 hooks_client = _get_hooks_client(extras)
140 hooks_client = _get_hooks_client(extras)
141 log.debug('Hooks, using client:%s', hooks_client)
141 log.debug('Hooks, using client:%s', hooks_client)
142 result = hooks_client(hook_name, extras)
142 result = hooks_client(hook_name, extras)
143 log.debug('Hooks got result: %s', result)
143 log.debug('Hooks got result: %s', result)
144 writer.write(result['output'])
144 writer.write(result['output'])
145 _handle_exception(result)
145 _handle_exception(result)
146
146
147 return result['status']
147 return result['status']
148
148
149
149
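# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# _call_hook() above only needs the client to be a callable taking
# (hook_name, extras) and returning a dict with at least 'status' and 'output'
# (plus the optional exception keys consumed by _handle_exception). A minimal
# in-memory stand-in, e.g. for tests, could look like this; the class name and
# behaviour are assumptions, not part of the real client API.
class _NoopHooksClient(object):
    """Pretends every hook succeeded and produced no output."""

    def __call__(self, hook_name, extras):
        return {'status': 0, 'output': ''}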
150 def _extras_from_ui(ui):
150 def _extras_from_ui(ui):
151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
152 if not hook_data:
152 if not hook_data:
153 # maybe it's inside environ?
153 # maybe it's inside environ?
154 env_hook_data = os.environ.get('RC_SCM_DATA')
154 env_hook_data = os.environ.get('RC_SCM_DATA')
155 if env_hook_data:
155 if env_hook_data:
156 hook_data = env_hook_data
156 hook_data = env_hook_data
157
157
158 extras = {}
158 extras = {}
159 if hook_data:
159 if hook_data:
160 extras = json.loads(hook_data)
160 extras = json.loads(hook_data)
161 return extras
161 return extras
162
162
163
163
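# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# _extras_from_ui() above reads the RC_SCM_DATA blob from the repo ui config
# and falls back to the environment. The keys below appear elsewhere in this
# module; the values are invented for illustration.
import json
import os

os.environ.setdefault('RC_SCM_DATA', json.dumps({
    'hooks': ['push', 'pull'],
    'hooks_protocol': 'http',
    'hooks_uri': '127.0.0.1:10000',
    'SSH': False,
}))
# With no ui config entry, _extras_from_ui(ui) would decode this JSON and
# return it as the extras dict.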
164 def _rev_range_hash(repo, node):
164 def _rev_range_hash(repo, node):
165
165
166 commits = []
166 commits = []
167 start = repo[node].rev()
167 start = repo[node].rev()
168 for rev in xrange(start, len(repo)):
168 for rev in xrange(start, len(repo)):
169 ctx = repo[rev]
169 ctx = repo[rev]
170 commit_id = mercurial.node.hex(ctx.node())
170 commit_id = mercurial.node.hex(ctx.node())
171 branch = ctx.branch()
171 branch = ctx.branch()
172 commits.append((commit_id, branch))
172 commits.append((commit_id, branch))
173
173
174 return commits
174 return commits
175
175
176
176
177 def repo_size(ui, repo, **kwargs):
177 def repo_size(ui, repo, **kwargs):
178 extras = _extras_from_ui(ui)
178 extras = _extras_from_ui(ui)
179 return _call_hook('repo_size', extras, HgMessageWriter(ui))
179 return _call_hook('repo_size', extras, HgMessageWriter(ui))
180
180
181
181
182 def pre_pull(ui, repo, **kwargs):
182 def pre_pull(ui, repo, **kwargs):
183 extras = _extras_from_ui(ui)
183 extras = _extras_from_ui(ui)
184 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
184 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
185
185
186
186
187 def pre_pull_ssh(ui, repo, **kwargs):
187 def pre_pull_ssh(ui, repo, **kwargs):
188 extras = _extras_from_ui(ui)
188 extras = _extras_from_ui(ui)
189 if extras and extras.get('SSH'):
189 if extras and extras.get('SSH'):
190 return pre_pull(ui, repo, **kwargs)
190 return pre_pull(ui, repo, **kwargs)
191 return 0
191 return 0
192
192
193
193
194 def post_pull(ui, repo, **kwargs):
194 def post_pull(ui, repo, **kwargs):
195 extras = _extras_from_ui(ui)
195 extras = _extras_from_ui(ui)
196 return _call_hook('post_pull', extras, HgMessageWriter(ui))
196 return _call_hook('post_pull', extras, HgMessageWriter(ui))
197
197
198
198
199 def post_pull_ssh(ui, repo, **kwargs):
199 def post_pull_ssh(ui, repo, **kwargs):
200 extras = _extras_from_ui(ui)
200 extras = _extras_from_ui(ui)
201 if extras and extras.get('SSH'):
201 if extras and extras.get('SSH'):
202 return post_pull(ui, repo, **kwargs)
202 return post_pull(ui, repo, **kwargs)
203 return 0
203 return 0
204
204
205
205
206 def pre_push(ui, repo, node=None, **kwargs):
206 def pre_push(ui, repo, node=None, **kwargs):
207 extras = _extras_from_ui(ui)
207 extras = _extras_from_ui(ui)
208
208
209 rev_data = []
209 rev_data = []
210 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
210 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
211 branches = collections.defaultdict(list)
211 branches = collections.defaultdict(list)
212 for commit_id, branch in _rev_range_hash(repo, node):
212 for commit_id, branch in _rev_range_hash(repo, node):
213 branches[branch].append(commit_id)
213 branches[branch].append(commit_id)
214
214
215 for branch, commits in branches.iteritems():
215 for branch, commits in branches.iteritems():
216 old_rev = kwargs.get('node_last') or commits[0]
216 old_rev = kwargs.get('node_last') or commits[0]
217 rev_data.append({
217 rev_data.append({
218 'old_rev': old_rev,
218 'old_rev': old_rev,
219 'new_rev': commits[-1],
219 'new_rev': commits[-1],
220 'ref': '',
220 'ref': '',
221 'type': 'branch',
221 'type': 'branch',
222 'name': branch,
222 'name': branch,
223 })
223 })
224
224
225 extras['commit_ids'] = rev_data
225 extras['commit_ids'] = rev_data
226 return _call_hook('pre_push', extras, HgMessageWriter(ui))
226 return _call_hook('pre_push', extras, HgMessageWriter(ui))
227
227
228
228
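# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# For a pretxnchangegroup run, pre_push() above groups incoming commits per
# branch and sends one entry per branch. A hypothetical extras['commit_ids']
# payload for a push touching two branches (SHAs shortened/invented):
_example_hg_rev_data = [
    {'old_rev': 'aaaa' * 10, 'new_rev': 'bbbb' * 10,
     'ref': '', 'type': 'branch', 'name': 'default'},
    {'old_rev': 'cccc' * 10, 'new_rev': 'dddd' * 10,
     'ref': '', 'type': 'branch', 'name': 'stable'},
]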
229 def pre_push_ssh(ui, repo, node=None, **kwargs):
229 def pre_push_ssh(ui, repo, node=None, **kwargs):
230 if _extras_from_ui(ui).get('SSH'):
230 if _extras_from_ui(ui).get('SSH'):
231 return pre_push(ui, repo, node, **kwargs)
231 return pre_push(ui, repo, node, **kwargs)
232
232
233 return 0
233 return 0
234
234
235
235
236 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
236 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
237 extras = _extras_from_ui(ui)
237 extras = _extras_from_ui(ui)
238 if extras.get('SSH'):
238 if extras.get('SSH'):
239 permission = extras['SSH_PERMISSIONS']
239 permission = extras['SSH_PERMISSIONS']
240
240
241 if 'repository.write' == permission or 'repository.admin' == permission:
241 if 'repository.write' == permission or 'repository.admin' == permission:
242 return 0
242 return 0
243
243
244 # non-zero ret code
244 # non-zero ret code
245 return 1
245 return 1
246
246
247 return 0
247 return 0
248
248
249
249
250 def post_push(ui, repo, node, **kwargs):
250 def post_push(ui, repo, node, **kwargs):
251 extras = _extras_from_ui(ui)
251 extras = _extras_from_ui(ui)
252
252
253 commit_ids = []
253 commit_ids = []
254 branches = []
254 branches = []
255 bookmarks = []
255 bookmarks = []
256 tags = []
256 tags = []
257
257
258 for commit_id, branch in _rev_range_hash(repo, node):
258 for commit_id, branch in _rev_range_hash(repo, node):
259 commit_ids.append(commit_id)
259 commit_ids.append(commit_id)
260 if branch not in branches:
260 if branch not in branches:
261 branches.append(branch)
261 branches.append(branch)
262
262
263 if hasattr(ui, '_rc_pushkey_branches'):
263 if hasattr(ui, '_rc_pushkey_branches'):
264 bookmarks = ui._rc_pushkey_branches
264 bookmarks = ui._rc_pushkey_branches
265
265
266 extras['commit_ids'] = commit_ids
266 extras['commit_ids'] = commit_ids
267 extras['new_refs'] = {
267 extras['new_refs'] = {
268 'branches': branches,
268 'branches': branches,
269 'bookmarks': bookmarks,
269 'bookmarks': bookmarks,
270 'tags': tags
270 'tags': tags
271 }
271 }
272
272
273 return _call_hook('post_push', extras, HgMessageWriter(ui))
273 return _call_hook('post_push', extras, HgMessageWriter(ui))
274
274
275
275
276 def post_push_ssh(ui, repo, node, **kwargs):
276 def post_push_ssh(ui, repo, node, **kwargs):
277 if _extras_from_ui(ui).get('SSH'):
277 if _extras_from_ui(ui).get('SSH'):
278 return post_push(ui, repo, node, **kwargs)
278 return post_push(ui, repo, node, **kwargs)
279 return 0
279 return 0
280
280
281
281
282 def key_push(ui, repo, **kwargs):
282 def key_push(ui, repo, **kwargs):
283 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
283 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
284 # store new bookmarks in our UI object propagated later to post_push
284 # store new bookmarks in our UI object propagated later to post_push
285 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
285 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
286 return
286 return
287
287
288
288
289 # backward compat
289 # backward compat
290 log_pull_action = post_pull
290 log_pull_action = post_pull
291
291
292 # backward compat
292 # backward compat
293 log_push_action = post_push
293 log_push_action = post_push
294
294
295
295
296 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
296 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
297 """
297 """
298 Old hook name: keep here for backward compatibility.
298 Old hook name: keep here for backward compatibility.
299
299
300 This is only required when the installed git hooks are not upgraded.
300 This is only required when the installed git hooks are not upgraded.
301 """
301 """
302 pass
302 pass
303
303
304
304
305 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
305 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
306 """
306 """
307 Old hook name: keep here for backward compatibility.
307 Old hook name: keep here for backward compatibility.
308
308
309 This is only required when the installed git hooks are not upgraded.
309 This is only required when the installed git hooks are not upgraded.
310 """
310 """
311 pass
311 pass
312
312
313
313
314 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
314 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
315
315
316
316
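# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# HookResponse is a plain namedtuple used by the git pull/push wrappers below,
# so callers get both the exit status and the captured client output:
_example_response = HookResponse(0, 'hook output\n')
assert _example_response.status == 0
assert _example_response.output == 'hook output\n'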
317 def git_pre_pull(extras):
317 def git_pre_pull(extras):
318 """
318 """
319 Pre pull hook.
319 Pre pull hook.
320
320
321 :param extras: dictionary containing the keys defined in simplevcs
321 :param extras: dictionary containing the keys defined in simplevcs
322 :type extras: dict
322 :type extras: dict
323
323
324 :return: status code of the hook. 0 for success.
324 :return: status code of the hook. 0 for success.
325 :rtype: int
325 :rtype: int
326 """
326 """
327 if 'pull' not in extras['hooks']:
327 if 'pull' not in extras['hooks']:
328 return HookResponse(0, '')
328 return HookResponse(0, '')
329
329
330 stdout = io.BytesIO()
330 stdout = io.BytesIO()
331 try:
331 try:
332 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
332 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
333 except Exception as error:
333 except Exception as error:
334 status = 128
334 status = 128
335 stdout.write('ERROR: %s\n' % str(error))
335 stdout.write('ERROR: %s\n' % str(error))
336
336
337 return HookResponse(status, stdout.getvalue())
337 return HookResponse(status, stdout.getvalue())
338
338
339
339
340 def git_post_pull(extras):
340 def git_post_pull(extras):
341 """
341 """
342 Post pull hook.
342 Post pull hook.
343
343
344 :param extras: dictionary containing the keys defined in simplevcs
344 :param extras: dictionary containing the keys defined in simplevcs
345 :type extras: dict
345 :type extras: dict
346
346
347 :return: status code of the hook. 0 for success.
347 :return: status code of the hook. 0 for success.
348 :rtype: int
348 :rtype: int
349 """
349 """
350 if 'pull' not in extras['hooks']:
350 if 'pull' not in extras['hooks']:
351 return HookResponse(0, '')
351 return HookResponse(0, '')
352
352
353 stdout = io.BytesIO()
353 stdout = io.BytesIO()
354 try:
354 try:
355 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
355 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
356 except Exception as error:
356 except Exception as error:
357 status = 128
357 status = 128
358 stdout.write('ERROR: %s\n' % error)
358 stdout.write('ERROR: %s\n' % error)
359
359
360 return HookResponse(status, stdout.getvalue())
360 return HookResponse(status, stdout.getvalue())
361
361
362
362
363 def _parse_git_ref_lines(revision_lines):
363 def _parse_git_ref_lines(revision_lines):
364 rev_data = []
364 rev_data = []
365 for revision_line in revision_lines or []:
365 for revision_line in revision_lines or []:
366 old_rev, new_rev, ref = revision_line.strip().split(' ')
366 old_rev, new_rev, ref = revision_line.strip().split(' ')
367 ref_data = ref.split('/', 2)
367 ref_data = ref.split('/', 2)
368 if ref_data[1] in ('tags', 'heads'):
368 if ref_data[1] in ('tags', 'heads'):
369 rev_data.append({
369 rev_data.append({
370 'old_rev': old_rev,
370 'old_rev': old_rev,
371 'new_rev': new_rev,
371 'new_rev': new_rev,
372 'ref': ref,
372 'ref': ref,
373 'type': ref_data[1],
373 'type': ref_data[1],
374 'name': ref_data[2],
374 'name': ref_data[2],
375 })
375 })
376 return rev_data
376 return rev_data
377
377
378
378
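# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# Each line handed to a git pre/post-receive hook is "<old> <new> <ref>";
# _parse_git_ref_lines() above keeps only branch ('heads') and tag refs. The
# SHAs below are shortened placeholders (real ones are 40 hex chars):
_example_ref_lines = ['1111 2222 refs/heads/master', '0000 3333 refs/tags/v1.0']
# _parse_git_ref_lines(_example_ref_lines) would return:
# [{'old_rev': '1111', 'new_rev': '2222', 'ref': 'refs/heads/master',
#   'type': 'heads', 'name': 'master'},
#  {'old_rev': '0000', 'new_rev': '3333', 'ref': 'refs/tags/v1.0',
#   'type': 'tags', 'name': 'v1.0'}]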
379 def git_pre_receive(unused_repo_path, revision_lines, env):
379 def git_pre_receive(unused_repo_path, revision_lines, env):
380 """
380 """
381 Pre push hook.
381 Pre push hook.
382
382
383 :param env: environment dict carrying RC_SCM_DATA with the keys defined in simplevcs
383 :param env: environment dict carrying RC_SCM_DATA with the keys defined in simplevcs
384 :type env: dict
384 :type env: dict
385
385
386 :return: status code of the hook. 0 for success.
386 :return: status code of the hook. 0 for success.
387 :rtype: int
387 :rtype: int
388 """
388 """
389 extras = json.loads(env['RC_SCM_DATA'])
389 extras = json.loads(env['RC_SCM_DATA'])
390 rev_data = _parse_git_ref_lines(revision_lines)
390 rev_data = _parse_git_ref_lines(revision_lines)
391 if 'push' not in extras['hooks']:
391 if 'push' not in extras['hooks']:
392 return 0
392 return 0
393 extras['commit_ids'] = rev_data
393 extras['commit_ids'] = rev_data
394 return _call_hook('pre_push', extras, GitMessageWriter())
394 return _call_hook('pre_push', extras, GitMessageWriter())
395
395
396
396
397 def git_post_receive(unused_repo_path, revision_lines, env):
397 def git_post_receive(unused_repo_path, revision_lines, env):
398 """
398 """
399 Post push hook.
399 Post push hook.
400
400
401 :param env: environment dict carrying RC_SCM_DATA with the keys defined in simplevcs
401 :param env: environment dict carrying RC_SCM_DATA with the keys defined in simplevcs
402 :type env: dict
402 :type env: dict
403
403
404 :return: status code of the hook. 0 for success.
404 :return: status code of the hook. 0 for success.
405 :rtype: int
405 :rtype: int
406 """
406 """
407 extras = json.loads(env['RC_SCM_DATA'])
407 extras = json.loads(env['RC_SCM_DATA'])
408 if 'push' not in extras['hooks']:
408 if 'push' not in extras['hooks']:
409 return 0
409 return 0
410
410
411 rev_data = _parse_git_ref_lines(revision_lines)
411 rev_data = _parse_git_ref_lines(revision_lines)
412
412
413 git_revs = []
413 git_revs = []
414
414
415 # N.B.(skreft): it is ok to just call git, as git before calling a
415 # N.B.(skreft): it is ok to just call git, as git before calling a
416 # subcommand sets the PATH environment variable so that it points to the
416 # subcommand sets the PATH environment variable so that it points to the
417 # correct version of the git executable.
417 # correct version of the git executable.
418 empty_commit_id = '0' * 40
418 empty_commit_id = '0' * 40
419 branches = []
419 branches = []
420 tags = []
420 tags = []
421 for push_ref in rev_data:
421 for push_ref in rev_data:
422 type_ = push_ref['type']
422 type_ = push_ref['type']
423
423
424 if type_ == 'heads':
424 if type_ == 'heads':
425 if push_ref['old_rev'] == empty_commit_id:
425 if push_ref['old_rev'] == empty_commit_id:
426 # starting new branch case
426 # starting new branch case
427 if push_ref['name'] not in branches:
427 if push_ref['name'] not in branches:
428 branches.append(push_ref['name'])
428 branches.append(push_ref['name'])
429
429
430 # Fix up head revision if needed
430 # Fix up head revision if needed
431 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
431 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
432 try:
432 try:
433 subprocessio.run_command(cmd, env=os.environ.copy())
433 subprocessio.run_command(cmd, env=os.environ.copy())
434 except Exception:
434 except Exception:
435 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
435 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
436 'refs/heads/%s' % push_ref['name']]
436 'refs/heads/%s' % push_ref['name']]
437 print("Setting default branch to %s" % push_ref['name'])
437 print("Setting default branch to %s" % push_ref['name'])
438 subprocessio.run_command(cmd, env=os.environ.copy())
438 subprocessio.run_command(cmd, env=os.environ.copy())
439
439
440 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
440 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
441 '--format=%(refname)', 'refs/heads/*']
441 '--format=%(refname)', 'refs/heads/*']
442 stdout, stderr = subprocessio.run_command(
442 stdout, stderr = subprocessio.run_command(
443 cmd, env=os.environ.copy())
443 cmd, env=os.environ.copy())
444 heads = stdout
444 heads = stdout
445 heads = heads.replace(push_ref['ref'], '')
445 heads = heads.replace(push_ref['ref'], '')
446 heads = ' '.join(head for head
446 heads = ' '.join(head for head
447 in heads.splitlines() if head) or '.'
447 in heads.splitlines() if head) or '.'
448 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
448 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
449 '--pretty=format:%H', '--', push_ref['new_rev'],
449 '--pretty=format:%H', '--', push_ref['new_rev'],
450 '--not', heads]
450 '--not', heads]
451 stdout, stderr = subprocessio.run_command(
451 stdout, stderr = subprocessio.run_command(
452 cmd, env=os.environ.copy())
452 cmd, env=os.environ.copy())
453 git_revs.extend(stdout.splitlines())
453 git_revs.extend(stdout.splitlines())
454 elif push_ref['new_rev'] == empty_commit_id:
454 elif push_ref['new_rev'] == empty_commit_id:
455 # delete branch case
455 # delete branch case
456 git_revs.append('delete_branch=>%s' % push_ref['name'])
456 git_revs.append('delete_branch=>%s' % push_ref['name'])
457 else:
457 else:
458 if push_ref['name'] not in branches:
458 if push_ref['name'] not in branches:
459 branches.append(push_ref['name'])
459 branches.append(push_ref['name'])
460
460
461 cmd = [settings.GIT_EXECUTABLE, 'log',
461 cmd = [settings.GIT_EXECUTABLE, 'log',
462 '{old_rev}..{new_rev}'.format(**push_ref),
462 '{old_rev}..{new_rev}'.format(**push_ref),
463 '--reverse', '--pretty=format:%H']
463 '--reverse', '--pretty=format:%H']
464 stdout, stderr = subprocessio.run_command(
464 stdout, stderr = subprocessio.run_command(
465 cmd, env=os.environ.copy())
465 cmd, env=os.environ.copy())
466 git_revs.extend(stdout.splitlines())
466 git_revs.extend(stdout.splitlines())
467 elif type_ == 'tags':
467 elif type_ == 'tags':
468 if push_ref['name'] not in tags:
468 if push_ref['name'] not in tags:
469 tags.append(push_ref['name'])
469 tags.append(push_ref['name'])
470 git_revs.append('tag=>%s' % push_ref['name'])
470 git_revs.append('tag=>%s' % push_ref['name'])
471
471
472 extras['commit_ids'] = git_revs
472 extras['commit_ids'] = git_revs
473 extras['new_refs'] = {
473 extras['new_refs'] = {
474 'branches': branches,
474 'branches': branches,
475 'bookmarks': [],
475 'bookmarks': [],
476 'tags': tags,
476 'tags': tags,
477 }
477 }
478
478
479 if 'repo_size' in extras['hooks']:
479 if 'repo_size' in extras['hooks']:
480 try:
480 try:
481 _call_hook('repo_size', extras, GitMessageWriter())
481 _call_hook('repo_size', extras, GitMessageWriter())
482 except Exception:
482 except Exception:
483 pass
483 pass
484
484
485 return _call_hook('post_push', extras, GitMessageWriter())
485 return _call_hook('post_push', extras, GitMessageWriter())
486
486
487
487
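# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# git_post_receive() above tells branch creation and deletion apart by the
# all-zero revision git reports for the missing side of a ref update. A compact
# restatement of that rule (helper name is made up):
def _classify_branch_update(old_rev, new_rev, empty_commit_id='0' * 40):
    if old_rev == empty_commit_id:
        return 'created'
    if new_rev == empty_commit_id:
        return 'deleted'
    return 'updated'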
488 def _get_extras_from_txn_id(path, txn_id):
488 def _get_extras_from_txn_id(path, txn_id):
489 extras = {}
489 extras = {}
490 try:
490 try:
491 cmd = ['svnlook', 'pget',
491 cmd = ['svnlook', 'pget',
492 '-t', txn_id,
492 '-t', txn_id,
493 '--revprop', path, 'rc-scm-extras']
493 '--revprop', path, 'rc-scm-extras']
494 stdout, stderr = subprocessio.run_command(
494 stdout, stderr = subprocessio.run_command(
495 cmd, env=os.environ.copy())
495 cmd, env=os.environ.copy())
496 extras = json.loads(base64.urlsafe_b64decode(stdout))
496 extras = json.loads(base64.urlsafe_b64decode(stdout))
497 except Exception:
497 except Exception:
498 log.exception('Failed to extract extras info from txn_id')
498 log.exception('Failed to extract extras info from txn_id')
499
499
500 return extras
500 return extras
501
501
502
502
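# --- Illustrative sketch (editorial note, not part of the original hook code) ---
# _get_extras_from_txn_id() above shells out to
#   svnlook pget -t <txn_id> --revprop <path> rc-scm-extras
# and decodes the property as urlsafe-base64 wrapped JSON. The matching encode
# step below is only an assumption about how the producer builds the property:
import base64
import json

_example_extras = {'hooks': ['push'], 'SSH': False}
_encoded = base64.urlsafe_b64encode(json.dumps(_example_extras))
assert json.loads(base64.urlsafe_b64decode(_encoded)) == _example_extras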
503 def svn_pre_commit(repo_path, commit_data, env):
503 def svn_pre_commit(repo_path, commit_data, env):
504 path, txn_id = commit_data
504 path, txn_id = commit_data
505 branches = []
505 branches = []
506 tags = []
506 tags = []
507
507
508 if env.get('RC_SCM_DATA'):
508 if env.get('RC_SCM_DATA'):
509 extras = json.loads(env['RC_SCM_DATA'])
509 extras = json.loads(env['RC_SCM_DATA'])
510 else:
510 else:
511 # fallback method to read from TXN-ID stored data
511 # fallback method to read from TXN-ID stored data
512 extras = _get_extras_from_txn_id(path, txn_id)
512 extras = _get_extras_from_txn_id(path, txn_id)
513 if not extras:
513 if not extras:
514 return 0
514 return 0
515
515
516 extras['commit_ids'] = []
516 extras['commit_ids'] = []
517 extras['txn_id'] = txn_id
517 extras['txn_id'] = txn_id
518 extras['new_refs'] = {
518 extras['new_refs'] = {
519 'branches': branches,
519 'branches': branches,
520 'bookmarks': [],
520 'bookmarks': [],
521 'tags': tags,
521 'tags': tags,
522 }
522 }
523
523
524 return _call_hook('pre_push', extras, SvnMessageWriter())
524 return _call_hook('pre_push', extras, SvnMessageWriter())
525
525
526
526
527 def _get_extras_from_commit_id(commit_id, path):
527 def _get_extras_from_commit_id(commit_id, path):
528 extras = {}
528 extras = {}
529 try:
529 try:
530 cmd = ['svnlook', 'pget',
530 cmd = ['svnlook', 'pget',
531 '-r', commit_id,
531 '-r', commit_id,
532 '--revprop', path, 'rc-scm-extras']
532 '--revprop', path, 'rc-scm-extras']
533 stdout, stderr = subprocessio.run_command(
533 stdout, stderr = subprocessio.run_command(
534 cmd, env=os.environ.copy())
534 cmd, env=os.environ.copy())
535 extras = json.loads(base64.urlsafe_b64decode(stdout))
535 extras = json.loads(base64.urlsafe_b64decode(stdout))
536 except Exception:
536 except Exception:
537 log.exception('Failed to extract extras info from commit_id')
537 log.exception('Failed to extract extras info from commit_id')
538
538
539 return extras
539 return extras
540
540
541
541
542 def svn_post_commit(repo_path, commit_data, env):
542 def svn_post_commit(repo_path, commit_data, env):
543 """
543 """
544 commit_data is a (path, rev, txn_id) tuple
544 commit_data is a (path, rev, txn_id) tuple
545 """
545 """
546 path, commit_id, txn_id = commit_data
546 path, commit_id, txn_id = commit_data
547 branches = []
547 branches = []
548 tags = []
548 tags = []
549
549
550 if env.get('RC_SCM_DATA'):
550 if env.get('RC_SCM_DATA'):
551 extras = json.loads(env['RC_SCM_DATA'])
551 extras = json.loads(env['RC_SCM_DATA'])
552 else:
552 else:
553 # fallback method to read from TXN-ID stored data
553 # fallback method to read from TXN-ID stored data
554 extras = _get_extras_from_commit_id(commit_id, path)
554 extras = _get_extras_from_commit_id(commit_id, path)
555 if not extras:
555 if not extras:
556 return 0
556 return 0
557
557
558 extras['commit_ids'] = [commit_id]
558 extras['commit_ids'] = [commit_id]
559 extras['txn_id'] = txn_id
559 extras['txn_id'] = txn_id
560 extras['new_refs'] = {
560 extras['new_refs'] = {
561 'branches': branches,
561 'branches': branches,
562 'bookmarks': [],
562 'bookmarks': [],
563 'tags': tags,
563 'tags': tags,
564 }
564 }
565
565
566 if 'repo_size' in extras['hooks']:
566 if 'repo_size' in extras['hooks']:
567 try:
567 try:
568 _call_hook('repo_size', extras, SvnMessageWriter())
568 _call_hook('repo_size', extras, SvnMessageWriter())
569 except Exception:
569 except Exception:
570 pass
570 pass
571
571
572 return _call_hook('post_push', extras, SvnMessageWriter())
572 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,234 +1,234 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.wireprotoserver
25 import mercurial.hgweb.common
25 import mercurial.hgweb.common
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request
61 This code is unreachable because we guarantee that the HTTP request
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible url.
63 never going to get a user-visible url.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 from mercurial.hgweb import request as requestmod
76 from mercurial.hgweb import request as requestmod
77 req = requestmod.parserequestfromenv(environ)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
79 gen = self.run_wsgi(req, res)
80
80
81 first_chunk = None
81 first_chunk = None
82
82
83 try:
83 try:
84 data = gen.next()
84 data = gen.next()
85
85
86 def first_chunk():
86 def first_chunk():
87 yield data
87 yield data
88 except StopIteration:
88 except StopIteration:
89 pass
89 pass
90
90
91 if first_chunk:
91 if first_chunk:
92 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
93 return gen
93 return gen
94
94
95 def _runwsgi(self, req, res, repo):
95 def _runwsgi(self, req, res, repo):
96
96
97 cmd = req.qsparams.get('cmd', '')
97 cmd = req.qsparams.get('cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
100 # internally from HG
101 from mercurial.hgweb.common import statusmessage
101 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
103 res.setbodybytes('')
104 return res.sendresponse()
104 return res.sendresponse()
105
105
106 return super(HgWeb, self)._runwsgi(req, res, repo)
106 return super(HgWeb, self)._runwsgi(req, res, repo)
107
107
108
108
109 def make_hg_ui_from_config(repo_config):
109 def make_hg_ui_from_config(repo_config):
110 baseui = mercurial.ui.ui()
110 baseui = mercurial.ui.ui()
111
111
112 # clean the baseui object
112 # clean the baseui object
113 baseui._ocfg = mercurial.config.config()
113 baseui._ocfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
116
116
117 for section, option, value in repo_config:
117 for section, option, value in repo_config:
118 baseui.setconfig(section, option, value)
118 baseui.setconfig(section, option, value)
119
119
120 # make our hgweb quiet so it doesn't print output
120 # make our hgweb quiet so it doesn't print output
121 baseui.setconfig('ui', 'quiet', 'true')
121 baseui.setconfig('ui', 'quiet', 'true')
122
122
123 return baseui
123 return baseui
124
124
125
125
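# --- Illustrative sketch (editorial note, not part of the original module) ---
# make_hg_ui_from_config() above (and create_hg_wsgi_app() below) expect the
# repo config as a flat list of (section, option, value) tuples. The entries
# here are hypothetical examples of that shape, not required settings:
_example_repo_config = [
    ('web', 'push_ssl', 'false'),
    ('phases', 'publish', 'true'),
]
# baseui = make_hg_ui_from_config(_example_repo_config)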
126 def update_hg_ui_from_hgrc(baseui, repo_path):
126 def update_hg_ui_from_hgrc(baseui, repo_path):
127 path = os.path.join(repo_path, '.hg', 'hgrc')
127 path = os.path.join(repo_path, '.hg', 'hgrc')
128
128
129 if not os.path.isfile(path):
129 if not os.path.isfile(path):
130 log.debug('hgrc file is not present at %s, skipping...', path)
130 log.debug('hgrc file is not present at %s, skipping...', path)
131 return
131 return
132 log.debug('reading hgrc from %s', path)
132 log.debug('reading hgrc from %s', path)
133 cfg = mercurial.config.config()
133 cfg = mercurial.config.config()
134 cfg.read(path)
134 cfg.read(path)
135 for section in HG_UI_SECTIONS:
135 for section in HG_UI_SECTIONS:
136 for k, v in cfg.items(section):
136 for k, v in cfg.items(section):
137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 baseui.setconfig(section, k, v)
138 baseui.setconfig(section, k, v)
139
139
140
140
141 def create_hg_wsgi_app(repo_path, repo_name, config):
141 def create_hg_wsgi_app(repo_path, repo_name, config):
142 """
142 """
143 Prepares a WSGI application to handle Mercurial requests.
143 Prepares a WSGI application to handle Mercurial requests.
144
144
145 :param config: a list of 3-item tuples representing a ConfigObject
145 :param config: a list of 3-item tuples representing a ConfigObject
146 (it is the serialized version of the config object).
146 (it is the serialized version of the config object).
147 """
147 """
148 log.debug("Creating Mercurial WSGI application")
148 log.debug("Creating Mercurial WSGI application")
149
149
150 baseui = make_hg_ui_from_config(config)
150 baseui = make_hg_ui_from_config(config)
151 update_hg_ui_from_hgrc(baseui, repo_path)
151 update_hg_ui_from_hgrc(baseui, repo_path)
152
152
153 try:
153 try:
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
155 except mercurial.error.RequirementError as exc:
155 except mercurial.error.RequirementError as e:
156 raise exceptions.RequirementException(exc)
156 raise exceptions.RequirementException(e)(e)
157
157
158
158
159 class GitHandler(object):
159 class GitHandler(object):
160 """
160 """
161 Handler for Git operations like push/pull etc
161 Handler for Git operations like push/pull etc
162 """
162 """
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
164 extras):
164 extras):
165 if not os.path.isdir(repo_location):
165 if not os.path.isdir(repo_location):
166 raise OSError(repo_location)
166 raise OSError(repo_location)
167 self.content_path = repo_location
167 self.content_path = repo_location
168 self.repo_name = repo_name
168 self.repo_name = repo_name
169 self.repo_location = repo_location
169 self.repo_location = repo_location
170 self.extras = extras
170 self.extras = extras
171 self.git_path = git_path
171 self.git_path = git_path
172 self.update_server_info = update_server_info
172 self.update_server_info = update_server_info
173
173
174 def __call__(self, environ, start_response):
174 def __call__(self, environ, start_response):
175 app = webob.exc.HTTPNotFound()
175 app = webob.exc.HTTPNotFound()
176 candidate_paths = (
176 candidate_paths = (
177 self.content_path, os.path.join(self.content_path, '.git'))
177 self.content_path, os.path.join(self.content_path, '.git'))
178
178
179 for content_path in candidate_paths:
179 for content_path in candidate_paths:
180 try:
180 try:
181 app = pygrack.GitRepository(
181 app = pygrack.GitRepository(
182 self.repo_name, content_path, self.git_path,
182 self.repo_name, content_path, self.git_path,
183 self.update_server_info, self.extras)
183 self.update_server_info, self.extras)
184 break
184 break
185 except OSError:
185 except OSError:
186 continue
186 continue
187
187
188 return app(environ, start_response)
188 return app(environ, start_response)
189
189
190
190
191 def create_git_wsgi_app(repo_path, repo_name, config):
191 def create_git_wsgi_app(repo_path, repo_name, config):
192 """
192 """
193 Creates a WSGI application to handle Git requests.
193 Creates a WSGI application to handle Git requests.
194
194
195 :param config: a dictionary holding the extras.
195 :param config: a dictionary holding the extras.
196 """
196 """
197 git_path = settings.GIT_EXECUTABLE
197 git_path = settings.GIT_EXECUTABLE
198 update_server_info = config.pop('git_update_server_info')
198 update_server_info = config.pop('git_update_server_info')
199 app = GitHandler(
199 app = GitHandler(
200 repo_path, repo_name, git_path, update_server_info, config)
200 repo_path, repo_name, git_path, update_server_info, config)
201
201
202 return app
202 return app
203
203
204
204
205 class GitLFSHandler(object):
205 class GitLFSHandler(object):
206 """
206 """
207 Handler for Git LFS operations
207 Handler for Git LFS operations
208 """
208 """
209
209
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
211 extras):
211 extras):
212 if not os.path.isdir(repo_location):
212 if not os.path.isdir(repo_location):
213 raise OSError(repo_location)
213 raise OSError(repo_location)
214 self.content_path = repo_location
214 self.content_path = repo_location
215 self.repo_name = repo_name
215 self.repo_name = repo_name
216 self.repo_location = repo_location
216 self.repo_location = repo_location
217 self.extras = extras
217 self.extras = extras
218 self.git_path = git_path
218 self.git_path = git_path
219 self.update_server_info = update_server_info
219 self.update_server_info = update_server_info
220
220
221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
223 return app
223 return app
224
224
225
225
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 git_path = settings.GIT_EXECUTABLE
227 git_path = settings.GIT_EXECUTABLE
228 update_server_info = config.pop('git_update_server_info')
228 update_server_info = config.pop('git_update_server_info')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 app = GitLFSHandler(
231 app = GitLFSHandler(
232 repo_path, repo_name, git_path, update_server_info, config)
232 repo_path, repo_name, git_path, update_server_info, config)
233
233
234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,705 +1,705 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 from urllib2 import URLError
21 from urllib2 import URLError
22 import logging
22 import logging
23 import posixpath as vcspath
23 import posixpath as vcspath
24 import StringIO
24 import StringIO
25 import urllib
25 import urllib
26 import traceback
26 import traceback
27
27
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.delta
30 import svn.delta
31 import svn.diff
31 import svn.diff
32 import svn.fs
32 import svn.fs
33 import svn.repos
33 import svn.repos
34
34
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 from vcsserver.base import RepoFactory, raise_from_original
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40
40
41 # Set of svn compatible version flags.
41 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
42 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = {
43 svn_compatible_versions = {
44 'pre-1.4-compatible',
44 'pre-1.4-compatible',
45 'pre-1.5-compatible',
45 'pre-1.5-compatible',
46 'pre-1.6-compatible',
46 'pre-1.6-compatible',
47 'pre-1.8-compatible',
47 'pre-1.8-compatible',
48 'pre-1.9-compatible'
48 'pre-1.9-compatible'
49 }
49 }
50
50
51 svn_compatible_versions_map = {
51 svn_compatible_versions_map = {
52 'pre-1.4-compatible': '1.3',
52 'pre-1.4-compatible': '1.3',
53 'pre-1.5-compatible': '1.4',
53 'pre-1.5-compatible': '1.4',
54 'pre-1.6-compatible': '1.5',
54 'pre-1.6-compatible': '1.5',
55 'pre-1.8-compatible': '1.7',
55 'pre-1.8-compatible': '1.7',
56 'pre-1.9-compatible': '1.8',
56 'pre-1.9-compatible': '1.8',
57 }
57 }
58
58
59
59
60 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
61 """Decorator for converting svn exceptions to something neutral."""
61 """Decorator for converting svn exceptions to something neutral."""
62 def wrapper(*args, **kwargs):
62 def wrapper(*args, **kwargs):
63 try:
63 try:
64 return func(*args, **kwargs)
64 return func(*args, **kwargs)
65 except Exception as e:
65 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in hg remote call")
67 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException)
68 raise_from_original(exceptions.UnhandledException(e))
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
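# --- Illustrative sketch (editorial note, not part of the original module) ---
# reraise_safe_exceptions() above lets exceptions that already carry a
# `_vcs_kind` attribute (those produced by the vcsserver.exceptions wrappers)
# pass through, and routes everything else through
# raise_from_original(exceptions.UnhandledException(e)). A made-up decorated
# function, just to show which branch a plain error takes when called:
@reraise_safe_exceptions
def _example_remote_call():
    raise RuntimeError('plain error without _vcs_kind')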
73 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
74 repo_type = 'svn'
75
75
76 def _create_repo(self, wire, create, compatible_version):
76 def _create_repo(self, wire, create, compatible_version):
77 path = svn.core.svn_path_canonicalize(wire['path'])
77 path = svn.core.svn_path_canonicalize(wire['path'])
78 if create:
78 if create:
79 fs_config = {'compatible-version': '1.9'}
79 fs_config = {'compatible-version': '1.9'}
80 if compatible_version:
80 if compatible_version:
81 if compatible_version not in svn_compatible_versions:
81 if compatible_version not in svn_compatible_versions:
82 raise Exception('Unknown SVN compatible version "{}"'
82 raise Exception('Unknown SVN compatible version "{}"'
83 .format(compatible_version))
83 .format(compatible_version))
84 fs_config['compatible-version'] = \
84 fs_config['compatible-version'] = \
85 svn_compatible_versions_map[compatible_version]
85 svn_compatible_versions_map[compatible_version]
86
86
87 log.debug('Create SVN repo with config "%s"', fs_config)
87 log.debug('Create SVN repo with config "%s"', fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
89 else:
89 else:
90 repo = svn.repos.open(path)
90 repo = svn.repos.open(path)
91
91
92 log.debug('Got SVN object: %s', repo)
92 log.debug('Got SVN object: %s', repo)
93 return repo
93 return repo
94
94
95 def repo(self, wire, create=False, compatible_version=None):
95 def repo(self, wire, create=False, compatible_version=None):
96 """
96 """
97 Get a repository instance for the given path.
97 Get a repository instance for the given path.
98
98
99 Internally uses the low-level beaker API since the decorators introduce
99 Internally uses the low-level beaker API since the decorators introduce
100 significant overhead.
100 significant overhead.
101 """
101 """
102 region = self._cache_region
102 region = self._cache_region
103 context = wire.get('context', None)
103 context = wire.get('context', None)
104 repo_path = wire.get('path', '')
104 repo_path = wire.get('path', '')
105 context_uid = '{}'.format(context)
105 context_uid = '{}'.format(context)
106 cache = wire.get('cache', True)
106 cache = wire.get('cache', True)
107 cache_on = context and cache
107 cache_on = context and cache
108
108
109 @region.conditional_cache_on_arguments(condition=cache_on)
109 @region.conditional_cache_on_arguments(condition=cache_on)
110 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
110 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
111 return self._create_repo(wire, create, compatible_version)
111 return self._create_repo(wire, create, compatible_version)
112
112
113 return create_new_repo(self.repo_type, repo_path, context_uid,
113 return create_new_repo(self.repo_type, repo_path, context_uid,
114 compatible_version)
114 compatible_version)
115
115
116
116
117 NODE_TYPE_MAPPING = {
117 NODE_TYPE_MAPPING = {
118 svn.core.svn_node_file: 'file',
118 svn.core.svn_node_file: 'file',
119 svn.core.svn_node_dir: 'dir',
119 svn.core.svn_node_dir: 'dir',
120 }
120 }
121
121
122
122
123 class SvnRemote(object):
123 class SvnRemote(object):
124
124
125 def __init__(self, factory, hg_factory=None):
125 def __init__(self, factory, hg_factory=None):
126 self._factory = factory
126 self._factory = factory
127 # TODO: Remove once we do not use internal Mercurial objects anymore
127 # TODO: Remove once we do not use internal Mercurial objects anymore
128 # for subversion
128 # for subversion
129 self._hg_factory = hg_factory
129 self._hg_factory = hg_factory
130
130
131 @reraise_safe_exceptions
131 @reraise_safe_exceptions
132 def discover_svn_version(self):
132 def discover_svn_version(self):
133 try:
133 try:
134 import svn.core
134 import svn.core
135 svn_ver = svn.core.SVN_VERSION
135 svn_ver = svn.core.SVN_VERSION
136 except ImportError:
136 except ImportError:
137 svn_ver = None
137 svn_ver = None
138 return svn_ver
138 return svn_ver
139
139
140 def check_url(self, url, config_items):
140 def check_url(self, url, config_items):
141 # this can throw exception if not installed, but we detect this
141 # this can throw exception if not installed, but we detect this
142 from hgsubversion import svnrepo
142 from hgsubversion import svnrepo
143
143
144 baseui = self._hg_factory._create_config(config_items)
144 baseui = self._hg_factory._create_config(config_items)
145 # uuid function gets a valid UUID only from a proper repo, else
145 # uuid function gets a valid UUID only from a proper repo, else
146 # throws exception
146 # throws exception
147 try:
147 try:
148 svnrepo.svnremoterepo(baseui, url).svn.uuid
148 svnrepo.svnremoterepo(baseui, url).svn.uuid
149 except Exception:
149 except Exception:
150 tb = traceback.format_exc()
150 tb = traceback.format_exc()
151 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
151 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
152 raise URLError(
152 raise URLError(
153 '"%s" is not a valid Subversion source url.' % (url, ))
153 '"%s" is not a valid Subversion source url.' % (url, ))
154 return True
154 return True
155
155
156 def is_path_valid_repository(self, wire, path):
156 def is_path_valid_repository(self, wire, path):
157
157
158 # NOTE(marcink): short circuit the check for SVN repo
158 # NOTE(marcink): short circuit the check for SVN repo
159 # the repos.open might be expensive to check, but we have one cheap
159 # the repos.open might be expensive to check, but we have one cheap
160 # precondition that we can use: check for the 'format' file
160 # precondition that we can use: check for the 'format' file
161
161
162 if not os.path.isfile(os.path.join(path, 'format')):
162 if not os.path.isfile(os.path.join(path, 'format')):
163 return False
163 return False
164
164
165 try:
165 try:
166 svn.repos.open(path)
166 svn.repos.open(path)
167 except svn.core.SubversionException:
167 except svn.core.SubversionException:
168 tb = traceback.format_exc()
168 tb = traceback.format_exc()
169 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
169 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
170 return False
170 return False
171 return True
171 return True
172
172
173 @reraise_safe_exceptions
173 @reraise_safe_exceptions
174 def verify(self, wire,):
174 def verify(self, wire,):
175 repo_path = wire['path']
175 repo_path = wire['path']
176 if not self.is_path_valid_repository(wire, repo_path):
176 if not self.is_path_valid_repository(wire, repo_path):
177 raise Exception(
177 raise Exception(
178 "Path %s is not a valid Subversion repository." % repo_path)
178 "Path %s is not a valid Subversion repository." % repo_path)
179
179
180 cmd = ['svnadmin', 'info', repo_path]
180 cmd = ['svnadmin', 'info', repo_path]
181 stdout, stderr = subprocessio.run_command(cmd)
181 stdout, stderr = subprocessio.run_command(cmd)
182 return stdout
182 return stdout
183
183
184 def lookup(self, wire, revision):
184 def lookup(self, wire, revision):
185 if revision not in [-1, None, 'HEAD']:
185 if revision not in [-1, None, 'HEAD']:
186 raise NotImplementedError
186 raise NotImplementedError
187 repo = self._factory.repo(wire)
187 repo = self._factory.repo(wire)
188 fs_ptr = svn.repos.fs(repo)
188 fs_ptr = svn.repos.fs(repo)
189 head = svn.fs.youngest_rev(fs_ptr)
189 head = svn.fs.youngest_rev(fs_ptr)
190 return head
190 return head
191
191
192 def lookup_interval(self, wire, start_ts, end_ts):
192 def lookup_interval(self, wire, start_ts, end_ts):
193 repo = self._factory.repo(wire)
193 repo = self._factory.repo(wire)
194 fsobj = svn.repos.fs(repo)
194 fsobj = svn.repos.fs(repo)
195 start_rev = None
195 start_rev = None
196 end_rev = None
196 end_rev = None
197 if start_ts:
197 if start_ts:
198 start_ts_svn = apr_time_t(start_ts)
198 start_ts_svn = apr_time_t(start_ts)
199 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
199 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
200 else:
200 else:
201 start_rev = 1
201 start_rev = 1
202 if end_ts:
202 if end_ts:
203 end_ts_svn = apr_time_t(end_ts)
203 end_ts_svn = apr_time_t(end_ts)
204 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
204 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
205 else:
205 else:
206 end_rev = svn.fs.youngest_rev(fsobj)
206 end_rev = svn.fs.youngest_rev(fsobj)
207 return start_rev, end_rev
207 return start_rev, end_rev
208
208
209 def revision_properties(self, wire, revision):
209 def revision_properties(self, wire, revision):
210 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
211 fs_ptr = svn.repos.fs(repo)
211 fs_ptr = svn.repos.fs(repo)
212 return svn.fs.revision_proplist(fs_ptr, revision)
212 return svn.fs.revision_proplist(fs_ptr, revision)
213
213
214 def revision_changes(self, wire, revision):
214 def revision_changes(self, wire, revision):
215
215
216 repo = self._factory.repo(wire)
216 repo = self._factory.repo(wire)
217 fsobj = svn.repos.fs(repo)
217 fsobj = svn.repos.fs(repo)
218 rev_root = svn.fs.revision_root(fsobj, revision)
218 rev_root = svn.fs.revision_root(fsobj, revision)
219
219
220 editor = svn.repos.ChangeCollector(fsobj, rev_root)
220 editor = svn.repos.ChangeCollector(fsobj, rev_root)
221 editor_ptr, editor_baton = svn.delta.make_editor(editor)
221 editor_ptr, editor_baton = svn.delta.make_editor(editor)
222 base_dir = ""
222 base_dir = ""
223 send_deltas = False
223 send_deltas = False
224 svn.repos.replay2(
224 svn.repos.replay2(
225 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
225 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
226 editor_ptr, editor_baton, None)
226 editor_ptr, editor_baton, None)
227
227
228 added = []
228 added = []
229 changed = []
229 changed = []
230 removed = []
230 removed = []
231
231
232 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
232 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
233 for path, change in editor.changes.iteritems():
233 for path, change in editor.changes.iteritems():
234 # TODO: Decide what to do with directory nodes. Subversion can add
234 # TODO: Decide what to do with directory nodes. Subversion can add
235 # empty directories.
235 # empty directories.
236
236
237 if change.item_kind == svn.core.svn_node_dir:
237 if change.item_kind == svn.core.svn_node_dir:
238 continue
238 continue
239 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
239 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
240 added.append(path)
240 added.append(path)
241 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
241 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
242 svn.repos.CHANGE_ACTION_REPLACE]:
242 svn.repos.CHANGE_ACTION_REPLACE]:
243 changed.append(path)
243 changed.append(path)
244 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
244 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
245 removed.append(path)
245 removed.append(path)
246 else:
246 else:
247 raise NotImplementedError(
247 raise NotImplementedError(
248 "Action %s not supported on path %s" % (
248 "Action %s not supported on path %s" % (
249 change.action, path))
249 change.action, path))
250
250
251 changes = {
251 changes = {
252 'added': added,
252 'added': added,
253 'changed': changed,
253 'changed': changed,
254 'removed': removed,
254 'removed': removed,
255 }
255 }
256 return changes
256 return changes
257
257
258 def node_history(self, wire, path, revision, limit):
258 def node_history(self, wire, path, revision, limit):
259 cross_copies = False
259 cross_copies = False
260 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
261 fsobj = svn.repos.fs(repo)
261 fsobj = svn.repos.fs(repo)
262 rev_root = svn.fs.revision_root(fsobj, revision)
262 rev_root = svn.fs.revision_root(fsobj, revision)
263
263
264 history_revisions = []
264 history_revisions = []
265 history = svn.fs.node_history(rev_root, path)
265 history = svn.fs.node_history(rev_root, path)
266 history = svn.fs.history_prev(history, cross_copies)
266 history = svn.fs.history_prev(history, cross_copies)
267 while history:
267 while history:
268 __, node_revision = svn.fs.history_location(history)
268 __, node_revision = svn.fs.history_location(history)
269 history_revisions.append(node_revision)
269 history_revisions.append(node_revision)
270 if limit and len(history_revisions) >= limit:
270 if limit and len(history_revisions) >= limit:
271 break
271 break
272 history = svn.fs.history_prev(history, cross_copies)
272 history = svn.fs.history_prev(history, cross_copies)
273 return history_revisions
273 return history_revisions
274
274
    def node_properties(self, wire, path, revision):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)
        return svn.fs.node_proplist(rev_root, path)

    def file_annotate(self, wire, path, revision):
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations

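For callers, the result of `file_annotate` is exactly the list that `receiver` builds: one `(line_no, revision, line)` tuple per line of the file at the requested revision. An illustrative value; the revisions and text are invented, and the line numbering is whatever svn.client.blame2 hands to the callback:

# Hypothetical result for a three-line file.
annotations = [
    (1, 12, 'first line'),
    (2, 12, 'second line'),
    (3, 15, 'third line, touched in a later revision'),
]
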
    def get_node_type(self, wire, path, rev=None):
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_rev(fs_ptr)
        root = svn.fs.revision_root(fs_ptr, rev)
        node = svn.fs.check_path(root, path)
        return NODE_TYPE_MAPPING.get(node, None)

    def get_nodes(self, wire, path, revision=None):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_rev(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        entries = svn.fs.dir_entries(root, path)
        result = []
        for entry_path, entry_info in entries.iteritems():
            result.append(
                (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
        return result

    def get_file_content(self, wire, path, rev=None):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()

    def get_file_size(self, wire, path, revision=None):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        size = svn.fs.file_length(root, path)
        return size

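Taken together, these read-only helpers let a client inspect a repository over the wire without touching the Subversion bindings directly. A rough usage sketch; the `remote` object, repository path and file path are assumptions for illustration:

# `remote` stands for an SvnRemote instance wired up with a factory, as in the
# methods above; the repository and file paths are made up.
wire = {'path': '/srv/svn/example-repo'}
kind = remote.get_node_type(wire, 'trunk/README.txt')     # value from NODE_TYPE_MAPPING, e.g. 'file'
children = remote.get_nodes(wire, 'trunk')                # list of (name, kind) tuples
data = remote.get_file_content(wire, 'trunk/README.txt')  # raw file content at the youngest revision
assert remote.get_file_size(wire, 'trunk/README.txt') == len(data)
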
    def create_repository(self, wire, compatible_version=None):
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)

    def import_remote_repository(self, wire, src_url):
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        # TODO: johbo: URL checks ?
        import subprocess
        rdump = subprocess.Popen(
            ['svnrdump', 'dump', '--non-interactive', src_url],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'
            raise Exception(
                'Failed to dump the remote repository from %s.' % src_url,
                reason)
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

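Note the shape of the failure above: the dump error carries a second, machine-readable argument ('UNKNOWN' or 'INVALID_CERTIFICATE'). A hedged sketch of caller-side handling; the URL and the reaction are illustrative only, not part of this module:

try:
    remote.import_remote_repository(wire, 'https://svn.example.com/repo')
except Exception as exc:
    # the second args entry is only present for the dump failure
    reason = exc.args[1] if len(exc.args) > 1 else 'UNKNOWN'
    log.error('Import failed, reason: %s', reason)
    raise
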
    def commit(self, wire, message, author, timestamp, updated, removed):
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

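The `updated` and `removed` arguments are lists of plain node dictionaries; judging from `TxnNodeProcessor` further down, only 'path' is consulted for removals, and 'path', 'content' plus an optional 'properties' mapping for updates. A minimal sketch with made-up paths, content and caller objects:

# Sketch only: the paths, content and the `remote`/`wire` objects are assumptions.
updated = [{
    'path': 'trunk/docs/readme.txt',
    'content': 'Hello, Subversion!\n',
    'properties': {'svn:eol-style': 'native'},  # optional
}]
removed = [{'path': 'trunk/obsolete.txt'}]

new_rev = remote.commit(
    wire, message='Update docs', author='editor <editor@example.com>',
    timestamp=1514764800, updated=updated, removed=removed)
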
    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):

        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        try:
            return diff_creator.generate_diff()
        except svn.core.SubversionException as e:
            log.exception(
                "Error during diff operation. "
                "Path might not exist %s, %s" % (path1, path2))
            return ""

    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        return False

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)


class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

                #TODO(marcink): intro to binary detection of svn patches
                # if self.binary_content:
                #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

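For orientation, a modified text file comes out of the writes above shaped roughly like the following; the paths, revisions and hunk body are invented, the separator is a row of 67 '=' characters, and the hunk lines are produced by svn_diff.unified_diff:

Index: readme.txt
===================================================================
diff --git a/readme.txt b/readme.txt
--- a/readme.txt	(revision 3)
+++ b/readme.txt	(revision 4)
@@ -1,2 +1,2 @@
-old line
+new line
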
    def _get_mime_type(self, path):
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)


class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))


def authorization_callback_allow_all(root, path, pool):
    return True

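As the three callbacks show, the editor flattens the delta into simple (path, kind, action) tuples, which _generate_dir_diff then sorts and replays. An illustrative value, with invented paths:

# Possible content of editor.changes after svn.repos.dir_delta2 has driven the
# editor across two revisions; the paths are made up.
changes = [
    ('docs/old.txt', None, 'delete'),   # recorded by delete_entry
    ('docs/new.txt', 'file', 'add'),    # recorded by add_file
    ('setup.py', 'file', 'change'),     # recorded by open_file
]
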
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)

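Worth noting from the helpers above: `update()` first creates any missing parent directories, then the file itself, so deep paths can be committed in one step. A hedged sketch of how `SvnRemote.commit` drives this; the node is invented and `txn_root` is assumed to come from svn.fs.txn_root(txn) as shown there:

node = {'path': 'branches/feature/docs/notes.txt', 'content': 'draft\n'}
TxnNodeProcessor(node, txn_root).update()   # creates branches/feature/docs/ as needed
TxnNodeProcessor({'path': 'trunk/obsolete.txt'}, txn_root).remove()
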
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    return timestamp * 1E6

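The 1E6 factor reflects that APR timestamps count microseconds since the Unix epoch, so the conversion is a plain multiplication; a quick check:

# 2018-01-01 00:00:00 UTC as a Unix timestamp, expressed in microseconds.
assert apr_time_t(1514764800) == 1514764800 * 1e6   # 1.5147648e+15
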

def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,127 +1,127 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2018 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import inspect
import sys
import traceback

import pytest
from mercurial.error import LookupError
from mock import Mock, MagicMock, patch

from vcsserver import exceptions, hg, hgcompat


class TestHGLookup(object):
    def setup(self):
        self.mock_repo = MagicMock()
        self.mock_repo.__getitem__.side_effect = LookupError(
            'revision_or_commit_id', 'index', 'message')
        factory = Mock()
        factory.repo = Mock(return_value=self.mock_repo)
        self.remote_hg = hg.HgRemote(factory)

    def test_fail_lookup_hg(self):
        with pytest.raises(Exception) as exc_info:
            self.remote_hg.lookup(
                wire=None, revision='revision_or_commit_id', both=True)

        assert exc_info.value._vcs_kind == 'lookup'
        assert 'revision_or_commit_id' in exc_info.value.args


class TestDiff(object):
    def test_raising_safe_exception_when_lookup_failed(self):
        repo = Mock()
        factory = Mock()
        factory.repo = Mock(return_value=repo)
        hg_remote = hg.HgRemote(factory)
        with patch('mercurial.patch.diff') as diff_mock:
            diff_mock.side_effect = LookupError(
                'deadbeef', 'index', 'message')
            with pytest.raises(Exception) as exc_info:
                hg_remote.diff(
                    wire=None, rev1='deadbeef', rev2='deadbee1',
                    file_filter=None, opt_git=True, opt_ignorews=True,
                    context=3)
            assert type(exc_info.value) == Exception
            assert exc_info.value._vcs_kind == 'lookup'


class TestReraiseSafeExceptions(object):
    def test_method_decorated_with_reraise_safe_exceptions(self):
        factory = Mock()
        hg_remote = hg.HgRemote(factory)
        methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
        decorator = hg.reraise_safe_exceptions(None)
        for method_name, method in methods:
            if not method_name.startswith('_'):
                assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (hgcompat.Abort(), 'abort'),
        (hgcompat.InterventionRequired(), 'abort'),
        (hgcompat.RepoLookupError(), 'lookup'),
        (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
        (hgcompat.RepoError(), 'error'),
        (hgcompat.RequirementError(), 'requirement'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        @hg.reraise_safe_exceptions
        def fake_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            fake_method()
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type

    def test_keeps_original_traceback(self):
        @hg.reraise_safe_exceptions
        def fake_method():
            try:
                raise hgcompat.Abort()
            except:
                self.original_traceback = traceback.format_tb(
                    sys.exc_info()[2])
                raise

        try:
            fake_method()
        except Exception:
            new_traceback = traceback.format_tb(sys.exc_info()[2])

        new_traceback_tail = new_traceback[-len(self.original_traceback):]
        assert new_traceback_tail == self.original_traceback

    def test_maps_unknow_exceptions_to_unhandled(self):
        @hg.reraise_safe_exceptions
        def stub_method():
            raise ValueError('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'unhandled'

    def test_does_not_map_known_exceptions(self):
        @hg.reraise_safe_exceptions
        def stub_method():
            raise exceptions.LookupException()('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'lookup'
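
These tests pin down the contract that client-side callers rely on: they only ever receive a plain Exception and branch on its `_vcs_kind` marker. A hedged, self-contained sketch of such a caller follows; the function name and the human-readable messages are invented, and only the `_vcs_kind` values asserted in this file ('abort', 'lookup', 'error', 'requirement', 'unhandled') are taken as given.

def classify_remote_error(exc):
    # Hypothetical client-side helper, not part of vcsserver: map the
    # _vcs_kind marker set by the exception wrappers to a rough category.
    kind = getattr(exc, '_vcs_kind', None)
    if kind == 'lookup':
        return 'commit, file or revision not found'
    if kind in ('abort', 'error', 'requirement'):
        return 'backend rejected the operation'
    if kind == 'unhandled':
        return 'unexpected error inside vcsserver'
    return 'not a wrapped vcsserver exception'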