git: switched most git operations to libgit2
marcink
r725:d64a71b9 default
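This changeset replaces Dulwich with libgit2 (through the pygit2 bindings) for most read-side Git operations in the VCSServer remote: repository state, blobs, refs, commit metadata and tree listings. As a rough orientation before the diff, a minimal sketch of the pygit2 calls the new code path relies on (the repository path is illustrative, not taken from the change):

    import pygit2

    repo = pygit2.Repository('/path/to/repo.git')  # illustrative path
    print(repo.is_empty)           # backs GitRemote.is_empty below
    print(repo.is_bare)            # backs GitRemote.bare below
    if not repo.is_empty:
        head = repo.head.peel()    # backs GitRemote.head below
        print(head.hex)

Write operations such as commit creation, pull and push still go through Dulwich or the git binary, hence "most" in the commit message.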
@@ -1,94 +1,76 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 from vcsserver.lib.rc_cache import region_meta
23 from vcsserver.lib.rc_cache import region_meta
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class RepoFactory(object):
27 class RepoFactory(object):
28 """
28 """
29 Utility to create instances of repository
29 Utility to create instances of repository
30
30
31 It provides internal caching of the `repo` object based on
31 It provides internal caching of the `repo` object based on
32 the :term:`call context`.
32 the :term:`call context`.
33 """
33 """
34 repo_type = None
34 repo_type = None
35
35
36 def __init__(self):
36 def __init__(self):
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38
38
39 def _create_config(self, path, config):
39 def _create_config(self, path, config):
40 config = {}
40 config = {}
41 return config
41 return config
42
42
43 def _create_repo(self, wire, create):
43 def _create_repo(self, wire, create):
44 raise NotImplementedError()
44 raise NotImplementedError()
45
45
46 def repo(self, wire, create=False):
46 def repo(self, wire, create=False):
47 """
47 raise NotImplementedError()
48 Get a repository instance for the given path.
49
50 Uses internally the low level beaker API since the decorators introduce
51 significant overhead.
52 """
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
62 return self._create_repo(wire, create)
63
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
65 return repo
66
48
67
49
68 def obfuscate_qs(query_string):
50 def obfuscate_qs(query_string):
69 if query_string is None:
51 if query_string is None:
70 return None
52 return None
71
53
72 parsed = []
54 parsed = []
73 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
55 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
74 if k in ['auth_token', 'api_key']:
56 if k in ['auth_token', 'api_key']:
75 v = "*****"
57 v = "*****"
76 parsed.append((k, v))
58 parsed.append((k, v))
77
59
78 return '&'.join('{}{}'.format(
60 return '&'.join('{}{}'.format(
79 k, '={}'.format(v) if v else '') for k, v in parsed)
61 k, '={}'.format(v) if v else '') for k, v in parsed)
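A quick illustration of what obfuscate_qs produces, since it is used to sanitize remote URLs before they are logged in check_url (the query string below is made up):

    from vcsserver.base import obfuscate_qs

    print(obfuscate_qs('auth_token=secret&repo=foo&flag'))
    # -> 'auth_token=*****&repo=foo&flag'   (sensitive keys masked,
    #    blank values rendered without a trailing '=')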
80
62
81
63
82 def raise_from_original(new_type):
64 def raise_from_original(new_type):
83 """
65 """
84 Raise a new exception type with original args and traceback.
66 Raise a new exception type with original args and traceback.
85 """
67 """
86 exc_type, exc_value, exc_traceback = sys.exc_info()
68 exc_type, exc_value, exc_traceback = sys.exc_info()
87 new_exc = new_type(*exc_value.args)
69 new_exc = new_type(*exc_value.args)
88 # store the original traceback into the new exc
70 # store the original traceback into the new exc
89 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
90
72
91 try:
73 try:
92 raise new_exc, None, exc_traceback
74 raise new_exc, None, exc_traceback
93 finally:
75 finally:
94 del exc_traceback
76 del exc_traceback
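raise_from_original is the Python 2 idiom for re-raising under a different exception type while keeping the original traceback (the three-argument raise form). A hedged usage sketch with a made-up caller and exception type:

    from vcsserver.base import raise_from_original

    # ConfigReadError is an illustrative exception type, not part of vcsserver
    class ConfigReadError(Exception):
        pass

    def read_config(path):
        try:
            return open(path).read()
        except IOError:
            # must run inside an except block: raise_from_original reads
            # sys.exc_info() and re-raises ConfigReadError with the original
            # args, attaching the formatted traceback as _org_exc_tb
            raise_from_original(ConfigReadError)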
@@ -1,752 +1,845 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import collections
17 import collections
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 import more_itertools
28 import more_itertools
29 import pygit2
30 from pygit2 import Repository as LibGit2Repo
29 from dulwich import index, objects
31 from dulwich import index, objects
30 from dulwich.client import HttpGitClient, LocalGitClient
32 from dulwich.client import HttpGitClient, LocalGitClient
31 from dulwich.errors import (
33 from dulwich.errors import (
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
34 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 MissingCommitError, ObjectMissing, HangupException,
35 MissingCommitError, ObjectMissing, HangupException,
34 UnexpectedCommandError)
36 UnexpectedCommandError)
35 from dulwich.repo import Repo as DulwichRepo, Tag
37 from dulwich.repo import Repo as DulwichRepo
36 from dulwich.server import update_server_info
38 from dulwich.server import update_server_info
37
39
38 from vcsserver import exceptions, settings, subprocessio
40 from vcsserver import exceptions, settings, subprocessio
39 from vcsserver.utils import safe_str
41 from vcsserver.utils import safe_str
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
42 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 from vcsserver.hgcompat import (
43 from vcsserver.hgcompat import (
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
44 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 from vcsserver.git_lfs.lib import LFSOidStore
45 from vcsserver.git_lfs.lib import LFSOidStore
44
46
45 DIR_STAT = stat.S_IFDIR
47 DIR_STAT = stat.S_IFDIR
46 FILE_MODE = stat.S_IFMT
48 FILE_MODE = stat.S_IFMT
47 GIT_LINK = objects.S_IFGITLINK
49 GIT_LINK = objects.S_IFGITLINK
48
50
49 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
50
52
51
53
52 def reraise_safe_exceptions(func):
54 def reraise_safe_exceptions(func):
53 """Converts Dulwich exceptions to something neutral."""
55 """Converts Dulwich exceptions to something neutral."""
56
54 @wraps(func)
57 @wraps(func)
55 def wrapper(*args, **kwargs):
58 def wrapper(*args, **kwargs):
56 try:
59 try:
57 return func(*args, **kwargs)
60 return func(*args, **kwargs)
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
61 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
59 ObjectMissing) as e:
62 exc = exceptions.LookupException(org_exc=e)
60 exc = exceptions.LookupException(e)
63 raise exc(safe_str(e))
61 raise exc(e)
62 except (HangupException, UnexpectedCommandError) as e:
64 except (HangupException, UnexpectedCommandError) as e:
63 exc = exceptions.VcsException(e)
65 exc = exceptions.VcsException(org_exc=e)
64 raise exc(e)
66 raise exc(safe_str(e))
65 except Exception as e:
67 except Exception as e:
66 # NOTE(marcink): because of how dulwich handles some exceptions
68 # NOTE(marcink): because of how dulwich handles some exceptions
67 # (KeyError on empty repos), we cannot track this and catch all
69 # (KeyError on empty repos), we cannot track this and catch all
68 # exceptions; these are exceptions from other handlers
70 # exceptions; these are exceptions from other handlers
69 #if not hasattr(e, '_vcs_kind'):
71 #if not hasattr(e, '_vcs_kind'):
70 #log.exception("Unhandled exception in git remote call")
72 #log.exception("Unhandled exception in git remote call")
71 #raise_from_original(exceptions.UnhandledException)
73 #raise_from_original(exceptions.UnhandledException)
72 raise
74 raise
73 return wrapper
75 return wrapper
74
76
75
77
76 class Repo(DulwichRepo):
78 class Repo(DulwichRepo):
77 """
79 """
78 A wrapper for dulwich Repo class.
80 A wrapper for dulwich Repo class.
79
81
80 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
82 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
81 "Too many open files" error. We need to close all opened file descriptors
83 "Too many open files" error. We need to close all opened file descriptors
82 once the repo object is destroyed.
84 once the repo object is destroyed.
83
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 to 0.12.0 +
86 """
85 """
87 def __del__(self):
86 def __del__(self):
88 if hasattr(self, 'object_store'):
87 if hasattr(self, 'object_store'):
89 self.close()
88 self.close()
90
89
91
90
91 class Repository(LibGit2Repo):
92
93 def __enter__(self):
94 return self
95
96 def __exit__(self, exc_type, exc_val, exc_tb):
97 self.free()
98
99
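The new Repository class only adds context-manager behaviour on top of pygit2.Repository, so callers can release the underlying libgit2 handle with free() as soon as they are done instead of waiting for garbage collection. The libgit2-backed methods further down (blob_as_pretty_string, tree_items) use it like this sketch, assuming the Repository wrapper above is in scope (the path is illustrative):

    repo_init = Repository('/path/to/repo.git')   # illustrative path
    with repo_init as repo:
        print(repo.is_empty)
    # leaving the block calls repo.free(), dropping the libgit2 handle early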
92 class GitFactory(RepoFactory):
100 class GitFactory(RepoFactory):
93 repo_type = 'git'
101 repo_type = 'git'
94
102
95 def _create_repo(self, wire, create):
103 def _create_repo(self, wire, create, use_libgit2=False):
96 repo_path = str_to_dulwich(wire['path'])
104 if use_libgit2:
97 return Repo(repo_path)
105 return Repository(wire['path'])
106 else:
107 repo_path = str_to_dulwich(wire['path'])
108 return Repo(repo_path)
109
110 def repo(self, wire, create=False, use_libgit2=False):
111 """
112 Get a repository instance for the given path.
113 """
114 region = self._cache_region
115 context = wire.get('context', None)
116 repo_path = wire.get('path', '')
117 context_uid = '{}'.format(context)
118 cache = wire.get('cache', True)
119 cache_on = context and cache
120
121 @region.conditional_cache_on_arguments(condition=cache_on)
122 def create_new_repo(_repo_type, _repo_path, _context_uid, _use_libgit2):
123 return self._create_repo(wire, create, use_libgit2)
124
125 repo = create_new_repo(self.repo_type, repo_path, context_uid, use_libgit2)
126 return repo
127
128 def repo_libgit2(self, wire):
129 return self.repo(wire, use_libgit2=True)
98
130
99
131
100 class GitRemote(object):
132 class GitRemote(object):
101
133
102 def __init__(self, factory):
134 def __init__(self, factory):
103 self._factory = factory
135 self._factory = factory
104 self.peeled_ref_marker = '^{}'
136 self.peeled_ref_marker = '^{}'
105 self._bulk_methods = {
137 self._bulk_methods = {
106 "author": self.commit_attribute,
138 "date": self.date,
107 "date": self.get_object_attrs,
139 "author": self.author,
108 "message": self.commit_attribute,
140 "message": self.message,
109 "parents": self.commit_attribute,
141 "parents": self.parents,
110 "_commit": self.revision,
142 "_commit": self.revision,
111 }
143 }
112
144
113 def _wire_to_config(self, wire):
145 def _wire_to_config(self, wire):
114 if 'config' in wire:
146 if 'config' in wire:
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 return {}
148 return {}
117
149
118 def _assign_ref(self, wire, ref, commit_id):
119 repo = self._factory.repo(wire)
120 repo[ref] = commit_id
121
122 def _remote_conf(self, config):
150 def _remote_conf(self, config):
123 params = [
151 params = [
124 '-c', 'core.askpass=""',
152 '-c', 'core.askpass=""',
125 ]
153 ]
126 ssl_cert_dir = config.get('vcs_ssl_dir')
154 ssl_cert_dir = config.get('vcs_ssl_dir')
127 if ssl_cert_dir:
155 if ssl_cert_dir:
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 return params
157 return params
130
158
131 @reraise_safe_exceptions
159 @reraise_safe_exceptions
132 def is_empty(self, wire):
160 def is_empty(self, wire):
133 repo = self._factory.repo(wire)
161 repo = self._factory.repo_libgit2(wire)
134 try:
162
135 return not repo.head()
163 # NOTE(marcink): old solution as an alternative
136 except Exception:
164 # try:
137 log.exception("failed to read object_store")
165 # return not repo.head.name
138 return True
166 # except Exception:
167 # return True
168
169 return repo.is_empty
139
170
140 @reraise_safe_exceptions
171 @reraise_safe_exceptions
141 def add_object(self, wire, content):
172 def add_object(self, wire, content):
142 repo = self._factory.repo(wire)
173 repo = self._factory.repo(wire)
143 blob = objects.Blob()
174 blob = objects.Blob()
144 blob.set_raw_string(content)
175 blob.set_raw_string(content)
145 repo.object_store.add_object(blob)
176 repo.object_store.add_object(blob)
146 return blob.id
177 return blob.id
147
178
148 @reraise_safe_exceptions
179 @reraise_safe_exceptions
149 def assert_correct_path(self, wire):
180 def assert_correct_path(self, wire):
150 path = wire.get('path')
151 try:
181 try:
152 self._factory.repo(wire)
182 self._factory.repo_libgit2(wire)
153 except NotGitRepository as e:
183 except pygit2.GitError:
184 path = wire.get('path')
154 tb = traceback.format_exc()
185 tb = traceback.format_exc()
155 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
186 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
156 return False
187 return False
157
188
158 return True
189 return True
159
190
160 @reraise_safe_exceptions
191 @reraise_safe_exceptions
161 def bare(self, wire):
192 def bare(self, wire):
162 repo = self._factory.repo(wire)
193 repo = self._factory.repo_libgit2(wire)
163 return repo.bare
194 return repo.is_bare
164
195
165 @reraise_safe_exceptions
196 @reraise_safe_exceptions
166 def blob_as_pretty_string(self, wire, sha):
197 def blob_as_pretty_string(self, wire, sha):
167 repo = self._factory.repo(wire)
198 repo_init = self._factory.repo_libgit2(wire)
168 return repo[sha].as_pretty_string()
199 with repo_init as repo:
200 blob_obj = repo[sha]
201 blob = blob_obj.data
202 return blob
169
203
170 @reraise_safe_exceptions
204 @reraise_safe_exceptions
171 def blob_raw_length(self, wire, sha):
205 def blob_raw_length(self, wire, sha):
172 repo = self._factory.repo(wire)
206 repo_init = self._factory.repo_libgit2(wire)
173 blob = repo[sha]
207 with repo_init as repo:
174 return blob.raw_length()
208 blob = repo[sha]
209 return blob.size
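With libgit2 the blob content and length are plain attributes, replacing Dulwich's as_pretty_string() and raw_length(). A short sketch, assuming you already hold a valid blob id (path and id are placeholders):

    import pygit2

    repo = pygit2.Repository('/path/to/repo.git')   # illustrative path
    blob_sha = 'a' * 40                              # placeholder object id
    blob = repo[blob_sha]
    content = blob.data    # raw bytes of the blob
    length = blob.size     # size in bytes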
175
210
176 def _parse_lfs_pointer(self, raw_content):
211 def _parse_lfs_pointer(self, raw_content):
177
212
178 spec_string = 'version https://git-lfs.github.com/spec'
213 spec_string = 'version https://git-lfs.github.com/spec'
179 if raw_content and raw_content.startswith(spec_string):
214 if raw_content and raw_content.startswith(spec_string):
180 pattern = re.compile(r"""
215 pattern = re.compile(r"""
181 (?:\n)?
216 (?:\n)?
182 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
217 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
183 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
218 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
184 ^size[ ](?P<oid_size>[0-9]+)\n
219 ^size[ ](?P<oid_size>[0-9]+)\n
185 (?:\n)?
220 (?:\n)?
186 """, re.VERBOSE | re.MULTILINE)
221 """, re.VERBOSE | re.MULTILINE)
187 match = pattern.match(raw_content)
222 match = pattern.match(raw_content)
188 if match:
223 if match:
189 return match.groupdict()
224 return match.groupdict()
190
225
191 return {}
226 return {}
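For context, the content _parse_lfs_pointer recognises is the standard Git LFS pointer file. A sample (the oid and size values are made up):

    raw_content = (
        "version https://git-lfs.github.com/spec/v1\n"
        "oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\n"
        "size 12345\n"
    )
    # _parse_lfs_pointer(raw_content) would return:
    # {'spec_ver': 'v1', 'oid_hash': '<the 64-char digest above>', 'oid_size': '12345'}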
192
227
193 @reraise_safe_exceptions
228 @reraise_safe_exceptions
194 def is_large_file(self, wire, sha):
229 def is_large_file(self, wire, sha):
195 repo = self._factory.repo(wire)
230 repo = self._factory.repo(wire)
196 blob = repo[sha]
231 blob = repo[sha]
197 return self._parse_lfs_pointer(blob.as_raw_string())
232 return self._parse_lfs_pointer(blob.as_raw_string())
198
233
199 @reraise_safe_exceptions
234 @reraise_safe_exceptions
200 def in_largefiles_store(self, wire, oid):
235 def in_largefiles_store(self, wire, oid):
201 repo = self._factory.repo(wire)
236 repo = self._factory.repo(wire)
202 conf = self._wire_to_config(wire)
237 conf = self._wire_to_config(wire)
203
238
204 store_location = conf.get('vcs_git_lfs_store_location')
239 store_location = conf.get('vcs_git_lfs_store_location')
205 if store_location:
240 if store_location:
206 repo_name = repo.path
241 repo_name = repo.path
207 store = LFSOidStore(
242 store = LFSOidStore(
208 oid=oid, repo=repo_name, store_location=store_location)
243 oid=oid, repo=repo_name, store_location=store_location)
209 return store.has_oid()
244 return store.has_oid()
210
245
211 return False
246 return False
212
247
213 @reraise_safe_exceptions
248 @reraise_safe_exceptions
214 def store_path(self, wire, oid):
249 def store_path(self, wire, oid):
215 repo = self._factory.repo(wire)
250 repo = self._factory.repo(wire)
216 conf = self._wire_to_config(wire)
251 conf = self._wire_to_config(wire)
217
252
218 store_location = conf.get('vcs_git_lfs_store_location')
253 store_location = conf.get('vcs_git_lfs_store_location')
219 if store_location:
254 if store_location:
220 repo_name = repo.path
255 repo_name = repo.path
221 store = LFSOidStore(
256 store = LFSOidStore(
222 oid=oid, repo=repo_name, store_location=store_location)
257 oid=oid, repo=repo_name, store_location=store_location)
223 return store.oid_path
258 return store.oid_path
224 raise ValueError('Unable to fetch oid with path {}'.format(oid))
259 raise ValueError('Unable to fetch oid with path {}'.format(oid))
225
260
226 @reraise_safe_exceptions
261 @reraise_safe_exceptions
227 def bulk_request(self, wire, rev, pre_load):
262 def bulk_request(self, wire, rev, pre_load):
228 result = {}
263 result = {}
229 for attr in pre_load:
264 for attr in pre_load:
230 try:
265 try:
231 method = self._bulk_methods[attr]
266 method = self._bulk_methods[attr]
232 args = [wire, rev]
267 args = [wire, rev]
233 if attr == "date":
234 args.extend(["commit_time", "commit_timezone"])
235 elif attr in ["author", "message", "parents"]:
236 args.append(attr)
237 result[attr] = method(*args)
268 result[attr] = method(*args)
238 except KeyError as e:
269 except KeyError as e:
239 raise exceptions.VcsException(e)(
270 raise exceptions.VcsException(e)("Unknown bulk attribute: %s" % attr)
240 "Unknown bulk attribute: %s" % attr)
241 return result
271 return result
242
272
243 def _build_opener(self, url):
273 def _build_opener(self, url):
244 handlers = []
274 handlers = []
245 url_obj = url_parser(url)
275 url_obj = url_parser(url)
246 _, authinfo = url_obj.authinfo()
276 _, authinfo = url_obj.authinfo()
247
277
248 if authinfo:
278 if authinfo:
249 # create a password manager
279 # create a password manager
250 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
280 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
251 passmgr.add_password(*authinfo)
281 passmgr.add_password(*authinfo)
252
282
253 handlers.extend((httpbasicauthhandler(passmgr),
283 handlers.extend((httpbasicauthhandler(passmgr),
254 httpdigestauthhandler(passmgr)))
284 httpdigestauthhandler(passmgr)))
255
285
256 return urllib2.build_opener(*handlers)
286 return urllib2.build_opener(*handlers)
257
287
288 def _type_id_to_name(self, type_id):
289 return {
290 1: b'commit',
291 2: b'tree',
292 3: b'blob',
293 4: b'tag'
294 }[type_id]
295
258 @reraise_safe_exceptions
296 @reraise_safe_exceptions
259 def check_url(self, url, config):
297 def check_url(self, url, config):
260 url_obj = url_parser(url)
298 url_obj = url_parser(url)
261 test_uri, _ = url_obj.authinfo()
299 test_uri, _ = url_obj.authinfo()
262 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
300 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
263 url_obj.query = obfuscate_qs(url_obj.query)
301 url_obj.query = obfuscate_qs(url_obj.query)
264 cleaned_uri = str(url_obj)
302 cleaned_uri = str(url_obj)
265 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
303 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
266
304
267 if not test_uri.endswith('info/refs'):
305 if not test_uri.endswith('info/refs'):
268 test_uri = test_uri.rstrip('/') + '/info/refs'
306 test_uri = test_uri.rstrip('/') + '/info/refs'
269
307
270 o = self._build_opener(url)
308 o = self._build_opener(url)
271 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
309 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
272
310
273 q = {"service": 'git-upload-pack'}
311 q = {"service": 'git-upload-pack'}
274 qs = '?%s' % urllib.urlencode(q)
312 qs = '?%s' % urllib.urlencode(q)
275 cu = "%s%s" % (test_uri, qs)
313 cu = "%s%s" % (test_uri, qs)
276 req = urllib2.Request(cu, None, {})
314 req = urllib2.Request(cu, None, {})
277
315
278 try:
316 try:
279 log.debug("Trying to open URL %s", cleaned_uri)
317 log.debug("Trying to open URL %s", cleaned_uri)
280 resp = o.open(req)
318 resp = o.open(req)
281 if resp.code != 200:
319 if resp.code != 200:
282 raise exceptions.URLError()('Return Code is not 200')
320 raise exceptions.URLError()('Return Code is not 200')
283 except Exception as e:
321 except Exception as e:
284 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
322 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
285 # means it cannot be cloned
323 # means it cannot be cloned
286 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
324 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
287
325
288 # now detect if it's proper git repo
326 # now detect if it's proper git repo
289 gitdata = resp.read()
327 gitdata = resp.read()
290 if 'service=git-upload-pack' in gitdata:
328 if 'service=git-upload-pack' in gitdata:
291 pass
329 pass
292 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
330 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
293 # old style git can return some other format !
331 # old style git can return some other format !
294 pass
332 pass
295 else:
333 else:
296 raise exceptions.URLError()(
334 raise exceptions.URLError()(
297 "url [%s] does not look like an git" % (cleaned_uri,))
335 "url [%s] does not look like an git" % (cleaned_uri,))
298
336
299 return True
337 return True
300
338
301 @reraise_safe_exceptions
339 @reraise_safe_exceptions
302 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
340 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
303 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
341 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
304 remote_refs = self.pull(wire, url, apply_refs=False)
342 remote_refs = self.pull(wire, url, apply_refs=False)
305 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
306 if isinstance(valid_refs, list):
344 if isinstance(valid_refs, list):
307 valid_refs = tuple(valid_refs)
345 valid_refs = tuple(valid_refs)
308
346
309 for k in remote_refs:
347 for k in remote_refs:
310 # only parse heads/tags and skip so called deferred tags
348 # only parse heads/tags and skip so called deferred tags
311 if k.startswith(valid_refs) and not k.endswith(deferred):
349 if k.startswith(valid_refs) and not k.endswith(deferred):
312 repo[k] = remote_refs[k]
350 repo[k] = remote_refs[k]
313
351
314 if update_after_clone:
352 if update_after_clone:
315 # we want to checkout HEAD
353 # we want to checkout HEAD
316 repo["HEAD"] = remote_refs["HEAD"]
354 repo["HEAD"] = remote_refs["HEAD"]
317 index.build_index_from_tree(repo.path, repo.index_path(),
355 index.build_index_from_tree(repo.path, repo.index_path(),
318 repo.object_store, repo["HEAD"].tree)
356 repo.object_store, repo["HEAD"].tree)
319
357
320 # TODO: this is quite complex, check if that can be simplified
358 # TODO: this is quite complex, check if that can be simplified
321 @reraise_safe_exceptions
359 @reraise_safe_exceptions
322 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
360 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
323 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
324 object_store = repo.object_store
362 object_store = repo.object_store
325
363
326 # Create tree and populates it with blobs
364 # Create tree and populates it with blobs
327 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
365 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
328
366
329 for node in updated:
367 for node in updated:
330 # Compute subdirs if needed
368 # Compute subdirs if needed
331 dirpath, nodename = vcspath.split(node['path'])
369 dirpath, nodename = vcspath.split(node['path'])
332 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
370 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
333 parent = commit_tree
371 parent = commit_tree
334 ancestors = [('', parent)]
372 ancestors = [('', parent)]
335
373
336 # Tries to dig for the deepest existing tree
374 # Tries to dig for the deepest existing tree
337 while dirnames:
375 while dirnames:
338 curdir = dirnames.pop(0)
376 curdir = dirnames.pop(0)
339 try:
377 try:
340 dir_id = parent[curdir][1]
378 dir_id = parent[curdir][1]
341 except KeyError:
379 except KeyError:
342 # put curdir back into dirnames and stops
380 # put curdir back into dirnames and stops
343 dirnames.insert(0, curdir)
381 dirnames.insert(0, curdir)
344 break
382 break
345 else:
383 else:
346 # If found, updates parent
384 # If found, updates parent
347 parent = repo[dir_id]
385 parent = repo[dir_id]
348 ancestors.append((curdir, parent))
386 ancestors.append((curdir, parent))
349 # Now parent is deepest existing tree and we need to create
387 # Now parent is deepest existing tree and we need to create
350 # subtrees for dirnames (in reverse order)
388 # subtrees for dirnames (in reverse order)
351 # [this only applies for nodes from added]
389 # [this only applies for nodes from added]
352 new_trees = []
390 new_trees = []
353
391
354 blob = objects.Blob.from_string(node['content'])
392 blob = objects.Blob.from_string(node['content'])
355
393
356 if dirnames:
394 if dirnames:
357 # If there are trees which should be created we need to build
395 # If there are trees which should be created we need to build
358 # them now (in reverse order)
396 # them now (in reverse order)
359 reversed_dirnames = list(reversed(dirnames))
397 reversed_dirnames = list(reversed(dirnames))
360 curtree = objects.Tree()
398 curtree = objects.Tree()
361 curtree[node['node_path']] = node['mode'], blob.id
399 curtree[node['node_path']] = node['mode'], blob.id
362 new_trees.append(curtree)
400 new_trees.append(curtree)
363 for dirname in reversed_dirnames[:-1]:
401 for dirname in reversed_dirnames[:-1]:
364 newtree = objects.Tree()
402 newtree = objects.Tree()
365 newtree[dirname] = (DIR_STAT, curtree.id)
403 newtree[dirname] = (DIR_STAT, curtree.id)
366 new_trees.append(newtree)
404 new_trees.append(newtree)
367 curtree = newtree
405 curtree = newtree
368 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
406 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
369 else:
407 else:
370 parent.add(
408 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
371 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
372
409
373 new_trees.append(parent)
410 new_trees.append(parent)
374 # Update ancestors
411 # Update ancestors
375 reversed_ancestors = reversed(
412 reversed_ancestors = reversed(
376 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
413 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
377 for parent, tree, path in reversed_ancestors:
414 for parent, tree, path in reversed_ancestors:
378 parent[path] = (DIR_STAT, tree.id)
415 parent[path] = (DIR_STAT, tree.id)
379 object_store.add_object(tree)
416 object_store.add_object(tree)
380
417
381 object_store.add_object(blob)
418 object_store.add_object(blob)
382 for tree in new_trees:
419 for tree in new_trees:
383 object_store.add_object(tree)
420 object_store.add_object(tree)
384
421
385 for node_path in removed:
422 for node_path in removed:
386 paths = node_path.split('/')
423 paths = node_path.split('/')
387 tree = commit_tree
424 tree = commit_tree
388 trees = [tree]
425 trees = [tree]
389 # Traverse deep into the forest...
426 # Traverse deep into the forest...
390 for path in paths:
427 for path in paths:
391 try:
428 try:
392 obj = repo[tree[path][1]]
429 obj = repo[tree[path][1]]
393 if isinstance(obj, objects.Tree):
430 if isinstance(obj, objects.Tree):
394 trees.append(obj)
431 trees.append(obj)
395 tree = obj
432 tree = obj
396 except KeyError:
433 except KeyError:
397 break
434 break
398 # Cut down the blob and all rotten trees on the way back...
435 # Cut down the blob and all rotten trees on the way back...
399 for path, tree in reversed(zip(paths, trees)):
436 for path, tree in reversed(zip(paths, trees)):
400 del tree[path]
437 del tree[path]
401 if tree:
438 if tree:
402 # This tree still has elements - don't remove it or any
439 # This tree still has elements - don't remove it or any
403 # of it's parents
403 # of its parents
440 # of its parents
441 break
405
442
406 object_store.add_object(commit_tree)
443 object_store.add_object(commit_tree)
407
444
408 # Create commit
445 # Create commit
409 commit = objects.Commit()
446 commit = objects.Commit()
410 commit.tree = commit_tree.id
447 commit.tree = commit_tree.id
411 for k, v in commit_data.iteritems():
448 for k, v in commit_data.iteritems():
412 setattr(commit, k, v)
449 setattr(commit, k, v)
413 object_store.add_object(commit)
450 object_store.add_object(commit)
414
451
452 self.create_branch(wire, branch, commit.id)
453
454 # dulwich set-ref
415 ref = 'refs/heads/%s' % branch
455 ref = 'refs/heads/%s' % branch
416 repo.refs[ref] = commit.id
456 repo.refs[ref] = commit.id
417
457
418 return commit.id
458 return commit.id
419
459
420 @reraise_safe_exceptions
460 @reraise_safe_exceptions
421 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
461 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
422 if url != 'default' and '://' not in url:
462 if url != 'default' and '://' not in url:
423 client = LocalGitClient(url)
463 client = LocalGitClient(url)
424 else:
464 else:
425 url_obj = url_parser(url)
465 url_obj = url_parser(url)
426 o = self._build_opener(url)
466 o = self._build_opener(url)
427 url, _ = url_obj.authinfo()
467 url, _ = url_obj.authinfo()
428 client = HttpGitClient(base_url=url, opener=o)
468 client = HttpGitClient(base_url=url, opener=o)
429 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
430
470
431 determine_wants = repo.object_store.determine_wants_all
471 determine_wants = repo.object_store.determine_wants_all
432 if refs:
472 if refs:
433 def determine_wants_requested(references):
473 def determine_wants_requested(references):
434 return [references[r] for r in references if r in refs]
474 return [references[r] for r in references if r in refs]
435 determine_wants = determine_wants_requested
475 determine_wants = determine_wants_requested
436
476
437 try:
477 try:
438 remote_refs = client.fetch(
478 remote_refs = client.fetch(
439 path=url, target=repo, determine_wants=determine_wants)
479 path=url, target=repo, determine_wants=determine_wants)
440 except NotGitRepository as e:
480 except NotGitRepository as e:
441 log.warning(
481 log.warning(
442 'Trying to fetch from "%s" failed, not a Git repository.', url)
482 'Trying to fetch from "%s" failed, not a Git repository.', url)
443 # Exception can contain unicode which we convert
483 # Exception can contain unicode which we convert
444 raise exceptions.AbortException(e)(repr(e))
484 raise exceptions.AbortException(e)(repr(e))
445
485
446 # mikhail: client.fetch() returns all the remote refs, but fetches only
486 # mikhail: client.fetch() returns all the remote refs, but fetches only
447 # refs filtered by `determine_wants` function. We need to filter result
487 # refs filtered by `determine_wants` function. We need to filter result
448 # as well
488 # as well
449 if refs:
489 if refs:
450 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
490 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
451
491
452 if apply_refs:
492 if apply_refs:
453 # TODO: johbo: Needs proper test coverage with a git repository
493 # TODO: johbo: Needs proper test coverage with a git repository
454 # that contains a tag object, so that we would end up with
494 # that contains a tag object, so that we would end up with
455 # a peeled ref at this point.
495 # a peeled ref at this point.
456 for k in remote_refs:
496 for k in remote_refs:
457 if k.endswith(self.peeled_ref_marker):
497 if k.endswith(self.peeled_ref_marker):
458 log.debug("Skipping peeled reference %s", k)
498 log.debug("Skipping peeled reference %s", k)
459 continue
499 continue
460 repo[k] = remote_refs[k]
500 repo[k] = remote_refs[k]
461
501
462 if refs and not update_after:
502 if refs and not update_after:
463 # mikhail: explicitly set the head to the last ref.
503 # mikhail: explicitly set the head to the last ref.
464 repo['HEAD'] = remote_refs[refs[-1]]
504 repo['HEAD'] = remote_refs[refs[-1]]
465
505
466 if update_after:
506 if update_after:
467 # we want to checkout HEAD
507 # we want to checkout HEAD
468 repo["HEAD"] = remote_refs["HEAD"]
508 repo["HEAD"] = remote_refs["HEAD"]
469 index.build_index_from_tree(repo.path, repo.index_path(),
509 index.build_index_from_tree(repo.path, repo.index_path(),
470 repo.object_store, repo["HEAD"].tree)
510 repo.object_store, repo["HEAD"].tree)
471 return remote_refs
511 return remote_refs
472
512
473 @reraise_safe_exceptions
513 @reraise_safe_exceptions
474 def sync_fetch(self, wire, url, refs=None):
514 def sync_fetch(self, wire, url, refs=None):
475 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
476 if refs and not isinstance(refs, (list, tuple)):
516 if refs and not isinstance(refs, (list, tuple)):
477 refs = [refs]
517 refs = [refs]
478 config = self._wire_to_config(wire)
518 config = self._wire_to_config(wire)
479 # get all remote refs we'll use to fetch later
519 # get all remote refs we'll use to fetch later
480 output, __ = self.run_git_command(
520 output, __ = self.run_git_command(
481 wire, ['ls-remote', url], fail_on_stderr=False,
521 wire, ['ls-remote', url], fail_on_stderr=False,
482 _copts=self._remote_conf(config),
522 _copts=self._remote_conf(config),
483 extra_env={'GIT_TERMINAL_PROMPT': '0'})
523 extra_env={'GIT_TERMINAL_PROMPT': '0'})
484
524
485 remote_refs = collections.OrderedDict()
525 remote_refs = collections.OrderedDict()
486 fetch_refs = []
526 fetch_refs = []
487
527
488 for ref_line in output.splitlines():
528 for ref_line in output.splitlines():
489 sha, ref = ref_line.split('\t')
529 sha, ref = ref_line.split('\t')
490 sha = sha.strip()
530 sha = sha.strip()
491 if ref in remote_refs:
531 if ref in remote_refs:
492 # duplicate, skip
532 # duplicate, skip
493 continue
533 continue
494 if ref.endswith(self.peeled_ref_marker):
534 if ref.endswith(self.peeled_ref_marker):
495 log.debug("Skipping peeled reference %s", ref)
535 log.debug("Skipping peeled reference %s", ref)
496 continue
536 continue
497 # don't sync HEAD
537 # don't sync HEAD
498 if ref in ['HEAD']:
538 if ref in ['HEAD']:
499 continue
539 continue
500
540
501 remote_refs[ref] = sha
541 remote_refs[ref] = sha
502
542
503 if refs and sha in refs:
543 if refs and sha in refs:
504 # we filter fetch using our specified refs
544 # we filter fetch using our specified refs
505 fetch_refs.append('{}:{}'.format(ref, ref))
545 fetch_refs.append('{}:{}'.format(ref, ref))
506 elif not refs:
546 elif not refs:
507 fetch_refs.append('{}:{}'.format(ref, ref))
547 fetch_refs.append('{}:{}'.format(ref, ref))
508 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
548 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
509 if fetch_refs:
549 if fetch_refs:
510 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
550 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
511 fetch_refs_chunks = list(chunk)
551 fetch_refs_chunks = list(chunk)
512 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
552 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
513 _out, _err = self.run_git_command(
553 _out, _err = self.run_git_command(
514 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
554 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
515 fail_on_stderr=False,
555 fail_on_stderr=False,
516 _copts=self._remote_conf(config),
556 _copts=self._remote_conf(config),
517 extra_env={'GIT_TERMINAL_PROMPT': '0'})
557 extra_env={'GIT_TERMINAL_PROMPT': '0'})
518
558
519 return remote_refs
559 return remote_refs
520
560
521 @reraise_safe_exceptions
561 @reraise_safe_exceptions
522 def sync_push(self, wire, url, refs=None):
562 def sync_push(self, wire, url, refs=None):
523 if not self.check_url(url, wire):
563 if not self.check_url(url, wire):
524 return
564 return
525 config = self._wire_to_config(wire)
565 config = self._wire_to_config(wire)
526 repo = self._factory.repo(wire)
566 repo = self._factory.repo(wire)
527 self.run_git_command(
567 self.run_git_command(
528 wire, ['push', url, '--mirror'], fail_on_stderr=False,
568 wire, ['push', url, '--mirror'], fail_on_stderr=False,
529 _copts=self._remote_conf(config),
569 _copts=self._remote_conf(config),
530 extra_env={'GIT_TERMINAL_PROMPT': '0'})
570 extra_env={'GIT_TERMINAL_PROMPT': '0'})
531
571
532 @reraise_safe_exceptions
572 @reraise_safe_exceptions
533 def get_remote_refs(self, wire, url):
573 def get_remote_refs(self, wire, url):
534 repo = Repo(url)
574 repo = Repo(url)
535 return repo.get_refs()
575 return repo.get_refs()
536
576
537 @reraise_safe_exceptions
577 @reraise_safe_exceptions
538 def get_description(self, wire):
578 def get_description(self, wire):
539 repo = self._factory.repo(wire)
579 repo = self._factory.repo(wire)
540 return repo.get_description()
580 return repo.get_description()
541
581
542 @reraise_safe_exceptions
582 @reraise_safe_exceptions
543 def get_missing_revs(self, wire, rev1, rev2, path2):
583 def get_missing_revs(self, wire, rev1, rev2, path2):
544 repo = self._factory.repo(wire)
584 repo = self._factory.repo(wire)
545 LocalGitClient(thin_packs=False).fetch(path2, repo)
585 LocalGitClient(thin_packs=False).fetch(path2, repo)
546
586
547 wire_remote = wire.copy()
587 wire_remote = wire.copy()
548 wire_remote['path'] = path2
588 wire_remote['path'] = path2
549 repo_remote = self._factory.repo(wire_remote)
589 repo_remote = self._factory.repo(wire_remote)
550 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
590 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
551
591
552 revs = [
592 revs = [
553 x.commit.id
593 x.commit.id
554 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
594 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
555 return revs
595 return revs
556
596
557 @reraise_safe_exceptions
597 @reraise_safe_exceptions
558 def get_object(self, wire, sha):
598 def get_object(self, wire, sha):
559 repo = self._factory.repo(wire)
599 repo = self._factory.repo_libgit2(wire)
560 obj = repo.get_object(sha)
561 commit_id = obj.id
562
600
563 if isinstance(obj, Tag):
601 try:
564 commit_id = obj.object[1]
602 commit = repo.revparse_single(sha)
603 except (KeyError, ValueError) as e:
604 msg = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
605 raise exceptions.LookupException(e)(msg)
606
607 if isinstance(commit, pygit2.Tag):
608 commit = repo.get(commit.target)
609
610 commit_id = commit.hex
611 type_id = commit.type
565
612
566 return {
613 return {
567 'id': obj.id,
614 'id': commit_id,
568 'type': obj.type_name,
615 'type': self._type_id_to_name(type_id),
569 'commit_id': commit_id,
616 'commit_id': commit_id,
570 'idx': 0
617 'idx': 0
571 }
618 }
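revparse_single accepts anything git rev-parse does (a full or abbreviated sha, a ref name, HEAD), and annotated tags are peeled to the object they point at via repo.get(tag.target). A hedged standalone sketch of the same lookup (the path is illustrative):

    import pygit2

    repo = pygit2.Repository('/path/to/repo.git')   # illustrative path
    obj = repo.revparse_single('HEAD')               # sha, ref name or HEAD
    if isinstance(obj, pygit2.Tag):
        obj = repo.get(obj.target)                   # peel annotated tag
    print(obj.hex)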
572
619
573 @reraise_safe_exceptions
620 @reraise_safe_exceptions
574 def get_object_attrs(self, wire, sha, *attrs):
621 def get_refs(self, wire):
575 repo = self._factory.repo(wire)
622 repo = self._factory.repo_libgit2(wire)
576 obj = repo.get_object(sha)
577 return list(getattr(obj, a) for a in attrs)
578
623
579 @reraise_safe_exceptions
580 def get_refs(self, wire):
581 repo = self._factory.repo(wire)
582 result = {}
624 result = {}
583 for ref, sha in repo.refs.as_dict().items():
625 for ref in repo.references:
584 peeled_sha = repo.get_peeled(ref)
626 peeled_sha = repo.lookup_reference(ref).peel()
585 result[ref] = peeled_sha
627 result[ref] = peeled_sha.hex
628
586 return result
629 return result
587
630
588 @reraise_safe_exceptions
631 @reraise_safe_exceptions
589 def get_refs_path(self, wire):
590 repo = self._factory.repo(wire)
591 return repo.refs.path
592
593 @reraise_safe_exceptions
594 def head(self, wire, show_exc=True):
632 def head(self, wire, show_exc=True):
595 repo = self._factory.repo(wire)
633 repo = self._factory.repo_libgit2(wire)
596 try:
634 try:
597 return repo.head()
635 return repo.head.peel().hex
598 except Exception:
636 except Exception:
599 if show_exc:
637 if show_exc:
600 raise
638 raise
601
639
602 @reraise_safe_exceptions
640 @reraise_safe_exceptions
603 def init(self, wire):
641 def init(self, wire):
604 repo_path = str_to_dulwich(wire['path'])
642 repo_path = str_to_dulwich(wire['path'])
605 self.repo = Repo.init(repo_path)
643 self.repo = Repo.init(repo_path)
606
644
607 @reraise_safe_exceptions
645 @reraise_safe_exceptions
608 def init_bare(self, wire):
646 def init_bare(self, wire):
609 repo_path = str_to_dulwich(wire['path'])
647 repo_path = str_to_dulwich(wire['path'])
610 self.repo = Repo.init_bare(repo_path)
648 self.repo = Repo.init_bare(repo_path)
611
649
612 @reraise_safe_exceptions
650 @reraise_safe_exceptions
613 def revision(self, wire, rev):
651 def revision(self, wire, rev):
614 repo = self._factory.repo(wire)
652 repo = self._factory.repo_libgit2(wire)
615 obj = repo[rev]
653 commit = repo[rev]
616 obj_data = {
654 obj_data = {
617 'id': obj.id,
655 'id': commit.id.hex,
618 }
656 }
619 try:
657 # tree objects themselves don't have a tree_id attribute
620 obj_data['tree'] = obj.tree
658 if hasattr(commit, 'tree_id'):
621 except AttributeError:
659 obj_data['tree'] = commit.tree_id.hex
622 pass
660
623 return obj_data
661 return obj_data
624
662
625 @reraise_safe_exceptions
663 @reraise_safe_exceptions
626 def commit_attribute(self, wire, rev, attr):
664 def date(self, wire, rev):
627 repo = self._factory.repo(wire)
665 repo = self._factory.repo_libgit2(wire)
628 obj = repo[rev]
666 commit = repo[rev]
629 return getattr(obj, attr)
667 # TODO(marcink): check dulwich difference of offset vs timezone
668 return [commit.commit_time, commit.commit_time_offset]
669
670 @reraise_safe_exceptions
671 def author(self, wire, rev):
672 repo = self._factory.repo_libgit2(wire)
673 commit = repo[rev]
674 if commit.author.email:
675 return u"{} <{}>".format(commit.author.name, commit.author.email)
676
677 return u"{}".format(commit.author.raw_name)
678
679 @reraise_safe_exceptions
680 def message(self, wire, rev):
681 repo = self._factory.repo_libgit2(wire)
682 commit = repo[rev]
683 return commit.message
684
685 @reraise_safe_exceptions
686 def parents(self, wire, rev):
687 repo = self._factory.repo_libgit2(wire)
688 commit = repo[rev]
689 return [x.hex for x in commit.parent_ids]
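These small per-attribute methods replace the earlier generic commit_attribute/get_object_attrs helpers; with pygit2 the data sits directly on the Commit object. A sketch (path and revision are illustrative):

    import pygit2

    repo = pygit2.Repository('/path/to/repo.git')             # illustrative path
    commit = repo.revparse_single('HEAD')
    author = u"{} <{}>".format(commit.author.name, commit.author.email)
    date = [commit.commit_time, commit.commit_time_offset]    # epoch seconds, offset in minutes
    message = commit.message
    parents = [p.hex for p in commit.parent_ids]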
630
690
631 @reraise_safe_exceptions
691 @reraise_safe_exceptions
632 def set_refs(self, wire, key, value):
692 def set_refs(self, wire, key, value):
633 repo = self._factory.repo(wire)
693 repo = self._factory.repo_libgit2(wire)
634 repo.refs[key] = value
694 repo.references.create(key, value, force=True)
695
696 @reraise_safe_exceptions
697 def create_branch(self, wire, branch_name, commit_id, force=False):
698 repo = self._factory.repo_libgit2(wire)
699 commit = repo[commit_id]
700
701 if force:
702 repo.branches.local.create(branch_name, commit, force=force)
703 elif not repo.branches.get(branch_name):
704 # create only if that branch isn't existing
705 repo.branches.local.create(branch_name, commit, force=force)
635
706
636 @reraise_safe_exceptions
707 @reraise_safe_exceptions
637 def remove_ref(self, wire, key):
708 def remove_ref(self, wire, key):
638 repo = self._factory.repo(wire)
709 repo = self._factory.repo_libgit2(wire)
639 del repo.refs[key]
710 repo.references.delete(key)
711
712 @reraise_safe_exceptions
713 def tag_remove(self, wire, tag_name):
714 repo = self._factory.repo_libgit2(wire)
715 key = 'refs/tags/{}'.format(tag_name)
716 repo.references.delete(key)
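Reference updates now go through pygit2's references and branches collections instead of the Dulwich repo.refs mapping. A sketch of the calls used by set_refs, create_branch, remove_ref and tag_remove (the path and ref names are illustrative):

    import pygit2

    repo = pygit2.Repository('/path/to/repo.git')             # illustrative path
    commit = repo.revparse_single('HEAD')

    repo.references.create('refs/tags/example-tag', commit.hex, force=True)   # set_refs
    if not repo.branches.get('example-branch'):
        repo.branches.local.create('example-branch', commit)                  # create_branch
    repo.references.delete('refs/tags/example-tag')                           # remove_ref / tag_remove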
640
717
641 @reraise_safe_exceptions
718 @reraise_safe_exceptions
642 def tree_changes(self, wire, source_id, target_id):
719 def tree_changes(self, wire, source_id, target_id):
720 # TODO(marcink): remove this seems it's only used by tests
643 repo = self._factory.repo(wire)
721 repo = self._factory.repo(wire)
644 source = repo[source_id].tree if source_id else None
722 source = repo[source_id].tree if source_id else None
645 target = repo[target_id].tree
723 target = repo[target_id].tree
646 result = repo.object_store.tree_changes(source, target)
724 result = repo.object_store.tree_changes(source, target)
647 return list(result)
725 return list(result)
648
726
649 @reraise_safe_exceptions
727 @reraise_safe_exceptions
650 def tree_items(self, wire, tree_id):
728 def tree_items(self, wire, tree_id):
651 repo = self._factory.repo(wire)
729 repo_init = self._factory.repo_libgit2(wire)
652 tree = repo[tree_id]
653
730
654 result = []
731 with repo_init as repo:
655 for item in tree.iteritems():
732 tree = repo[tree_id]
656 item_sha = item.sha
657 item_mode = item.mode
658
733
659 if FILE_MODE(item_mode) == GIT_LINK:
734 result = []
660 item_type = "link"
735 for item in tree:
661 else:
736 item_sha = item.hex
662 item_type = repo[item_sha].type_name
737 item_mode = item.filemode
738 item_type = item.type
663
739
664 result.append((item.path, item_mode, item_sha, item_type))
740 if item_type == 'commit':
665 return result
741 # NOTE(marcink): submodules we translate to 'link' for backward compat
742 item_type = 'link'
743
744 result.append((item.name, item_mode, item_sha, item_type))
745 return result
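Tree listing through libgit2: iterating a Tree yields entries exposing name, filemode, hex and type, and submodule entries (type 'commit') are mapped to 'link' to preserve the contract of the old Dulwich implementation. A hedged sketch (the path is illustrative):

    import pygit2

    repo = pygit2.Repository('/path/to/repo.git')    # illustrative path
    tree = repo.revparse_single('HEAD').tree
    for entry in tree:
        item_type = 'link' if entry.type == 'commit' else entry.type
        print((entry.name, entry.filemode, entry.hex, item_type))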
666
746
667 @reraise_safe_exceptions
747 @reraise_safe_exceptions
668 def update_server_info(self, wire):
748 def update_server_info(self, wire):
669 repo = self._factory.repo(wire)
749 repo = self._factory.repo(wire)
670 update_server_info(repo)
750 update_server_info(repo)
671
751
672 @reraise_safe_exceptions
752 @reraise_safe_exceptions
673 def discover_git_version(self):
753 def discover_git_version(self):
674 stdout, _ = self.run_git_command(
754 stdout, _ = self.run_git_command(
675 {}, ['--version'], _bare=True, _safe=True)
755 {}, ['--version'], _bare=True, _safe=True)
676 prefix = 'git version'
756 prefix = 'git version'
677 if stdout.startswith(prefix):
757 if stdout.startswith(prefix):
678 stdout = stdout[len(prefix):]
758 stdout = stdout[len(prefix):]
679 return stdout.strip()
759 return stdout.strip()
680
760
681 @reraise_safe_exceptions
761 @reraise_safe_exceptions
762 def get_all_commit_ids(self, wire):
763 if self.is_empty(wire):
764 return []
765
766 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
767 try:
768 output, __ = self.run_git_command(wire, cmd)
769 return output.splitlines()
770 except Exception:
771 # Can be raised for empty repositories
772 return []
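get_all_commit_ids shells out through run_git_command rather than walking objects in-process; a sketch of the equivalent plumbing call and its output shape, using subprocess directly (the repository path is illustrative):

    import subprocess

    out = subprocess.check_output(
        ['git', 'rev-list', '--reverse', '--date-order', '--branches', '--tags'],
        cwd='/path/to/repo.git')              # illustrative path
    commit_ids = out.splitlines()             # full commit ids, oldest first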
773
774 @reraise_safe_exceptions
682 def run_git_command(self, wire, cmd, **opts):
775 def run_git_command(self, wire, cmd, **opts):
683 path = wire.get('path', None)
776 path = wire.get('path', None)
684
777
685 if path and os.path.isdir(path):
778 if path and os.path.isdir(path):
686 opts['cwd'] = path
779 opts['cwd'] = path
687
780
688 if '_bare' in opts:
781 if '_bare' in opts:
689 _copts = []
782 _copts = []
690 del opts['_bare']
783 del opts['_bare']
691 else:
784 else:
692 _copts = ['-c', 'core.quotepath=false', ]
785 _copts = ['-c', 'core.quotepath=false', ]
693 safe_call = False
786 safe_call = False
694 if '_safe' in opts:
787 if '_safe' in opts:
695 # no exc on failure
788 # no exc on failure
696 del opts['_safe']
789 del opts['_safe']
697 safe_call = True
790 safe_call = True
698
791
699 if '_copts' in opts:
792 if '_copts' in opts:
700 _copts.extend(opts['_copts'] or [])
793 _copts.extend(opts['_copts'] or [])
701 del opts['_copts']
794 del opts['_copts']
702
795
703 gitenv = os.environ.copy()
796 gitenv = os.environ.copy()
704 gitenv.update(opts.pop('extra_env', {}))
797 gitenv.update(opts.pop('extra_env', {}))
705 # need to clean fix GIT_DIR !
798 # need to clean fix GIT_DIR !
706 if 'GIT_DIR' in gitenv:
799 if 'GIT_DIR' in gitenv:
707 del gitenv['GIT_DIR']
800 del gitenv['GIT_DIR']
708 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
801 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
709 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
802 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
710
803
711 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
804 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
712 _opts = {'env': gitenv, 'shell': False}
805 _opts = {'env': gitenv, 'shell': False}
713
806
714 try:
807 try:
715 _opts.update(opts)
808 _opts.update(opts)
716 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
809 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
717
810
718 return ''.join(p), ''.join(p.error)
811 return ''.join(p), ''.join(p.error)
719 except (EnvironmentError, OSError) as err:
812 except (EnvironmentError, OSError) as err:
720 cmd = ' '.join(cmd) # human friendly CMD
813 cmd = ' '.join(cmd) # human friendly CMD
721 tb_err = ("Couldn't run git command (%s).\n"
814 tb_err = ("Couldn't run git command (%s).\n"
722 "Original error was:%s\n"
815 "Original error was:%s\n"
723 "Call options:%s\n"
816 "Call options:%s\n"
724 % (cmd, err, _opts))
817 % (cmd, err, _opts))
725 log.exception(tb_err)
818 log.exception(tb_err)
726 if safe_call:
819 if safe_call:
727 return '', err
820 return '', err
728 else:
821 else:
729 raise exceptions.VcsException()(tb_err)
822 raise exceptions.VcsException()(tb_err)
730
823
731 @reraise_safe_exceptions
824 @reraise_safe_exceptions
732 def install_hooks(self, wire, force=False):
825 def install_hooks(self, wire, force=False):
733 from vcsserver.hook_utils import install_git_hooks
826 from vcsserver.hook_utils import install_git_hooks
734 repo = self._factory.repo(wire)
827 repo = self._factory.repo(wire)
735 return install_git_hooks(repo.path, repo.bare, force_create=force)
828 return install_git_hooks(repo.path, repo.bare, force_create=force)
736
829
737 @reraise_safe_exceptions
830 @reraise_safe_exceptions
738 def get_hooks_info(self, wire):
831 def get_hooks_info(self, wire):
739 from vcsserver.hook_utils import (
832 from vcsserver.hook_utils import (
740 get_git_pre_hook_version, get_git_post_hook_version)
833 get_git_pre_hook_version, get_git_post_hook_version)
741 repo = self._factory.repo(wire)
834 repo = self._factory.repo(wire)
742 return {
835 return {
743 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
836 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
744 'post_version': get_git_post_hook_version(repo.path, repo.bare),
837 'post_version': get_git_post_hook_version(repo.path, repo.bare),
745 }
838 }
746
839
747
840
748 def str_to_dulwich(value):
841 def str_to_dulwich(value):
749 """
842 """
750 Dulwich 0.10.1a requires `unicode` objects to be passed in.
843 Dulwich 0.10.1a requires `unicode` objects to be passed in.
751 """
844 """
752 return value.decode(settings.WIRE_ENCODING)
845 return value.decode(settings.WIRE_ENCODING)
@@ -1,856 +1,874 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30
30
31 import vcsserver
31 import vcsserver
32 from vcsserver import exceptions
32 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 def make_ui_from_config(repo_config):
44 def make_ui_from_config(repo_config):
45
45
46 class LoggingUI(ui.ui):
46 class LoggingUI(ui.ui):
47 def status(self, *msg, **opts):
47 def status(self, *msg, **opts):
48 log.info(' '.join(msg).rstrip('\n'))
48 log.info(' '.join(msg).rstrip('\n'))
49 super(LoggingUI, self).status(*msg, **opts)
49 super(LoggingUI, self).status(*msg, **opts)
50
50
51 def warn(self, *msg, **opts):
51 def warn(self, *msg, **opts):
52 log.warn(' '.join(msg).rstrip('\n'))
52 log.warn(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).warn(*msg, **opts)
53 super(LoggingUI, self).warn(*msg, **opts)
54
54
55 def error(self, *msg, **opts):
55 def error(self, *msg, **opts):
56 log.error(' '.join(msg).rstrip('\n'))
56 log.error(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).error(*msg, **opts)
57 super(LoggingUI, self).error(*msg, **opts)
58
58
59 def note(self, *msg, **opts):
59 def note(self, *msg, **opts):
60 log.info(' '.join(msg).rstrip('\n'))
60 log.info(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).note(*msg, **opts)
61 super(LoggingUI, self).note(*msg, **opts)
62
62
63 def debug(self, *msg, **opts):
63 def debug(self, *msg, **opts):
64 log.debug(' '.join(msg).rstrip('\n'))
64 log.debug(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).debug(*msg, **opts)
65 super(LoggingUI, self).debug(*msg, **opts)
66
66
67 baseui = LoggingUI()
67 baseui = LoggingUI()
68
68
69 # clean the baseui object
69 # clean the baseui object
70 baseui._ocfg = hgconfig.config()
70 baseui._ocfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
73
73
74 for section, option, value in repo_config:
74 for section, option, value in repo_config:
75 baseui.setconfig(section, option, value)
75 baseui.setconfig(section, option, value)
76
76
77 # make our hgweb quiet so it doesn't print output
77 # make our hgweb quiet so it doesn't print output
78 baseui.setconfig('ui', 'quiet', 'true')
78 baseui.setconfig('ui', 'quiet', 'true')
79
79
80 baseui.setconfig('ui', 'paginate', 'never')
80 baseui.setconfig('ui', 'paginate', 'never')
81 # for better error reporting from Mercurial
81 # for better error reporting from Mercurial
82 baseui.setconfig('ui', 'message-output', 'stderr')
82 baseui.setconfig('ui', 'message-output', 'stderr')
83
83
84 # force mercurial to only use 1 thread, otherwise it may try to set a
84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # signal in a non-main thread, thus generating a ValueError.
85 # signal in a non-main thread, thus generating a ValueError.
86 baseui.setconfig('worker', 'numcpus', 1)
86 baseui.setconfig('worker', 'numcpus', 1)
87
87
88 # If there is no config for the largefiles extension, we explicitly disable
88 # If there is no config for the largefiles extension, we explicitly disable
89 # it here. This overrides settings from the repository's hgrc file. Recent
89 # it here. This overrides settings from the repository's hgrc file. Recent
90 # mercurial versions enable largefiles in hgrc on clone from largefile
90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # repo.
91 # repo.
92 if not baseui.hasconfig('extensions', 'largefiles'):
92 if not baseui.hasconfig('extensions', 'largefiles'):
93 log.debug('Explicitly disable largefiles extension for repo.')
93 log.debug('Explicitly disable largefiles extension for repo.')
94 baseui.setconfig('extensions', 'largefiles', '!')
94 baseui.setconfig('extensions', 'largefiles', '!')
95
95
96 return baseui
96 return baseui
97
97
98
98
99 def reraise_safe_exceptions(func):
99 def reraise_safe_exceptions(func):
100 """Decorator for converting mercurial exceptions to something neutral."""
100 """Decorator for converting mercurial exceptions to something neutral."""
101
101 def wrapper(*args, **kwargs):
102 def wrapper(*args, **kwargs):
102 try:
103 try:
103 return func(*args, **kwargs)
104 return func(*args, **kwargs)
104 except (Abort, InterventionRequired) as e:
105 except (Abort, InterventionRequired) as e:
105 raise_from_original(exceptions.AbortException(e))
106 raise_from_original(exceptions.AbortException(e))
106 except RepoLookupError as e:
107 except RepoLookupError as e:
107 raise_from_original(exceptions.LookupException(e))
108 raise_from_original(exceptions.LookupException(e))
108 except RequirementError as e:
109 except RequirementError as e:
109 raise_from_original(exceptions.RequirementException(e))
110 raise_from_original(exceptions.RequirementException(e))
110 except RepoError as e:
111 except RepoError as e:
111 raise_from_original(exceptions.VcsException(e))
112 raise_from_original(exceptions.VcsException(e))
112 except LookupError as e:
113 except LookupError as e:
113 raise_from_original(exceptions.LookupException(e))
114 raise_from_original(exceptions.LookupException(e))
114 except Exception as e:
115 except Exception as e:
115 if not hasattr(e, '_vcs_kind'):
116 if not hasattr(e, '_vcs_kind'):
116 log.exception("Unhandled exception in hg remote call")
117 log.exception("Unhandled exception in hg remote call")
117 raise_from_original(exceptions.UnhandledException(e))
118 raise_from_original(exceptions.UnhandledException(e))
118
119
119 raise
120 raise
120 return wrapper
121 return wrapper
121
122
122
123
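A rough sketch of what the decorator above gives callers (the wrapped function here is hypothetical, not part of the source): Mercurial-specific errors surface as the neutral vcsserver exception types instead of leaking Mercurial internals.

    @reraise_safe_exceptions
    def example_remote_call(repo, revision):
        # an unknown revision raises mercurial's RepoLookupError in here ...
        return repo[revision]

    # ... but the caller sees it converted via exceptions.LookupException,
    # so the calling layer never has to know about Mercurial's own classes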
123 class MercurialFactory(RepoFactory):
124 class MercurialFactory(RepoFactory):
124 repo_type = 'hg'
125 repo_type = 'hg'
125
126
126 def _create_config(self, config, hooks=True):
127 def _create_config(self, config, hooks=True):
127 if not hooks:
128 if not hooks:
128 hooks_to_clean = frozenset((
129 hooks_to_clean = frozenset((
129 'changegroup.repo_size', 'preoutgoing.pre_pull',
130 'changegroup.repo_size', 'preoutgoing.pre_pull',
130 'outgoing.pull_logger', 'prechangegroup.pre_push'))
131 'outgoing.pull_logger', 'prechangegroup.pre_push'))
131 new_config = []
132 new_config = []
132 for section, option, value in config:
133 for section, option, value in config:
133 if section == 'hooks' and option in hooks_to_clean:
134 if section == 'hooks' and option in hooks_to_clean:
134 continue
135 continue
135 new_config.append((section, option, value))
136 new_config.append((section, option, value))
136 config = new_config
137 config = new_config
137
138
138 baseui = make_ui_from_config(config)
139 baseui = make_ui_from_config(config)
139 return baseui
140 return baseui
140
141
141 def _create_repo(self, wire, create):
142 def _create_repo(self, wire, create):
142 baseui = self._create_config(wire["config"])
143 baseui = self._create_config(wire["config"])
143 return instance(baseui, wire["path"], create)
144 return instance(baseui, wire["path"], create)
144
145
146 def repo(self, wire, create=False):
147 """
148 Get a repository instance for the given path.
149 """
150 region = self._cache_region
151 context = wire.get('context', None)
152 repo_path = wire.get('path', '')
153 context_uid = '{}'.format(context)
154 cache = wire.get('cache', True)
155 cache_on = context and cache
156
157 @region.conditional_cache_on_arguments(condition=cache_on)
158 def create_new_repo(_repo_type, _repo_path, _context_uid):
159 return self._create_repo(wire, create)
160
161 return create_new_repo(self.repo_type, repo_path, context_uid)
162
145
163
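For orientation, a sketch of the kind of `wire` mapping the factory and remote methods receive; the keys below are the ones actually read in the code above, while the values are purely illustrative:

    # hypothetical wire payload
    wire = {
        'path': '/srv/repos/example-repo',            # repository location on disk
        'config': [('phases', 'publish', 'true')],    # (section, option, value) triples
        'context': 'some-request-uuid',               # drives the per-context repo cache
        'cache': True,                                # allow returning a cached repo object
    }
    repo = factory.repo(wire)   # returns a (possibly cached) Mercurial repository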
146 class HgRemote(object):
164 class HgRemote(object):
147
165
148 def __init__(self, factory):
166 def __init__(self, factory):
149 self._factory = factory
167 self._factory = factory
150
168
151 self._bulk_methods = {
169 self._bulk_methods = {
152 "affected_files": self.ctx_files,
170 "affected_files": self.ctx_files,
153 "author": self.ctx_user,
171 "author": self.ctx_user,
154 "branch": self.ctx_branch,
172 "branch": self.ctx_branch,
155 "children": self.ctx_children,
173 "children": self.ctx_children,
156 "date": self.ctx_date,
174 "date": self.ctx_date,
157 "message": self.ctx_description,
175 "message": self.ctx_description,
158 "parents": self.ctx_parents,
176 "parents": self.ctx_parents,
159 "status": self.ctx_status,
177 "status": self.ctx_status,
160 "obsolete": self.ctx_obsolete,
178 "obsolete": self.ctx_obsolete,
161 "phase": self.ctx_phase,
179 "phase": self.ctx_phase,
162 "hidden": self.ctx_hidden,
180 "hidden": self.ctx_hidden,
163 "_file_paths": self.ctx_list,
181 "_file_paths": self.ctx_list,
164 }
182 }
165
183
166 def _get_ctx(self, repo, ref):
184 def _get_ctx(self, repo, ref):
167 return get_ctx(repo, ref)
185 return get_ctx(repo, ref)
168
186
169 @reraise_safe_exceptions
187 @reraise_safe_exceptions
170 def discover_hg_version(self):
188 def discover_hg_version(self):
171 from mercurial import util
189 from mercurial import util
172 return util.version()
190 return util.version()
173
191
174 @reraise_safe_exceptions
192 @reraise_safe_exceptions
175 def is_empty(self, wire):
193 def is_empty(self, wire):
176 repo = self._factory.repo(wire)
194 repo = self._factory.repo(wire)
177
195
178 try:
196 try:
179 return len(repo) == 0
197 return len(repo) == 0
180 except Exception:
198 except Exception:
181 log.exception("failed to read object_store")
199 log.exception("failed to read object_store")
182 return False
200 return False
183
201
184 @reraise_safe_exceptions
202 @reraise_safe_exceptions
185 def archive_repo(self, archive_path, mtime, file_info, kind):
203 def archive_repo(self, archive_path, mtime, file_info, kind):
186 if kind == "tgz":
204 if kind == "tgz":
187 archiver = archival.tarit(archive_path, mtime, "gz")
205 archiver = archival.tarit(archive_path, mtime, "gz")
188 elif kind == "tbz2":
206 elif kind == "tbz2":
189 archiver = archival.tarit(archive_path, mtime, "bz2")
207 archiver = archival.tarit(archive_path, mtime, "bz2")
190 elif kind == 'zip':
208 elif kind == 'zip':
191 archiver = archival.zipit(archive_path, mtime)
209 archiver = archival.zipit(archive_path, mtime)
192 else:
210 else:
193 raise exceptions.ArchiveException()(
211 raise exceptions.ArchiveException()(
194 'Remote does not support: "%s".' % kind)
212 'Remote does not support: "%s".' % kind)
195
213
196 for f_path, f_mode, f_is_link, f_content in file_info:
214 for f_path, f_mode, f_is_link, f_content in file_info:
197 archiver.addfile(f_path, f_mode, f_is_link, f_content)
215 archiver.addfile(f_path, f_mode, f_is_link, f_content)
198 archiver.done()
216 archiver.done()
199
217
200 @reraise_safe_exceptions
218 @reraise_safe_exceptions
201 def bookmarks(self, wire):
219 def bookmarks(self, wire):
202 repo = self._factory.repo(wire)
220 repo = self._factory.repo(wire)
203 return dict(repo._bookmarks)
221 return dict(repo._bookmarks)
204
222
205 @reraise_safe_exceptions
223 @reraise_safe_exceptions
206 def branches(self, wire, normal, closed):
224 def branches(self, wire, normal, closed):
207 repo = self._factory.repo(wire)
225 repo = self._factory.repo(wire)
208 iter_branches = repo.branchmap().iterbranches()
226 iter_branches = repo.branchmap().iterbranches()
209 bt = {}
227 bt = {}
210 for branch_name, _heads, tip, is_closed in iter_branches:
228 for branch_name, _heads, tip, is_closed in iter_branches:
211 if normal and not is_closed:
229 if normal and not is_closed:
212 bt[branch_name] = tip
230 bt[branch_name] = tip
213 if closed and is_closed:
231 if closed and is_closed:
214 bt[branch_name] = tip
232 bt[branch_name] = tip
215
233
216 return bt
234 return bt
217
235
218 @reraise_safe_exceptions
236 @reraise_safe_exceptions
219 def bulk_request(self, wire, rev, pre_load):
237 def bulk_request(self, wire, rev, pre_load):
220 result = {}
238 result = {}
221 for attr in pre_load:
239 for attr in pre_load:
222 try:
240 try:
223 method = self._bulk_methods[attr]
241 method = self._bulk_methods[attr]
224 result[attr] = method(wire, rev)
242 result[attr] = method(wire, rev)
225 except KeyError as e:
243 except KeyError as e:
226 raise exceptions.VcsException(e)(
244 raise exceptions.VcsException(e)(
227 'Unknown bulk attribute: "%s"' % attr)
245 'Unknown bulk attribute: "%s"' % attr)
228 return result
246 return result
229
247
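A sketch of how bulk_request is driven by the _bulk_methods mapping defined in __init__; the revision and attribute names are illustrative:

    # hypothetical call: fetch several commit attributes in one round trip
    data = hg_remote.bulk_request(wire, 'tip', pre_load=['author', 'branch', 'parents'])
    # -> {'author': ..., 'branch': ..., 'parents': [...]}
    # an attribute missing from _bulk_methods raises 'Unknown bulk attribute: ...'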
230 @reraise_safe_exceptions
248 @reraise_safe_exceptions
231 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
249 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
232 baseui = self._factory._create_config(wire["config"], hooks=hooks)
250 baseui = self._factory._create_config(wire["config"], hooks=hooks)
233 clone(baseui, source, dest, noupdate=not update_after_clone)
251 clone(baseui, source, dest, noupdate=not update_after_clone)
234
252
235 @reraise_safe_exceptions
253 @reraise_safe_exceptions
236 def commitctx(
254 def commitctx(
237 self, wire, message, parents, commit_time, commit_timezone,
255 self, wire, message, parents, commit_time, commit_timezone,
238 user, files, extra, removed, updated):
256 user, files, extra, removed, updated):
239
257
240 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
241 baseui = self._factory._create_config(wire['config'])
259 baseui = self._factory._create_config(wire['config'])
242 publishing = baseui.configbool('phases', 'publish')
260 publishing = baseui.configbool('phases', 'publish')
243 if publishing:
261 if publishing:
244 new_commit = 'public'
262 new_commit = 'public'
245 else:
263 else:
246 new_commit = 'draft'
264 new_commit = 'draft'
247
265
248 def _filectxfn(_repo, ctx, path):
266 def _filectxfn(_repo, ctx, path):
249 """
267 """
250 Marks the given path as added/changed/removed in the given _repo. This
268 Marks the given path as added/changed/removed in the given _repo. This
251 is used by Mercurial's internal commit function.
269 is used by Mercurial's internal commit function.
252 """
270 """
253
271
254 # check if this path is removed
272 # check if this path is removed
255 if path in removed:
273 if path in removed:
256 # returning None is a way to mark node for removal
274 # returning None is a way to mark node for removal
257 return None
275 return None
258
276
259 # check if this path is added
277 # check if this path is added
260 for node in updated:
278 for node in updated:
261 if node['path'] == path:
279 if node['path'] == path:
262 return memfilectx(
280 return memfilectx(
263 _repo,
281 _repo,
264 changectx=ctx,
282 changectx=ctx,
265 path=node['path'],
283 path=node['path'],
266 data=node['content'],
284 data=node['content'],
267 islink=False,
285 islink=False,
268 isexec=bool(node['mode'] & stat.S_IXUSR),
286 isexec=bool(node['mode'] & stat.S_IXUSR),
269 copied=False)
287 copied=False)
270
288
271 raise exceptions.AbortException()(
289 raise exceptions.AbortException()(
272 "Given path haven't been marked as added, "
290 "Given path haven't been marked as added, "
273 "changed or removed (%s)" % path)
291 "changed or removed (%s)" % path)
274
292
275 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
293 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
276
294
277 commit_ctx = memctx(
295 commit_ctx = memctx(
278 repo=repo,
296 repo=repo,
279 parents=parents,
297 parents=parents,
280 text=message,
298 text=message,
281 files=files,
299 files=files,
282 filectxfn=_filectxfn,
300 filectxfn=_filectxfn,
283 user=user,
301 user=user,
284 date=(commit_time, commit_timezone),
302 date=(commit_time, commit_timezone),
285 extra=extra)
303 extra=extra)
286
304
287 n = repo.commitctx(commit_ctx)
305 n = repo.commitctx(commit_ctx)
288 new_id = hex(n)
306 new_id = hex(n)
289
307
290 return new_id
308 return new_id
291
309
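A sketch of the payload commitctx expects, with field names taken from the code above and every value purely illustrative:

    # hypothetical commitctx call
    new_id = hg_remote.commitctx(
        wire,
        message='illustrative commit message',
        parents=['<p1-hex>', None],                   # (p1, p2) parent ids
        commit_time=0, commit_timezone=0,
        user='Example User <user@example.com>',
        files=['README'],
        extra={},
        removed=[],                                   # paths to delete
        updated=[{'path': 'README',                   # paths to add/change
                  'content': 'hello',
                  'mode': 0o100644}],
    )
    # new_id is the hex of the freshly created changeset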
292 @reraise_safe_exceptions
310 @reraise_safe_exceptions
293 def ctx_branch(self, wire, revision):
311 def ctx_branch(self, wire, revision):
294 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
295 ctx = self._get_ctx(repo, revision)
313 ctx = self._get_ctx(repo, revision)
296 return ctx.branch()
314 return ctx.branch()
297
315
298 @reraise_safe_exceptions
316 @reraise_safe_exceptions
299 def ctx_children(self, wire, revision):
317 def ctx_children(self, wire, revision):
300 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
301 ctx = self._get_ctx(repo, revision)
319 ctx = self._get_ctx(repo, revision)
302 return [child.rev() for child in ctx.children()]
320 return [child.rev() for child in ctx.children()]
303
321
304 @reraise_safe_exceptions
322 @reraise_safe_exceptions
305 def ctx_date(self, wire, revision):
323 def ctx_date(self, wire, revision):
306 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
307 ctx = self._get_ctx(repo, revision)
325 ctx = self._get_ctx(repo, revision)
308 return ctx.date()
326 return ctx.date()
309
327
310 @reraise_safe_exceptions
328 @reraise_safe_exceptions
311 def ctx_description(self, wire, revision):
329 def ctx_description(self, wire, revision):
312 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, revision)
331 ctx = self._get_ctx(repo, revision)
314 return ctx.description()
332 return ctx.description()
315
333
316 @reraise_safe_exceptions
334 @reraise_safe_exceptions
317 def ctx_files(self, wire, revision):
335 def ctx_files(self, wire, revision):
318 repo = self._factory.repo(wire)
336 repo = self._factory.repo(wire)
319 ctx = self._get_ctx(repo, revision)
337 ctx = self._get_ctx(repo, revision)
320 return ctx.files()
338 return ctx.files()
321
339
322 @reraise_safe_exceptions
340 @reraise_safe_exceptions
323 def ctx_list(self, path, revision):
341 def ctx_list(self, path, revision):
324 repo = self._factory.repo(path)
342 repo = self._factory.repo(path)
325 ctx = self._get_ctx(repo, revision)
343 ctx = self._get_ctx(repo, revision)
326 return list(ctx)
344 return list(ctx)
327
345
328 @reraise_safe_exceptions
346 @reraise_safe_exceptions
329 def ctx_parents(self, wire, revision):
347 def ctx_parents(self, wire, revision):
330 repo = self._factory.repo(wire)
348 repo = self._factory.repo(wire)
331 ctx = self._get_ctx(repo, revision)
349 ctx = self._get_ctx(repo, revision)
332 return [parent.rev() for parent in ctx.parents()]
350 return [parent.rev() for parent in ctx.parents()]
333
351
334 @reraise_safe_exceptions
352 @reraise_safe_exceptions
335 def ctx_phase(self, wire, revision):
353 def ctx_phase(self, wire, revision):
336 repo = self._factory.repo(wire)
354 repo = self._factory.repo(wire)
337 ctx = self._get_ctx(repo, revision)
355 ctx = self._get_ctx(repo, revision)
338 # public=0, draft=1, secret=3
356 # public=0, draft=1, secret=3
339 return ctx.phase()
357 return ctx.phase()
340
358
341 @reraise_safe_exceptions
359 @reraise_safe_exceptions
342 def ctx_obsolete(self, wire, revision):
360 def ctx_obsolete(self, wire, revision):
343 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
344 ctx = self._get_ctx(repo, revision)
362 ctx = self._get_ctx(repo, revision)
345 return ctx.obsolete()
363 return ctx.obsolete()
346
364
347 @reraise_safe_exceptions
365 @reraise_safe_exceptions
348 def ctx_hidden(self, wire, revision):
366 def ctx_hidden(self, wire, revision):
349 repo = self._factory.repo(wire)
367 repo = self._factory.repo(wire)
350 ctx = self._get_ctx(repo, revision)
368 ctx = self._get_ctx(repo, revision)
351 return ctx.hidden()
369 return ctx.hidden()
352
370
353 @reraise_safe_exceptions
371 @reraise_safe_exceptions
354 def ctx_substate(self, wire, revision):
372 def ctx_substate(self, wire, revision):
355 repo = self._factory.repo(wire)
373 repo = self._factory.repo(wire)
356 ctx = self._get_ctx(repo, revision)
374 ctx = self._get_ctx(repo, revision)
357 return ctx.substate
375 return ctx.substate
358
376
359 @reraise_safe_exceptions
377 @reraise_safe_exceptions
360 def ctx_status(self, wire, revision):
378 def ctx_status(self, wire, revision):
361 repo = self._factory.repo(wire)
379 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, revision)
380 ctx = self._get_ctx(repo, revision)
363 status = repo[ctx.p1().node()].status(other=ctx.node())
381 status = repo[ctx.p1().node()].status(other=ctx.node())
364 # the status object (an odd, custom named tuple in Mercurial) is not
382 # the status object (an odd, custom named tuple in Mercurial) is not
365 # correctly serializable; we convert it to a list, as the underlying
383 # correctly serializable; we convert it to a list, as the underlying
366 # API expects a list
384 # API expects a list
367 return list(status)
385 return list(status)
368
386
369 @reraise_safe_exceptions
387 @reraise_safe_exceptions
370 def ctx_user(self, wire, revision):
388 def ctx_user(self, wire, revision):
371 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
372 ctx = self._get_ctx(repo, revision)
390 ctx = self._get_ctx(repo, revision)
373 return ctx.user()
391 return ctx.user()
374
392
375 @reraise_safe_exceptions
393 @reraise_safe_exceptions
376 def check_url(self, url, config):
394 def check_url(self, url, config):
377 _proto = None
395 _proto = None
378 if '+' in url[:url.find('://')]:
396 if '+' in url[:url.find('://')]:
379 _proto = url[0:url.find('+')]
397 _proto = url[0:url.find('+')]
380 url = url[url.find('+') + 1:]
398 url = url[url.find('+') + 1:]
381 handlers = []
399 handlers = []
382 url_obj = url_parser(url)
400 url_obj = url_parser(url)
383 test_uri, authinfo = url_obj.authinfo()
401 test_uri, authinfo = url_obj.authinfo()
384 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
402 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
385 url_obj.query = obfuscate_qs(url_obj.query)
403 url_obj.query = obfuscate_qs(url_obj.query)
386
404
387 cleaned_uri = str(url_obj)
405 cleaned_uri = str(url_obj)
388 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
406 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
389
407
390 if authinfo:
408 if authinfo:
391 # create a password manager
409 # create a password manager
392 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
410 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
393 passmgr.add_password(*authinfo)
411 passmgr.add_password(*authinfo)
394
412
395 handlers.extend((httpbasicauthhandler(passmgr),
413 handlers.extend((httpbasicauthhandler(passmgr),
396 httpdigestauthhandler(passmgr)))
414 httpdigestauthhandler(passmgr)))
397
415
398 o = urllib2.build_opener(*handlers)
416 o = urllib2.build_opener(*handlers)
399 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
417 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
400 ('Accept', 'application/mercurial-0.1')]
418 ('Accept', 'application/mercurial-0.1')]
401
419
402 q = {"cmd": 'between'}
420 q = {"cmd": 'between'}
403 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
421 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
404 qs = '?%s' % urllib.urlencode(q)
422 qs = '?%s' % urllib.urlencode(q)
405 cu = "%s%s" % (test_uri, qs)
423 cu = "%s%s" % (test_uri, qs)
406 req = urllib2.Request(cu, None, {})
424 req = urllib2.Request(cu, None, {})
407
425
408 try:
426 try:
409 log.debug("Trying to open URL %s", cleaned_uri)
427 log.debug("Trying to open URL %s", cleaned_uri)
410 resp = o.open(req)
428 resp = o.open(req)
411 if resp.code != 200:
429 if resp.code != 200:
412 raise exceptions.URLError()('Return Code is not 200')
430 raise exceptions.URLError()('Return Code is not 200')
413 except Exception as e:
431 except Exception as e:
414 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
432 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
415 # means it cannot be cloned
433 # means it cannot be cloned
416 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
434 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
417
435
418 # now check if it's a proper hg repo, but don't do it for svn
436 # now check if it's a proper hg repo, but don't do it for svn
419 try:
437 try:
420 if _proto == 'svn':
438 if _proto == 'svn':
421 pass
439 pass
422 else:
440 else:
423 # check for pure hg repos
441 # check for pure hg repos
424 log.debug(
442 log.debug(
425 "Verifying if URL is a Mercurial repository: %s",
443 "Verifying if URL is a Mercurial repository: %s",
426 cleaned_uri)
444 cleaned_uri)
427 ui = make_ui_from_config(config)
445 ui = make_ui_from_config(config)
428 peer_checker = makepeer(ui, url)
446 peer_checker = makepeer(ui, url)
429 peer_checker.lookup('tip')
447 peer_checker.lookup('tip')
430 except Exception as e:
448 except Exception as e:
431 log.warning("URL is not a valid Mercurial repository: %s",
449 log.warning("URL is not a valid Mercurial repository: %s",
432 cleaned_uri)
450 cleaned_uri)
433 raise exceptions.URLError(e)(
451 raise exceptions.URLError(e)(
434 "url [%s] does not look like an hg repo org_exc: %s"
452 "url [%s] does not look like an hg repo org_exc: %s"
435 % (cleaned_uri, e))
453 % (cleaned_uri, e))
436
454
437 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
455 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
438 return True
456 return True
439
457
440 @reraise_safe_exceptions
458 @reraise_safe_exceptions
441 def diff(
459 def diff(
442 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
460 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
443 context):
461 context):
444 repo = self._factory.repo(wire)
462 repo = self._factory.repo(wire)
445
463
446 if file_filter:
464 if file_filter:
447 match_filter = match(file_filter[0], '', [file_filter[1]])
465 match_filter = match(file_filter[0], '', [file_filter[1]])
448 else:
466 else:
449 match_filter = file_filter
467 match_filter = file_filter
450 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
468 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
451
469
452 try:
470 try:
453 return "".join(patch.diff(
471 return "".join(patch.diff(
454 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
472 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
455 except RepoLookupError as e:
473 except RepoLookupError as e:
456 raise exceptions.LookupException(e)()
474 raise exceptions.LookupException(e)()
457
475
458 @reraise_safe_exceptions
476 @reraise_safe_exceptions
459 def node_history(self, wire, revision, path, limit):
477 def node_history(self, wire, revision, path, limit):
460 repo = self._factory.repo(wire)
478 repo = self._factory.repo(wire)
461
479
462 ctx = self._get_ctx(repo, revision)
480 ctx = self._get_ctx(repo, revision)
463 fctx = ctx.filectx(path)
481 fctx = ctx.filectx(path)
464
482
465 def history_iter():
483 def history_iter():
466 limit_rev = fctx.rev()
484 limit_rev = fctx.rev()
467 for obj in reversed(list(fctx.filelog())):
485 for obj in reversed(list(fctx.filelog())):
468 obj = fctx.filectx(obj)
486 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
487 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
488 if ctx.hidden() or ctx.obsolete():
471 continue
489 continue
472
490
473 if limit_rev >= obj.rev():
491 if limit_rev >= obj.rev():
474 yield obj
492 yield obj
475
493
476 history = []
494 history = []
477 for cnt, obj in enumerate(history_iter()):
495 for cnt, obj in enumerate(history_iter()):
478 if limit and cnt >= limit:
496 if limit and cnt >= limit:
479 break
497 break
480 history.append(hex(obj.node()))
498 history.append(hex(obj.node()))
481
499
482 return [x for x in history]
500 return [x for x in history]
483
501
484 @reraise_safe_exceptions
502 @reraise_safe_exceptions
485 def node_history_untill(self, wire, revision, path, limit):
503 def node_history_untill(self, wire, revision, path, limit):
486 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
487 ctx = self._get_ctx(repo, revision)
505 ctx = self._get_ctx(repo, revision)
488 fctx = ctx.filectx(path)
506 fctx = ctx.filectx(path)
489
507
490 file_log = list(fctx.filelog())
508 file_log = list(fctx.filelog())
491 if limit:
509 if limit:
492 # Limit to the last n items
510 # Limit to the last n items
493 file_log = file_log[-limit:]
511 file_log = file_log[-limit:]
494
512
495 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
496
514
497 @reraise_safe_exceptions
515 @reraise_safe_exceptions
498 def fctx_annotate(self, wire, revision, path):
516 def fctx_annotate(self, wire, revision, path):
499 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
500 ctx = self._get_ctx(repo, revision)
518 ctx = self._get_ctx(repo, revision)
501 fctx = ctx.filectx(path)
519 fctx = ctx.filectx(path)
502
520
503 result = []
521 result = []
504 for i, annotate_obj in enumerate(fctx.annotate(), 1):
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
505 ln_no = i
523 ln_no = i
506 sha = hex(annotate_obj.fctx.node())
524 sha = hex(annotate_obj.fctx.node())
507 content = annotate_obj.text
525 content = annotate_obj.text
508 result.append((ln_no, sha, content))
526 result.append((ln_no, sha, content))
509 return result
527 return result
510
528
511 @reraise_safe_exceptions
529 @reraise_safe_exceptions
512 def fctx_data(self, wire, revision, path):
530 def fctx_data(self, wire, revision, path):
513 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
514 ctx = self._get_ctx(repo, revision)
532 ctx = self._get_ctx(repo, revision)
515 fctx = ctx.filectx(path)
533 fctx = ctx.filectx(path)
516 return fctx.data()
534 return fctx.data()
517
535
518 @reraise_safe_exceptions
536 @reraise_safe_exceptions
519 def fctx_flags(self, wire, revision, path):
537 def fctx_flags(self, wire, revision, path):
520 repo = self._factory.repo(wire)
538 repo = self._factory.repo(wire)
521 ctx = self._get_ctx(repo, revision)
539 ctx = self._get_ctx(repo, revision)
522 fctx = ctx.filectx(path)
540 fctx = ctx.filectx(path)
523 return fctx.flags()
541 return fctx.flags()
524
542
525 @reraise_safe_exceptions
543 @reraise_safe_exceptions
526 def fctx_size(self, wire, revision, path):
544 def fctx_size(self, wire, revision, path):
527 repo = self._factory.repo(wire)
545 repo = self._factory.repo(wire)
528 ctx = self._get_ctx(repo, revision)
546 ctx = self._get_ctx(repo, revision)
529 fctx = ctx.filectx(path)
547 fctx = ctx.filectx(path)
530 return fctx.size()
548 return fctx.size()
531
549
532 @reraise_safe_exceptions
550 @reraise_safe_exceptions
533 def get_all_commit_ids(self, wire, name):
551 def get_all_commit_ids(self, wire, name):
534 repo = self._factory.repo(wire)
552 repo = self._factory.repo(wire)
535 repo = repo.filtered(name)
553 repo = repo.filtered(name)
536 revs = map(lambda x: hex(x[7]), repo.changelog.index)
554 revs = map(lambda x: hex(x[7]), repo.changelog.index)
537 return revs
555 return revs
538
556
539 @reraise_safe_exceptions
557 @reraise_safe_exceptions
540 def get_config_value(self, wire, section, name, untrusted=False):
558 def get_config_value(self, wire, section, name, untrusted=False):
541 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
542 return repo.ui.config(section, name, untrusted=untrusted)
560 return repo.ui.config(section, name, untrusted=untrusted)
543
561
544 @reraise_safe_exceptions
562 @reraise_safe_exceptions
545 def get_config_bool(self, wire, section, name, untrusted=False):
563 def get_config_bool(self, wire, section, name, untrusted=False):
546 repo = self._factory.repo(wire)
564 repo = self._factory.repo(wire)
547 return repo.ui.configbool(section, name, untrusted=untrusted)
565 return repo.ui.configbool(section, name, untrusted=untrusted)
548
566
549 @reraise_safe_exceptions
567 @reraise_safe_exceptions
550 def get_config_list(self, wire, section, name, untrusted=False):
568 def get_config_list(self, wire, section, name, untrusted=False):
551 repo = self._factory.repo(wire)
569 repo = self._factory.repo(wire)
552 return repo.ui.configlist(section, name, untrusted=untrusted)
570 return repo.ui.configlist(section, name, untrusted=untrusted)
553
571
554 @reraise_safe_exceptions
572 @reraise_safe_exceptions
555 def is_large_file(self, wire, path):
573 def is_large_file(self, wire, path):
556 return largefiles.lfutil.isstandin(path)
574 return largefiles.lfutil.isstandin(path)
557
575
558 @reraise_safe_exceptions
576 @reraise_safe_exceptions
559 def in_largefiles_store(self, wire, sha):
577 def in_largefiles_store(self, wire, sha):
560 repo = self._factory.repo(wire)
578 repo = self._factory.repo(wire)
561 return largefiles.lfutil.instore(repo, sha)
579 return largefiles.lfutil.instore(repo, sha)
562
580
563 @reraise_safe_exceptions
581 @reraise_safe_exceptions
564 def in_user_cache(self, wire, sha):
582 def in_user_cache(self, wire, sha):
565 repo = self._factory.repo(wire)
583 repo = self._factory.repo(wire)
566 return largefiles.lfutil.inusercache(repo.ui, sha)
584 return largefiles.lfutil.inusercache(repo.ui, sha)
567
585
568 @reraise_safe_exceptions
586 @reraise_safe_exceptions
569 def store_path(self, wire, sha):
587 def store_path(self, wire, sha):
570 repo = self._factory.repo(wire)
588 repo = self._factory.repo(wire)
571 return largefiles.lfutil.storepath(repo, sha)
589 return largefiles.lfutil.storepath(repo, sha)
572
590
573 @reraise_safe_exceptions
591 @reraise_safe_exceptions
574 def link(self, wire, sha, path):
592 def link(self, wire, sha, path):
575 repo = self._factory.repo(wire)
593 repo = self._factory.repo(wire)
576 largefiles.lfutil.link(
594 largefiles.lfutil.link(
577 largefiles.lfutil.usercachepath(repo.ui, sha), path)
595 largefiles.lfutil.usercachepath(repo.ui, sha), path)
578
596
579 @reraise_safe_exceptions
597 @reraise_safe_exceptions
580 def localrepository(self, wire, create=False):
598 def localrepository(self, wire, create=False):
581 self._factory.repo(wire, create=create)
599 self._factory.repo(wire, create=create)
582
600
583 @reraise_safe_exceptions
601 @reraise_safe_exceptions
584 def lookup(self, wire, revision, both):
602 def lookup(self, wire, revision, both):
585
603
586 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
587
605
588 if isinstance(revision, int):
606 if isinstance(revision, int):
589 # NOTE(marcink):
607 # NOTE(marcink):
590 # since Mercurial doesn't support negative indexes properly
608 # since Mercurial doesn't support negative indexes properly
591 # we need to shift accordingly by one to get the proper index, e.g.
609 # we need to shift accordingly by one to get the proper index, e.g.
592 # repo[-1] => repo[-2]
610 # repo[-1] => repo[-2]
593 # repo[0] => repo[-1]
611 # repo[0] => repo[-1]
594 if revision <= 0:
612 if revision <= 0:
595 revision = revision + -1
613 revision = revision + -1
596 try:
614 try:
597 ctx = self._get_ctx(repo, revision)
615 ctx = self._get_ctx(repo, revision)
598 except (TypeError, RepoLookupError) as e:
616 except (TypeError, RepoLookupError) as e:
599 e._org_exc_tb = traceback.format_exc()
617 e._org_exc_tb = traceback.format_exc()
600 raise exceptions.LookupException(e)(revision)
618 raise exceptions.LookupException(e)(revision)
601 except LookupError as e:
619 except LookupError as e:
602 e._org_exc_tb = traceback.format_exc()
620 e._org_exc_tb = traceback.format_exc()
603 raise exceptions.LookupException(e)(e.name)
621 raise exceptions.LookupException(e)(e.name)
604
622
605 if not both:
623 if not both:
606 return ctx.hex()
624 return ctx.hex()
607
625
608 ctx = repo[ctx.hex()]
626 ctx = repo[ctx.hex()]
609 return ctx.hex(), ctx.rev()
627 return ctx.hex(), ctx.rev()
610
628
611 @reraise_safe_exceptions
629 @reraise_safe_exceptions
612 def pull(self, wire, url, commit_ids=None):
630 def pull(self, wire, url, commit_ids=None):
613 repo = self._factory.repo(wire)
631 repo = self._factory.repo(wire)
614 # Disable any prompts for this repo
632 # Disable any prompts for this repo
615 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
633 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
616
634
617 remote = peer(repo, {}, url)
635 remote = peer(repo, {}, url)
618 # Disable any prompts for this remote
636 # Disable any prompts for this remote
619 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
637 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
620
638
621 if commit_ids:
639 if commit_ids:
622 commit_ids = [bin(commit_id) for commit_id in commit_ids]
640 commit_ids = [bin(commit_id) for commit_id in commit_ids]
623
641
624 return exchange.pull(
642 return exchange.pull(
625 repo, remote, heads=commit_ids, force=None).cgresult
643 repo, remote, heads=commit_ids, force=None).cgresult
626
644
627 @reraise_safe_exceptions
645 @reraise_safe_exceptions
628 def sync_push(self, wire, url):
646 def sync_push(self, wire, url):
629 if not self.check_url(url, wire['config']):
647 if not self.check_url(url, wire['config']):
630 return
648 return
631
649
632 repo = self._factory.repo(wire)
650 repo = self._factory.repo(wire)
633
651
634 # Disable any prompts for this repo
652 # Disable any prompts for this repo
635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
653 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
636
654
637 bookmarks = dict(repo._bookmarks).keys()
655 bookmarks = dict(repo._bookmarks).keys()
638 remote = peer(repo, {}, url)
656 remote = peer(repo, {}, url)
639 # Disable any prompts for this remote
657 # Disable any prompts for this remote
640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
658 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
641
659
642 return exchange.push(
660 return exchange.push(
643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
661 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
644
662
645 @reraise_safe_exceptions
663 @reraise_safe_exceptions
646 def revision(self, wire, rev):
664 def revision(self, wire, rev):
647 repo = self._factory.repo(wire)
665 repo = self._factory.repo(wire)
648 ctx = self._get_ctx(repo, rev)
666 ctx = self._get_ctx(repo, rev)
649 return ctx.rev()
667 return ctx.rev()
650
668
651 @reraise_safe_exceptions
669 @reraise_safe_exceptions
652 def rev_range(self, wire, filter):
670 def rev_range(self, wire, filter):
653 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
654 revisions = [rev for rev in revrange(repo, filter)]
672 revisions = [rev for rev in revrange(repo, filter)]
655 return revisions
673 return revisions
656
674
657 @reraise_safe_exceptions
675 @reraise_safe_exceptions
658 def rev_range_hash(self, wire, node):
676 def rev_range_hash(self, wire, node):
659 repo = self._factory.repo(wire)
677 repo = self._factory.repo(wire)
660
678
661 def get_revs(repo, rev_opt):
679 def get_revs(repo, rev_opt):
662 if rev_opt:
680 if rev_opt:
663 revs = revrange(repo, rev_opt)
681 revs = revrange(repo, rev_opt)
664 if len(revs) == 0:
682 if len(revs) == 0:
665 return (nullrev, nullrev)
683 return (nullrev, nullrev)
666 return max(revs), min(revs)
684 return max(revs), min(revs)
667 else:
685 else:
668 return len(repo) - 1, 0
686 return len(repo) - 1, 0
669
687
670 stop, start = get_revs(repo, [node + ':'])
688 stop, start = get_revs(repo, [node + ':'])
671 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
689 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
672 return revs
690 return revs
673
691
674 @reraise_safe_exceptions
692 @reraise_safe_exceptions
675 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
693 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
676 other_path = kwargs.pop('other_path', None)
694 other_path = kwargs.pop('other_path', None)
677
695
678 # case when we want to compare two independent repositories
696 # case when we want to compare two independent repositories
679 if other_path and other_path != wire["path"]:
697 if other_path and other_path != wire["path"]:
680 baseui = self._factory._create_config(wire["config"])
698 baseui = self._factory._create_config(wire["config"])
681 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
699 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
682 else:
700 else:
683 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
684 return list(repo.revs(rev_spec, *args))
702 return list(repo.revs(rev_spec, *args))
685
703
686 @reraise_safe_exceptions
704 @reraise_safe_exceptions
687 def strip(self, wire, revision, update, backup):
705 def strip(self, wire, revision, update, backup):
688 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
689 ctx = self._get_ctx(repo, revision)
707 ctx = self._get_ctx(repo, revision)
690 hgext_strip(
708 hgext_strip(
691 repo.baseui, repo, ctx.node(), update=update, backup=backup)
709 repo.baseui, repo, ctx.node(), update=update, backup=backup)
692
710
693 @reraise_safe_exceptions
711 @reraise_safe_exceptions
694 def verify(self, wire,):
712 def verify(self, wire,):
695 repo = self._factory.repo(wire)
713 repo = self._factory.repo(wire)
696 baseui = self._factory._create_config(wire['config'])
714 baseui = self._factory._create_config(wire['config'])
697 baseui.setconfig('ui', 'quiet', 'false')
715 baseui.setconfig('ui', 'quiet', 'false')
698 output = io.BytesIO()
716 output = io.BytesIO()
699
717
700 def write(data, **unused_kwargs):
718 def write(data, **unused_kwargs):
701 output.write(data)
719 output.write(data)
702 baseui.write = write
720 baseui.write = write
703
721
704 repo.ui = baseui
722 repo.ui = baseui
705 verify.verify(repo)
723 verify.verify(repo)
706 return output.getvalue()
724 return output.getvalue()
707
725
708 @reraise_safe_exceptions
726 @reraise_safe_exceptions
709 def tag(self, wire, name, revision, message, local, user,
727 def tag(self, wire, name, revision, message, local, user,
710 tag_time, tag_timezone):
728 tag_time, tag_timezone):
711 repo = self._factory.repo(wire)
729 repo = self._factory.repo(wire)
712 ctx = self._get_ctx(repo, revision)
730 ctx = self._get_ctx(repo, revision)
713 node = ctx.node()
731 node = ctx.node()
714
732
715 date = (tag_time, tag_timezone)
733 date = (tag_time, tag_timezone)
716 try:
734 try:
717 hg_tag.tag(repo, name, node, message, local, user, date)
735 hg_tag.tag(repo, name, node, message, local, user, date)
718 except Abort as e:
736 except Abort as e:
719 log.exception("Tag operation aborted")
737 log.exception("Tag operation aborted")
720 # Exception can contain unicode which we convert
738 # Exception can contain unicode which we convert
721 raise exceptions.AbortException(e)(repr(e))
739 raise exceptions.AbortException(e)(repr(e))
722
740
723 @reraise_safe_exceptions
741 @reraise_safe_exceptions
724 def tags(self, wire):
742 def tags(self, wire):
725 repo = self._factory.repo(wire)
743 repo = self._factory.repo(wire)
726 return repo.tags()
744 return repo.tags()
727
745
728 @reraise_safe_exceptions
746 @reraise_safe_exceptions
729 def update(self, wire, node=None, clean=False):
747 def update(self, wire, node=None, clean=False):
730 repo = self._factory.repo(wire)
748 repo = self._factory.repo(wire)
731 baseui = self._factory._create_config(wire['config'])
749 baseui = self._factory._create_config(wire['config'])
732 commands.update(baseui, repo, node=node, clean=clean)
750 commands.update(baseui, repo, node=node, clean=clean)
733
751
734 @reraise_safe_exceptions
752 @reraise_safe_exceptions
735 def identify(self, wire):
753 def identify(self, wire):
736 repo = self._factory.repo(wire)
754 repo = self._factory.repo(wire)
737 baseui = self._factory._create_config(wire['config'])
755 baseui = self._factory._create_config(wire['config'])
738 output = io.BytesIO()
756 output = io.BytesIO()
739 baseui.write = output.write
757 baseui.write = output.write
740 # This is required to get a full node id
758 # This is required to get a full node id
741 baseui.debugflag = True
759 baseui.debugflag = True
742 commands.identify(baseui, repo, id=True)
760 commands.identify(baseui, repo, id=True)
743
761
744 return output.getvalue()
762 return output.getvalue()
745
763
746 @reraise_safe_exceptions
764 @reraise_safe_exceptions
747 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
765 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
748 hooks=True):
766 hooks=True):
749 repo = self._factory.repo(wire)
767 repo = self._factory.repo(wire)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
768 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751
769
752 # Mercurial internally has a lot of logic that checks ONLY if
770 # Mercurial internally has a lot of logic that checks ONLY if
753 # an option is defined; we only pass options that are actually set
771 # an option is defined; we only pass options that are actually set
754 opts = {}
772 opts = {}
755 if bookmark:
773 if bookmark:
756 opts['bookmark'] = bookmark
774 opts['bookmark'] = bookmark
757 if branch:
775 if branch:
758 opts['branch'] = branch
776 opts['branch'] = branch
759 if revision:
777 if revision:
760 opts['rev'] = revision
778 opts['rev'] = revision
761
779
762 commands.pull(baseui, repo, source, **opts)
780 commands.pull(baseui, repo, source, **opts)
763
781
764 @reraise_safe_exceptions
782 @reraise_safe_exceptions
765 def heads(self, wire, branch=None):
783 def heads(self, wire, branch=None):
766 repo = self._factory.repo(wire)
784 repo = self._factory.repo(wire)
767 baseui = self._factory._create_config(wire['config'])
785 baseui = self._factory._create_config(wire['config'])
768 output = io.BytesIO()
786 output = io.BytesIO()
769
787
770 def write(data, **unused_kwargs):
788 def write(data, **unused_kwargs):
771 output.write(data)
789 output.write(data)
772
790
773 baseui.write = write
791 baseui.write = write
774 if branch:
792 if branch:
775 args = [branch]
793 args = [branch]
776 else:
794 else:
777 args = []
795 args = []
778 commands.heads(baseui, repo, template='{node} ', *args)
796 commands.heads(baseui, repo, template='{node} ', *args)
779
797
780 return output.getvalue()
798 return output.getvalue()
781
799
782 @reraise_safe_exceptions
800 @reraise_safe_exceptions
783 def ancestor(self, wire, revision1, revision2):
801 def ancestor(self, wire, revision1, revision2):
784 repo = self._factory.repo(wire)
802 repo = self._factory.repo(wire)
785 changelog = repo.changelog
803 changelog = repo.changelog
786 lookup = repo.lookup
804 lookup = repo.lookup
787 a = changelog.ancestor(lookup(revision1), lookup(revision2))
805 a = changelog.ancestor(lookup(revision1), lookup(revision2))
788 return hex(a)
806 return hex(a)
789
807
790 @reraise_safe_exceptions
808 @reraise_safe_exceptions
791 def push(self, wire, revisions, dest_path, hooks=True,
809 def push(self, wire, revisions, dest_path, hooks=True,
792 push_branches=False):
810 push_branches=False):
793 repo = self._factory.repo(wire)
811 repo = self._factory.repo(wire)
794 baseui = self._factory._create_config(wire['config'], hooks=hooks)
812 baseui = self._factory._create_config(wire['config'], hooks=hooks)
795 commands.push(baseui, repo, dest=dest_path, rev=revisions,
813 commands.push(baseui, repo, dest=dest_path, rev=revisions,
796 new_branch=push_branches)
814 new_branch=push_branches)
797
815
798 @reraise_safe_exceptions
816 @reraise_safe_exceptions
799 def merge(self, wire, revision):
817 def merge(self, wire, revision):
800 repo = self._factory.repo(wire)
818 repo = self._factory.repo(wire)
801 baseui = self._factory._create_config(wire['config'])
819 baseui = self._factory._create_config(wire['config'])
802 repo.ui.setconfig('ui', 'merge', 'internal:dump')
820 repo.ui.setconfig('ui', 'merge', 'internal:dump')
803
821
804 # When sub repositories are used, Mercurial prompts the user in case of
822 # When sub repositories are used, Mercurial prompts the user in case of
805 # merge conflicts or different sub repository sources. By setting the
823 # merge conflicts or different sub repository sources. By setting the
806 # interactive flag to `False`, Mercurial doesn't prompt the user but
824 # interactive flag to `False`, Mercurial doesn't prompt the user but
807 # instead uses a default value.
825 # instead uses a default value.
808 repo.ui.setconfig('ui', 'interactive', False)
826 repo.ui.setconfig('ui', 'interactive', False)
809 commands.merge(baseui, repo, rev=revision)
827 commands.merge(baseui, repo, rev=revision)
810
828
811 @reraise_safe_exceptions
829 @reraise_safe_exceptions
812 def merge_state(self, wire):
830 def merge_state(self, wire):
813 repo = self._factory.repo(wire)
831 repo = self._factory.repo(wire)
814 repo.ui.setconfig('ui', 'merge', 'internal:dump')
832 repo.ui.setconfig('ui', 'merge', 'internal:dump')
815
833
816 # When sub repositories are used, Mercurial prompts the user in case of
834 # When sub repositories are used, Mercurial prompts the user in case of
817 # merge conflicts or different sub repository sources. By setting the
835 # merge conflicts or different sub repository sources. By setting the
818 # interactive flag to `False`, Mercurial doesn't prompt the user but
836 # interactive flag to `False`, Mercurial doesn't prompt the user but
819 # instead uses a default value.
837 # instead uses a default value.
820 repo.ui.setconfig('ui', 'interactive', False)
838 repo.ui.setconfig('ui', 'interactive', False)
821 ms = hg_merge.mergestate(repo)
839 ms = hg_merge.mergestate(repo)
822 return [x for x in ms.unresolved()]
840 return [x for x in ms.unresolved()]
823
841
824 @reraise_safe_exceptions
842 @reraise_safe_exceptions
825 def commit(self, wire, message, username, close_branch=False):
843 def commit(self, wire, message, username, close_branch=False):
826 repo = self._factory.repo(wire)
844 repo = self._factory.repo(wire)
827 baseui = self._factory._create_config(wire['config'])
845 baseui = self._factory._create_config(wire['config'])
828 repo.ui.setconfig('ui', 'username', username)
846 repo.ui.setconfig('ui', 'username', username)
829 commands.commit(baseui, repo, message=message, close_branch=close_branch)
847 commands.commit(baseui, repo, message=message, close_branch=close_branch)
830
848
831
849
832 @reraise_safe_exceptions
850 @reraise_safe_exceptions
833 def rebase(self, wire, source=None, dest=None, abort=False):
851 def rebase(self, wire, source=None, dest=None, abort=False):
834 repo = self._factory.repo(wire)
852 repo = self._factory.repo(wire)
835 baseui = self._factory._create_config(wire['config'])
853 baseui = self._factory._create_config(wire['config'])
836 repo.ui.setconfig('ui', 'merge', 'internal:dump')
854 repo.ui.setconfig('ui', 'merge', 'internal:dump')
837 rebase.rebase(
855 rebase.rebase(
838 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
856 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
839
857
840 @reraise_safe_exceptions
858 @reraise_safe_exceptions
841 def bookmark(self, wire, bookmark, revision=None):
859 def bookmark(self, wire, bookmark, revision=None):
842 repo = self._factory.repo(wire)
860 repo = self._factory.repo(wire)
843 baseui = self._factory._create_config(wire['config'])
861 baseui = self._factory._create_config(wire['config'])
844 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
862 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
845
863
846 @reraise_safe_exceptions
864 @reraise_safe_exceptions
847 def install_hooks(self, wire, force=False):
865 def install_hooks(self, wire, force=False):
848 # we don't need any special hooks for Mercurial
866 # we don't need any special hooks for Mercurial
849 pass
867 pass
850
868
851 @reraise_safe_exceptions
869 @reraise_safe_exceptions
852 def get_hooks_info(self, wire):
870 def get_hooks_info(self, wire):
853 return {
871 return {
854 'pre_version': vcsserver.__version__,
872 'pre_version': vcsserver.__version__,
855 'post_version': vcsserver.__version__,
873 'post_version': vcsserver.__version__,
856 }
874 }
@@ -1,610 +1,611 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 from itertools import chain
27 from itertools import chain
28
28
29 import simplejson as json
29 import simplejson as json
30 import msgpack
30 import msgpack
31 from pyramid.config import Configurator
31 from pyramid.config import Configurator
32 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.compat import configparser
34 from pyramid.compat import configparser
35
35
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39 # due to Mercurial/glibc 2.27 problems we need to detect if locale settings
39 # due to Mercurial/glibc 2.27 problems we need to detect if locale settings
40 # are causing problems, "fix" them if they do, and fall back to LC_ALL = C
40 # are causing problems, "fix" them if they do, and fall back to LC_ALL = C
41
41
42 try:
42 try:
43 locale.setlocale(locale.LC_ALL, '')
43 locale.setlocale(locale.LC_ALL, '')
44 except locale.Error as e:
44 except locale.Error as e:
45 log.error(
45 log.error(
46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 os.environ['LC_ALL'] = 'C'
47 os.environ['LC_ALL'] = 'C'
48
48
49 import vcsserver
49 import vcsserver
50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 from vcsserver.echo_stub.echo_app import EchoApp
53 from vcsserver.echo_stub.echo_app import EchoApp
54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 from vcsserver.lib.exc_tracking import store_exception
55 from vcsserver.lib.exc_tracking import store_exception
56 from vcsserver.server import VcsServer
56 from vcsserver.server import VcsServer
57
57
58 try:
58 try:
59 from vcsserver.git import GitFactory, GitRemote
59 from vcsserver.git import GitFactory, GitRemote
60 except ImportError:
60 except ImportError:
61 GitFactory = None
61 GitFactory = None
62 GitRemote = None
62 GitRemote = None
63
63
64 try:
64 try:
65 from vcsserver.hg import MercurialFactory, HgRemote
65 from vcsserver.hg import MercurialFactory, HgRemote
66 except ImportError:
66 except ImportError:
67 MercurialFactory = None
67 MercurialFactory = None
68 HgRemote = None
68 HgRemote = None
69
69
70 try:
70 try:
71 from vcsserver.svn import SubversionFactory, SvnRemote
71 from vcsserver.svn import SubversionFactory, SvnRemote
72 except ImportError:
72 except ImportError:
73 SubversionFactory = None
73 SubversionFactory = None
74 SvnRemote = None
74 SvnRemote = None
75
75
76
76
77 def _is_request_chunked(environ):
77 def _is_request_chunked(environ):
78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 return stream
79 return stream
80
80
81
81
82 def _int_setting(settings, name, default):
82 def _int_setting(settings, name, default):
83 settings[name] = int(settings.get(name, default))
83 settings[name] = int(settings.get(name, default))
84 return settings[name]
84 return settings[name]
85
85
86
86
87 def _bool_setting(settings, name, default):
87 def _bool_setting(settings, name, default):
88 input_val = settings.get(name, default)
88 input_val = settings.get(name, default)
89 if isinstance(input_val, unicode):
89 if isinstance(input_val, unicode):
90 input_val = input_val.encode('utf8')
90 input_val = input_val.encode('utf8')
91 settings[name] = asbool(input_val)
91 settings[name] = asbool(input_val)
92 return settings[name]
92 return settings[name]
93
93
94
94
95 def _list_setting(settings, name, default):
95 def _list_setting(settings, name, default):
96 raw_value = settings.get(name, default)
96 raw_value = settings.get(name, default)
97
97
98 # We assume the value uses pyramid's space/newline separation.
98 # We assume the value uses pyramid's space/newline separation.
99 settings[name] = aslist(raw_value)
99 settings[name] = aslist(raw_value)
100 return settings[name]
100 return settings[name]
101
101
102
102
103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 value = settings.get(name, default)
104 value = settings.get(name, default)
105
105
106 if default_when_empty and not value:
106 if default_when_empty and not value:
107 # use default value when value is empty
107 # use default value when value is empty
108 value = default
108 value = default
109
109
110 if lower:
110 if lower:
111 value = value.lower()
111 value = value.lower()
112 settings[name] = value
112 settings[name] = value
113 return settings[name]
113 return settings[name]
114
114
115
115
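The four setting helpers above coerce raw .ini string values into typed settings in place and hand the coerced value back. A minimal usage sketch, assuming a plain dict stands in for the parsed configuration; the keys are illustrative, not required vcsserver settings:

# Illustrative only: a plain dict plays the role of the parsed .ini settings.
settings = {
    'dev.use_echo_app': 'false',
    'rc_cache.repo_object.expiration_time': '300',
    'locale': '',
}

use_echo = _bool_setting(settings, 'dev.use_echo_app', 'false')                # False
expire = _int_setting(settings, 'rc_cache.repo_object.expiration_time', 300)  # 300 as int
loc = _string_setting(settings, 'locale', 'en_US.UTF-8',
                      lower=False, default_when_empty=True)                    # 'en_US.UTF-8'

# Each helper writes the coerced value back into `settings` and returns it,
# so later code can rely on real types instead of raw strings.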
116 class VCS(object):
116 class VCS(object):
117 def __init__(self, locale=None, cache_config=None):
117 def __init__(self, locale=None, cache_config=None):
118 self.locale = locale
118 self.locale = locale
119 self.cache_config = cache_config
119 self.cache_config = cache_config
120 self._configure_locale()
120 self._configure_locale()
121
121
122 if GitFactory and GitRemote:
122 if GitFactory and GitRemote:
123 git_factory = GitFactory()
123 git_factory = GitFactory()
124 self._git_remote = GitRemote(git_factory)
124 self._git_remote = GitRemote(git_factory)
125 else:
125 else:
126 log.info("Git client import failed")
126 log.info("Git client import failed")
127
127
128 if MercurialFactory and HgRemote:
128 if MercurialFactory and HgRemote:
129 hg_factory = MercurialFactory()
129 hg_factory = MercurialFactory()
130 self._hg_remote = HgRemote(hg_factory)
130 self._hg_remote = HgRemote(hg_factory)
131 else:
131 else:
132 log.info("Mercurial client import failed")
132 log.info("Mercurial client import failed")
133
133
134 if SubversionFactory and SvnRemote:
134 if SubversionFactory and SvnRemote:
135 svn_factory = SubversionFactory()
135 svn_factory = SubversionFactory()
136
136
137 # hg factory is used for svn url validation
137 # hg factory is used for svn url validation
138 hg_factory = MercurialFactory()
138 hg_factory = MercurialFactory()
139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 else:
140 else:
141 log.info("Subversion client import failed")
141 log.info("Subversion client import failed")
142
142
143 self._vcsserver = VcsServer()
143 self._vcsserver = VcsServer()
144
144
145 def _configure_locale(self):
145 def _configure_locale(self):
146 if self.locale:
146 if self.locale:
147 log.info('Setting locale `LC_ALL` to %s', self.locale)
147 log.info('Setting locale `LC_ALL` to %s', self.locale)
148 else:
148 else:
149 log.info(
149 log.info(
150 'Configuring locale subsystem based on environment variables')
150 'Configuring locale subsystem based on environment variables')
151 try:
151 try:
152 # If self.locale is the empty string, then the locale
152 # If self.locale is the empty string, then the locale
153 # module will use the environment variables. See the
153 # module will use the environment variables. See the
154 # documentation of the package `locale`.
154 # documentation of the package `locale`.
155 locale.setlocale(locale.LC_ALL, self.locale)
155 locale.setlocale(locale.LC_ALL, self.locale)
156
156
157 language_code, encoding = locale.getlocale()
157 language_code, encoding = locale.getlocale()
158 log.info(
158 log.info(
159 'Locale set to language code "%s" with encoding "%s".',
159 'Locale set to language code "%s" with encoding "%s".',
160 language_code, encoding)
160 language_code, encoding)
161 except locale.Error:
161 except locale.Error:
162 log.exception(
162 log.exception(
163 'Cannot set locale, not configuring the locale system')
163 'Cannot set locale, not configuring the locale system')
164
164
165
165
166 class WsgiProxy(object):
166 class WsgiProxy(object):
167 def __init__(self, wsgi):
167 def __init__(self, wsgi):
168 self.wsgi = wsgi
168 self.wsgi = wsgi
169
169
170 def __call__(self, environ, start_response):
170 def __call__(self, environ, start_response):
171 input_data = environ['wsgi.input'].read()
171 input_data = environ['wsgi.input'].read()
172 input_data = msgpack.unpackb(input_data)
172 input_data = msgpack.unpackb(input_data)
173
173
174 error = None
174 error = None
175 try:
175 try:
176 data, status, headers = self.wsgi.handle(
176 data, status, headers = self.wsgi.handle(
177 input_data['environment'], input_data['input_data'],
177 input_data['environment'], input_data['input_data'],
178 *input_data['args'], **input_data['kwargs'])
178 *input_data['args'], **input_data['kwargs'])
179 except Exception as e:
179 except Exception as e:
180 data, status, headers = [], None, None
180 data, status, headers = [], None, None
181 error = {
181 error = {
182 'message': str(e),
182 'message': str(e),
183 '_vcs_kind': getattr(e, '_vcs_kind', None)
183 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 }
184 }
185
185
186 start_response(200, {})
186 start_response(200, {})
187 return self._iterator(error, status, headers, data)
187 return self._iterator(error, status, headers, data)
188
188
189 def _iterator(self, error, status, headers, data):
189 def _iterator(self, error, status, headers, data):
190 initial_data = [
190 initial_data = [
191 error,
191 error,
192 status,
192 status,
193 headers,
193 headers,
194 ]
194 ]
195
195
196 for d in chain(initial_data, data):
196 for d in chain(initial_data, data):
197 yield msgpack.packb(d)
197 yield msgpack.packb(d)
198
198
199
199
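WsgiProxy speaks a small msgpack framing: the request body is one packed dict carrying the environment, input data, args and kwargs, and the response is a stream of packed values in the fixed order error, status, headers, followed by the body chunks. A hedged sketch of how a caller might unpack such a response stream; only the ordering mirrors the generator above, everything else is illustrative:

import msgpack

def read_proxy_response(raw_bytes):
    # The proxy yields concatenated msgpack values: error, status, headers, *body.
    unpacker = msgpack.Unpacker(use_list=True)
    unpacker.feed(raw_bytes)
    values = list(unpacker)
    error, status, headers = values[0], values[1], values[2]
    body_chunks = values[3:]
    return error, status, headers, body_chunks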
200 def not_found(request):
200 def not_found(request):
201 return {'status': '404 NOT FOUND'}
201 return {'status': '404 NOT FOUND'}
202
202
203
203
204 class VCSViewPredicate(object):
204 class VCSViewPredicate(object):
205 def __init__(self, val, config):
205 def __init__(self, val, config):
206 self.remotes = val
206 self.remotes = val
207
207
208 def text(self):
208 def text(self):
209 return 'vcs view method = %s' % (self.remotes.keys(),)
209 return 'vcs view method = %s' % (self.remotes.keys(),)
210
210
211 phash = text
211 phash = text
212
212
213 def __call__(self, context, request):
213 def __call__(self, context, request):
214 """
214 """
215 View predicate that returns True if the given backend is supported by
215 View predicate that returns True if the given backend is supported by
216 the defined remotes.
216 the defined remotes.
217 """
217 """
218 backend = request.matchdict.get('backend')
218 backend = request.matchdict.get('backend')
219 return backend in self.remotes
219 return backend in self.remotes
220
220
221
221
222 class HTTPApplication(object):
222 class HTTPApplication(object):
223 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
223 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
224
224
225 remote_wsgi = remote_wsgi
225 remote_wsgi = remote_wsgi
226 _use_echo_app = False
226 _use_echo_app = False
227
227
228 def __init__(self, settings=None, global_config=None):
228 def __init__(self, settings=None, global_config=None):
229 self._sanitize_settings_and_apply_defaults(settings)
229 self._sanitize_settings_and_apply_defaults(settings)
230
230
231 self.config = Configurator(settings=settings)
231 self.config = Configurator(settings=settings)
232 self.global_config = global_config
232 self.global_config = global_config
233 self.config.include('vcsserver.lib.rc_cache')
233 self.config.include('vcsserver.lib.rc_cache')
234
234
235 locale = settings.get('locale', '') or 'en_US.UTF-8'
235 locale = settings.get('locale', '') or 'en_US.UTF-8'
236 vcs = VCS(locale=locale, cache_config=settings)
236 vcs = VCS(locale=locale, cache_config=settings)
237 self._remotes = {
237 self._remotes = {
238 'hg': vcs._hg_remote,
238 'hg': vcs._hg_remote,
239 'git': vcs._git_remote,
239 'git': vcs._git_remote,
240 'svn': vcs._svn_remote,
240 'svn': vcs._svn_remote,
241 'server': vcs._vcsserver,
241 'server': vcs._vcsserver,
242 }
242 }
243 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
243 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
244 self._use_echo_app = True
244 self._use_echo_app = True
245 log.warning("Using EchoApp for VCS operations.")
245 log.warning("Using EchoApp for VCS operations.")
246 self.remote_wsgi = remote_wsgi_stub
246 self.remote_wsgi = remote_wsgi_stub
247
247
248 self._configure_settings(global_config, settings)
248 self._configure_settings(global_config, settings)
249 self._configure()
249 self._configure()
250
250
251 def _configure_settings(self, global_config, app_settings):
251 def _configure_settings(self, global_config, app_settings):
252 """
252 """
253 Configure the settings module.
253 Configure the settings module.
254 """
254 """
255 settings_merged = global_config.copy()
255 settings_merged = global_config.copy()
256 settings_merged.update(app_settings)
256 settings_merged.update(app_settings)
257
257
258 git_path = app_settings.get('git_path', None)
258 git_path = app_settings.get('git_path', None)
259 if git_path:
259 if git_path:
260 settings.GIT_EXECUTABLE = git_path
260 settings.GIT_EXECUTABLE = git_path
261 binary_dir = app_settings.get('core.binary_dir', None)
261 binary_dir = app_settings.get('core.binary_dir', None)
262 if binary_dir:
262 if binary_dir:
263 settings.BINARY_DIR = binary_dir
263 settings.BINARY_DIR = binary_dir
264
264
265 # Store the settings to make them available to other modules.
265 # Store the settings to make them available to other modules.
266 vcsserver.PYRAMID_SETTINGS = settings_merged
266 vcsserver.PYRAMID_SETTINGS = settings_merged
267 vcsserver.CONFIG = settings_merged
267 vcsserver.CONFIG = settings_merged
268
268
269 def _sanitize_settings_and_apply_defaults(self, settings):
269 def _sanitize_settings_and_apply_defaults(self, settings):
270 temp_store = tempfile.gettempdir()
270 temp_store = tempfile.gettempdir()
271 default_cache_dir = os.path.join(temp_store, 'rc_cache')
271 default_cache_dir = os.path.join(temp_store, 'rc_cache')
272
272
273 # save the default cache dir and use it for all backends later.
273 # save the default cache dir and use it for all backends later.
274 default_cache_dir = _string_setting(
274 default_cache_dir = _string_setting(
275 settings,
275 settings,
276 'cache_dir',
276 'cache_dir',
277 default_cache_dir, lower=False, default_when_empty=True)
277 default_cache_dir, lower=False, default_when_empty=True)
278
278
279 # ensure we have our dir created
279 # ensure we have our dir created
280 if not os.path.isdir(default_cache_dir):
280 if not os.path.isdir(default_cache_dir):
281 os.makedirs(default_cache_dir, mode=0o755)
281 os.makedirs(default_cache_dir, mode=0o755)
282
282
283 # exception store cache
283 # exception store cache
284 _string_setting(
284 _string_setting(
285 settings,
285 settings,
286 'exception_tracker.store_path',
286 'exception_tracker.store_path',
287 temp_store, lower=False, default_when_empty=True)
287 temp_store, lower=False, default_when_empty=True)
288
288
289 # repo_object cache
289 # repo_object cache
290 _string_setting(
290 _string_setting(
291 settings,
291 settings,
292 'rc_cache.repo_object.backend',
292 'rc_cache.repo_object.backend',
293 'dogpile.cache.rc.memory_lru')
293 'dogpile.cache.rc.memory_lru')
294 _int_setting(
294 _int_setting(
295 settings,
295 settings,
296 'rc_cache.repo_object.expiration_time',
296 'rc_cache.repo_object.expiration_time',
297 300)
297 300)
298 _int_setting(
298 _int_setting(
299 settings,
299 settings,
300 'rc_cache.repo_object.max_size',
300 'rc_cache.repo_object.max_size',
301 1024)
301 1024)
302
302
303 def _configure(self):
303 def _configure(self):
304 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
304 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
305
305
306 self.config.add_route('service', '/_service')
306 self.config.add_route('service', '/_service')
307 self.config.add_route('status', '/status')
307 self.config.add_route('status', '/status')
308 self.config.add_route('hg_proxy', '/proxy/hg')
308 self.config.add_route('hg_proxy', '/proxy/hg')
309 self.config.add_route('git_proxy', '/proxy/git')
309 self.config.add_route('git_proxy', '/proxy/git')
310 self.config.add_route('vcs', '/{backend}')
310 self.config.add_route('vcs', '/{backend}')
311 self.config.add_route('stream_git', '/stream/git/*repo_name')
311 self.config.add_route('stream_git', '/stream/git/*repo_name')
312 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
312 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
313
313
314 self.config.add_view(self.status_view, route_name='status', renderer='json')
314 self.config.add_view(self.status_view, route_name='status', renderer='json')
315 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
315 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
316
316
317 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
317 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
318 self.config.add_view(self.git_proxy(), route_name='git_proxy')
318 self.config.add_view(self.git_proxy(), route_name='git_proxy')
319 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
319 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
320 vcs_view=self._remotes)
320 vcs_view=self._remotes)
321
321
322 self.config.add_view(self.hg_stream(), route_name='stream_hg')
322 self.config.add_view(self.hg_stream(), route_name='stream_hg')
323 self.config.add_view(self.git_stream(), route_name='stream_git')
323 self.config.add_view(self.git_stream(), route_name='stream_git')
324
324
325 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
325 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
326
326
327 self.config.add_notfound_view(not_found, renderer='json')
327 self.config.add_notfound_view(not_found, renderer='json')
328
328
329 self.config.add_view(self.handle_vcs_exception, context=Exception)
329 self.config.add_view(self.handle_vcs_exception, context=Exception)
330
330
331 self.config.add_tween(
331 self.config.add_tween(
332 'vcsserver.tweens.RequestWrapperTween',
332 'vcsserver.tweens.RequestWrapperTween',
333 )
333 )
334
334
335 def wsgi_app(self):
335 def wsgi_app(self):
336 return self.config.make_wsgi_app()
336 return self.config.make_wsgi_app()
337
337
338 def vcs_view(self, request):
338 def vcs_view(self, request):
339 remote = self._remotes[request.matchdict['backend']]
339 remote = self._remotes[request.matchdict['backend']]
340 payload = msgpack.unpackb(request.body, use_list=True)
340 payload = msgpack.unpackb(request.body, use_list=True)
341 method = payload.get('method')
341 method = payload.get('method')
342 params = payload.get('params')
342 params = payload.get('params')
343 wire = params.get('wire')
343 wire = params.get('wire')
344 args = params.get('args')
344 args = params.get('args')
345 kwargs = params.get('kwargs')
345 kwargs = params.get('kwargs')
346 context_uid = None
346 context_uid = None
347
347
348 if wire:
348 if wire:
349 try:
349 try:
350 wire['context'] = context_uid = uuid.UUID(wire['context'])
350 wire['context'] = context_uid = uuid.UUID(wire['context'])
351 except KeyError:
351 except KeyError:
352 pass
352 pass
353 args.insert(0, wire)
353 args.insert(0, wire)
354
354
355 log.debug('method called:%s with kwargs:%s context_uid: %s',
355 log.debug('method called:%s with kwargs:%s context_uid: %s',
356 method, kwargs, context_uid)
356 method, kwargs, context_uid)
357
357 try:
358 try:
358 resp = getattr(remote, method)(*args, **kwargs)
359 resp = getattr(remote, method)(*args, **kwargs)
359 except Exception as e:
360 except Exception as e:
360 exc_info = list(sys.exc_info())
361 exc_info = list(sys.exc_info())
361 exc_type, exc_value, exc_traceback = exc_info
362 exc_type, exc_value, exc_traceback = exc_info
362
363
363 org_exc = getattr(e, '_org_exc', None)
364 org_exc = getattr(e, '_org_exc', None)
364 org_exc_name = None
365 org_exc_name = None
365 org_exc_tb = ''
366 org_exc_tb = ''
366 if org_exc:
367 if org_exc:
367 org_exc_name = org_exc.__class__.__name__
368 org_exc_name = org_exc.__class__.__name__
368 org_exc_tb = getattr(e, '_org_exc_tb', '')
369 org_exc_tb = getattr(e, '_org_exc_tb', '')
369 # replace our "faked" exception with the original one
370 # replace our "faked" exception with the original one
370 exc_info[0] = org_exc.__class__
371 exc_info[0] = org_exc.__class__
371 exc_info[1] = org_exc
372 exc_info[1] = org_exc
372
373
373 store_exception(id(exc_info), exc_info)
374 store_exception(id(exc_info), exc_info)
374
375
375 tb_info = ''.join(
376 tb_info = ''.join(
376 traceback.format_exception(exc_type, exc_value, exc_traceback))
377 traceback.format_exception(exc_type, exc_value, exc_traceback))
377
378
378 type_ = e.__class__.__name__
379 type_ = e.__class__.__name__
379 if type_ not in self.ALLOWED_EXCEPTIONS:
380 if type_ not in self.ALLOWED_EXCEPTIONS:
380 type_ = None
381 type_ = None
381
382
382 resp = {
383 resp = {
383 'id': payload.get('id'),
384 'id': payload.get('id'),
384 'error': {
385 'error': {
385 'message': e.message,
386 'message': e.message,
386 'traceback': tb_info,
387 'traceback': tb_info,
387 'org_exc': org_exc_name,
388 'org_exc': org_exc_name,
388 'org_exc_tb': org_exc_tb,
389 'org_exc_tb': org_exc_tb,
389 'type': type_
390 'type': type_
390 }
391 }
391 }
392 }
392 try:
393 try:
393 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
394 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
394 except AttributeError:
395 except AttributeError:
395 pass
396 pass
396 else:
397 else:
397 resp = {
398 resp = {
398 'id': payload.get('id'),
399 'id': payload.get('id'),
399 'result': resp
400 'result': resp
400 }
401 }
401
402
402 return resp
403 return resp
403
404
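vcs_view dispatches msgpack-encoded RPC calls: the body carries a method name, a wire dict describing the repository and call context (prepended to the positional args), plus args and kwargs; the response echoes the request id and contains either a result or an error dict. A minimal sketch of such a payload, assuming a call against the hg backend; the method name and paths are illustrative:

import uuid
import msgpack

payload = {
    'id': 'req-1',                        # echoed back in the response
    'method': 'discover_hg_version',      # illustrative remote method name
    'params': {
        'wire': {
            'path': '/srv/repos/example',     # illustrative repository path
            'context': str(uuid.uuid4()),     # converted to uuid.UUID server side
            'cache': True,
        },
        'args': [],
        'kwargs': {},
    },
}
body = msgpack.packb(payload)
# POST `body` to /hg; the msgpack renderer packs the reply, which is either
# {'id': ..., 'result': ...} or {'id': ..., 'error': {...}} as built above.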
404 def status_view(self, request):
405 def status_view(self, request):
405 import vcsserver
406 import vcsserver
406 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
407 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
407 'pid': os.getpid()}
408 'pid': os.getpid()}
408
409
409 def service_view(self, request):
410 def service_view(self, request):
410 import vcsserver
411 import vcsserver
411
412
412 payload = msgpack.unpackb(request.body, use_list=True)
413 payload = msgpack.unpackb(request.body, use_list=True)
413
414
414 try:
415 try:
415 path = self.global_config['__file__']
416 path = self.global_config['__file__']
416 config = configparser.ConfigParser()
417 config = configparser.ConfigParser()
417 config.read(path)
418 config.read(path)
418 parsed_ini = config
419 parsed_ini = config
419 if parsed_ini.has_section('server:main'):
420 if parsed_ini.has_section('server:main'):
420 parsed_ini = dict(parsed_ini.items('server:main'))
421 parsed_ini = dict(parsed_ini.items('server:main'))
421 except Exception:
422 except Exception:
422 log.exception('Failed to read .ini file for display')
423 log.exception('Failed to read .ini file for display')
423 parsed_ini = {}
424 parsed_ini = {}
424
425
425 resp = {
426 resp = {
426 'id': payload.get('id'),
427 'id': payload.get('id'),
427 'result': dict(
428 'result': dict(
428 version=vcsserver.__version__,
429 version=vcsserver.__version__,
429 config=parsed_ini,
430 config=parsed_ini,
430 payload=payload,
431 payload=payload,
431 )
432 )
432 }
433 }
433 return resp
434 return resp
434
435
435 def _msgpack_renderer_factory(self, info):
436 def _msgpack_renderer_factory(self, info):
436 def _render(value, system):
437 def _render(value, system):
437 value = msgpack.packb(value)
438 value = msgpack.packb(value)
438 request = system.get('request')
439 request = system.get('request')
439 if request is not None:
440 if request is not None:
440 response = request.response
441 response = request.response
441 ct = response.content_type
442 ct = response.content_type
442 if ct == response.default_content_type:
443 if ct == response.default_content_type:
443 response.content_type = 'application/x-msgpack'
444 response.content_type = 'application/x-msgpack'
444 return value
445 return value
445 return _render
446 return _render
446
447
447 def set_env_from_config(self, environ, config):
448 def set_env_from_config(self, environ, config):
448 dict_conf = {}
449 dict_conf = {}
449 try:
450 try:
450 for elem in config:
451 for elem in config:
451 if elem[0] == 'rhodecode':
452 if elem[0] == 'rhodecode':
452 dict_conf = json.loads(elem[2])
453 dict_conf = json.loads(elem[2])
453 break
454 break
454 except Exception:
455 except Exception:
455 log.exception('Failed to fetch SCM CONFIG')
456 log.exception('Failed to fetch SCM CONFIG')
456 return
457 return
457
458
458 username = dict_conf.get('username')
459 username = dict_conf.get('username')
459 if username:
460 if username:
460 environ['REMOTE_USER'] = username
461 environ['REMOTE_USER'] = username
461 # mercurial specific, some extension APIs rely on this
462 # mercurial specific, some extension APIs rely on this
462 environ['HGUSER'] = username
463 environ['HGUSER'] = username
463
464
464 ip = dict_conf.get('ip')
465 ip = dict_conf.get('ip')
465 if ip:
466 if ip:
466 environ['REMOTE_HOST'] = ip
467 environ['REMOTE_HOST'] = ip
467
468
468 if _is_request_chunked(environ):
469 if _is_request_chunked(environ):
469 # set the compatibility flag for webob
470 # set the compatibility flag for webob
470 environ['wsgi.input_terminated'] = True
471 environ['wsgi.input_terminated'] = True
471
472
472 def hg_proxy(self):
473 def hg_proxy(self):
473 @wsgiapp
474 @wsgiapp
474 def _hg_proxy(environ, start_response):
475 def _hg_proxy(environ, start_response):
475 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
476 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
476 return app(environ, start_response)
477 return app(environ, start_response)
477 return _hg_proxy
478 return _hg_proxy
478
479
479 def git_proxy(self):
480 def git_proxy(self):
480 @wsgiapp
481 @wsgiapp
481 def _git_proxy(environ, start_response):
482 def _git_proxy(environ, start_response):
482 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
483 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
483 return app(environ, start_response)
484 return app(environ, start_response)
484 return _git_proxy
485 return _git_proxy
485
486
486 def hg_stream(self):
487 def hg_stream(self):
487 if self._use_echo_app:
488 if self._use_echo_app:
488 @wsgiapp
489 @wsgiapp
489 def _hg_stream(environ, start_response):
490 def _hg_stream(environ, start_response):
490 app = EchoApp('fake_path', 'fake_name', None)
491 app = EchoApp('fake_path', 'fake_name', None)
491 return app(environ, start_response)
492 return app(environ, start_response)
492 return _hg_stream
493 return _hg_stream
493 else:
494 else:
494 @wsgiapp
495 @wsgiapp
495 def _hg_stream(environ, start_response):
496 def _hg_stream(environ, start_response):
496 log.debug('http-app: handling hg stream')
497 log.debug('http-app: handling hg stream')
497 repo_path = environ['HTTP_X_RC_REPO_PATH']
498 repo_path = environ['HTTP_X_RC_REPO_PATH']
498 repo_name = environ['HTTP_X_RC_REPO_NAME']
499 repo_name = environ['HTTP_X_RC_REPO_NAME']
499 packed_config = base64.b64decode(
500 packed_config = base64.b64decode(
500 environ['HTTP_X_RC_REPO_CONFIG'])
501 environ['HTTP_X_RC_REPO_CONFIG'])
501 config = msgpack.unpackb(packed_config)
502 config = msgpack.unpackb(packed_config)
502 app = scm_app.create_hg_wsgi_app(
503 app = scm_app.create_hg_wsgi_app(
503 repo_path, repo_name, config)
504 repo_path, repo_name, config)
504
505
505 # Consistent path information for hgweb
506 # Consistent path information for hgweb
506 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
507 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
507 environ['REPO_NAME'] = repo_name
508 environ['REPO_NAME'] = repo_name
508 self.set_env_from_config(environ, config)
509 self.set_env_from_config(environ, config)
509
510
510 log.debug('http-app: starting app handler '
511 log.debug('http-app: starting app handler '
511 'with %s and process request', app)
512 'with %s and process request', app)
512 return app(environ, ResponseFilter(start_response))
513 return app(environ, ResponseFilter(start_response))
513 return _hg_stream
514 return _hg_stream
514
515
515 def git_stream(self):
516 def git_stream(self):
516 if self._use_echo_app:
517 if self._use_echo_app:
517 @wsgiapp
518 @wsgiapp
518 def _git_stream(environ, start_response):
519 def _git_stream(environ, start_response):
519 app = EchoApp('fake_path', 'fake_name', None)
520 app = EchoApp('fake_path', 'fake_name', None)
520 return app(environ, start_response)
521 return app(environ, start_response)
521 return _git_stream
522 return _git_stream
522 else:
523 else:
523 @wsgiapp
524 @wsgiapp
524 def _git_stream(environ, start_response):
525 def _git_stream(environ, start_response):
525 log.debug('http-app: handling git stream')
526 log.debug('http-app: handling git stream')
526 repo_path = environ['HTTP_X_RC_REPO_PATH']
527 repo_path = environ['HTTP_X_RC_REPO_PATH']
527 repo_name = environ['HTTP_X_RC_REPO_NAME']
528 repo_name = environ['HTTP_X_RC_REPO_NAME']
528 packed_config = base64.b64decode(
529 packed_config = base64.b64decode(
529 environ['HTTP_X_RC_REPO_CONFIG'])
530 environ['HTTP_X_RC_REPO_CONFIG'])
530 config = msgpack.unpackb(packed_config)
531 config = msgpack.unpackb(packed_config)
531
532
532 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
533 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
533 self.set_env_from_config(environ, config)
534 self.set_env_from_config(environ, config)
534
535
535 content_type = environ.get('CONTENT_TYPE', '')
536 content_type = environ.get('CONTENT_TYPE', '')
536
537
537 path = environ['PATH_INFO']
538 path = environ['PATH_INFO']
538 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
539 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
539 log.debug(
540 log.debug(
540 'LFS: Detecting if request `%s` is LFS server path based '
541 'LFS: Detecting if request `%s` is LFS server path based '
541 'on content type:`%s`, is_lfs:%s',
542 'on content type:`%s`, is_lfs:%s',
542 path, content_type, is_lfs_request)
543 path, content_type, is_lfs_request)
543
544
544 if not is_lfs_request:
545 if not is_lfs_request:
545 # fallback detection by path
546 # fallback detection by path
546 if GIT_LFS_PROTO_PAT.match(path):
547 if GIT_LFS_PROTO_PAT.match(path):
547 is_lfs_request = True
548 is_lfs_request = True
548 log.debug(
549 log.debug(
549 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
550 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
550 path, is_lfs_request)
551 path, is_lfs_request)
551
552
552 if is_lfs_request:
553 if is_lfs_request:
553 app = scm_app.create_git_lfs_wsgi_app(
554 app = scm_app.create_git_lfs_wsgi_app(
554 repo_path, repo_name, config)
555 repo_path, repo_name, config)
555 else:
556 else:
556 app = scm_app.create_git_wsgi_app(
557 app = scm_app.create_git_wsgi_app(
557 repo_path, repo_name, config)
558 repo_path, repo_name, config)
558
559
559 log.debug('http-app: starting app handler '
560 log.debug('http-app: starting app handler '
560 'with %s and process request', app)
561 'with %s and process request', app)
561
562
562 return app(environ, start_response)
563 return app(environ, start_response)
563
564
564 return _git_stream
565 return _git_stream
565
566
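Both stream handlers pull the repository location and configuration out of X-RC-* headers: the repo path and name, a base64-encoded msgpack config blob, and the original PATH_INFO. A hedged sketch of how a caller might assemble those environ entries, matching the py2-era code above; the paths and the config contents are illustrative:

import base64
import msgpack

# set_env_from_config() expects a list of (section, key, value) tuples and reads
# the JSON stored under the 'rhodecode' section.
repo_config = [('rhodecode', 'extras', '{"username": "admin", "ip": "127.0.0.1"}')]

environ_headers = {
    'HTTP_X_RC_REPO_PATH': '/srv/repos/example.git',
    'HTTP_X_RC_REPO_NAME': 'example.git',
    'HTTP_X_RC_REPO_CONFIG': base64.b64encode(msgpack.packb(repo_config)),
    'HTTP_X_RC_PATH_INFO': '/example.git/info/refs',
}
# The handler b64-decodes and unpacks the config, restores PATH_INFO, and
# set_env_from_config() fills REMOTE_USER/HGUSER and REMOTE_HOST from it.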
566 def handle_vcs_exception(self, exception, request):
567 def handle_vcs_exception(self, exception, request):
567 _vcs_kind = getattr(exception, '_vcs_kind', '')
568 _vcs_kind = getattr(exception, '_vcs_kind', '')
568 if _vcs_kind == 'repo_locked':
569 if _vcs_kind == 'repo_locked':
569 # Get custom repo-locked status code if present.
570 # Get custom repo-locked status code if present.
570 status_code = request.headers.get('X-RC-Locked-Status-Code')
571 status_code = request.headers.get('X-RC-Locked-Status-Code')
571 return HTTPRepoLocked(
572 return HTTPRepoLocked(
572 title=exception.message, status_code=status_code)
573 title=exception.message, status_code=status_code)
573
574
574 elif _vcs_kind == 'repo_branch_protected':
575 elif _vcs_kind == 'repo_branch_protected':
575 # Get custom repo-branch-protected status code if present.
576 # Get custom repo-branch-protected status code if present.
576 return HTTPRepoBranchProtected(title=exception.message)
577 return HTTPRepoBranchProtected(title=exception.message)
577
578
578 exc_info = request.exc_info
579 exc_info = request.exc_info
579 store_exception(id(exc_info), exc_info)
580 store_exception(id(exc_info), exc_info)
580
581
581 traceback_info = 'unavailable'
582 traceback_info = 'unavailable'
582 if request.exc_info:
583 if request.exc_info:
583 exc_type, exc_value, exc_tb = request.exc_info
584 exc_type, exc_value, exc_tb = request.exc_info
584 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
585 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
585
586
586 log.error(
587 log.error(
587 'error occurred handling this request for path: %s, \n tb: %s',
588 'error occurred handling this request for path: %s, \n tb: %s',
588 request.path, traceback_info)
589 request.path, traceback_info)
589 raise exception
590 raise exception
590
591
591
592
592 class ResponseFilter(object):
593 class ResponseFilter(object):
593
594
594 def __init__(self, start_response):
595 def __init__(self, start_response):
595 self._start_response = start_response
596 self._start_response = start_response
596
597
597 def __call__(self, status, response_headers, exc_info=None):
598 def __call__(self, status, response_headers, exc_info=None):
598 headers = tuple(
599 headers = tuple(
599 (h, v) for h, v in response_headers
600 (h, v) for h, v in response_headers
600 if not wsgiref.util.is_hop_by_hop(h))
601 if not wsgiref.util.is_hop_by_hop(h))
601 return self._start_response(status, headers, exc_info)
602 return self._start_response(status, headers, exc_info)
602
603
603
604
604 def main(global_config, **settings):
605 def main(global_config, **settings):
605 if MercurialFactory:
606 if MercurialFactory:
606 hgpatches.patch_largefiles_capabilities()
607 hgpatches.patch_largefiles_capabilities()
607 hgpatches.patch_subrepo_type_mapping()
608 hgpatches.patch_subrepo_type_mapping()
608
609
609 app = HTTPApplication(settings=settings, global_config=global_config)
610 app = HTTPApplication(settings=settings, global_config=global_config)
610 return app.wsgi_app()
611 return app.wsgi_app()
@@ -1,775 +1,772 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 # Set of svn compatible version flags.
43 # Set of svn compatible version flags.
44 # Compare with subversion/svnadmin/svnadmin.c
44 # Compare with subversion/svnadmin/svnadmin.c
45 svn_compatible_versions = {
45 svn_compatible_versions = {
46 'pre-1.4-compatible',
46 'pre-1.4-compatible',
47 'pre-1.5-compatible',
47 'pre-1.5-compatible',
48 'pre-1.6-compatible',
48 'pre-1.6-compatible',
49 'pre-1.8-compatible',
49 'pre-1.8-compatible',
50 'pre-1.9-compatible'
50 'pre-1.9-compatible'
51 }
51 }
52
52
53 svn_compatible_versions_map = {
53 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
54 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
58 'pre-1.9-compatible': '1.8',
59 }
59 }
60
60
61
61
62 def reraise_safe_exceptions(func):
62 def reraise_safe_exceptions(func):
63 """Decorator for converting svn exceptions to something neutral."""
63 """Decorator for converting svn exceptions to something neutral."""
64 def wrapper(*args, **kwargs):
64 def wrapper(*args, **kwargs):
65 try:
65 try:
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 except Exception as e:
67 except Exception as e:
68 if not hasattr(e, '_vcs_kind'):
68 if not hasattr(e, '_vcs_kind'):
69 log.exception("Unhandled exception in svn remote call")
69 log.exception("Unhandled exception in svn remote call")
70 raise_from_original(exceptions.UnhandledException(e))
70 raise_from_original(exceptions.UnhandledException(e))
71 raise
71 raise
72 return wrapper
72 return wrapper
73
73
74
74
75 class SubversionFactory(RepoFactory):
75 class SubversionFactory(RepoFactory):
76 repo_type = 'svn'
76 repo_type = 'svn'
77
77
78 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
80 if create:
81 fs_config = {'compatible-version': '1.9'}
81 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
82 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
85 .format(compatible_version))
86 fs_config['compatible-version'] = \
86 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
87 svn_compatible_versions_map[compatible_version]
88
88
89 log.debug('Create SVN repo with config "%s"', fs_config)
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
91 else:
92 repo = svn.repos.open(path)
92 repo = svn.repos.open(path)
93
93
94 log.debug('Got SVN object: %s', repo)
94 log.debug('Got SVN object: %s', repo)
95 return repo
95 return repo
96
96
97 def repo(self, wire, create=False, compatible_version=None):
97 def repo(self, wire, create=False, compatible_version=None):
98 """
98 """
99 Get a repository instance for the given path.
99 Get a repository instance for the given path.
100
101 Uses internally the low level beaker API since the decorators introduce
102 significant overhead.
103 """
100 """
104 region = self._cache_region
101 region = self._cache_region
105 context = wire.get('context', None)
102 context = wire.get('context', None)
106 repo_path = wire.get('path', '')
103 repo_path = wire.get('path', '')
107 context_uid = '{}'.format(context)
104 context_uid = '{}'.format(context)
108 cache = wire.get('cache', True)
105 cache = wire.get('cache', True)
109 cache_on = context and cache
106 cache_on = context and cache
110
107
111 @region.conditional_cache_on_arguments(condition=cache_on)
108 @region.conditional_cache_on_arguments(condition=cache_on)
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
109 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 return self._create_repo(wire, create, compatible_version)
110 return self._create_repo(wire, create, compatible_version)
114
111
115 return create_new_repo(self.repo_type, repo_path, context_uid,
112 return create_new_repo(self.repo_type, repo_path, context_uid,
116 compatible_version)
113 compatible_version)
117
114
118
115
119 NODE_TYPE_MAPPING = {
116 NODE_TYPE_MAPPING = {
120 svn.core.svn_node_file: 'file',
117 svn.core.svn_node_file: 'file',
121 svn.core.svn_node_dir: 'dir',
118 svn.core.svn_node_dir: 'dir',
122 }
119 }
123
120
124
121
125 class SvnRemote(object):
122 class SvnRemote(object):
126
123
127 def __init__(self, factory, hg_factory=None):
124 def __init__(self, factory, hg_factory=None):
128 self._factory = factory
125 self._factory = factory
129 # TODO: Remove once we do not use internal Mercurial objects anymore
126 # TODO: Remove once we do not use internal Mercurial objects anymore
130 # for subversion
127 # for subversion
131 self._hg_factory = hg_factory
128 self._hg_factory = hg_factory
132
129
133 @reraise_safe_exceptions
130 @reraise_safe_exceptions
134 def discover_svn_version(self):
131 def discover_svn_version(self):
135 try:
132 try:
136 import svn.core
133 import svn.core
137 svn_ver = svn.core.SVN_VERSION
134 svn_ver = svn.core.SVN_VERSION
138 except ImportError:
135 except ImportError:
139 svn_ver = None
136 svn_ver = None
140 return svn_ver
137 return svn_ver
141
138
142 @reraise_safe_exceptions
139 @reraise_safe_exceptions
143 def is_empty(self, wire):
140 def is_empty(self, wire):
144 repo = self._factory.repo(wire)
141 repo = self._factory.repo(wire)
145
142
146 try:
143 try:
147 return self.lookup(wire, -1) == 0
144 return self.lookup(wire, -1) == 0
148 except Exception:
145 except Exception:
149 log.exception("failed to read object_store")
146 log.exception("failed to read object_store")
150 return False
147 return False
151
148
152 def check_url(self, url, config_items):
149 def check_url(self, url, config_items):
153 # this can throw exception if not installed, but we detect this
150 # this can throw exception if not installed, but we detect this
154 from hgsubversion import svnrepo
151 from hgsubversion import svnrepo
155
152
156 baseui = self._hg_factory._create_config(config_items)
153 baseui = self._hg_factory._create_config(config_items)
157 # the uuid function returns a valid UUID only for a proper repo,
154 # the uuid function returns a valid UUID only for a proper repo,
158 # otherwise it throws an exception
155 # otherwise it throws an exception
159 try:
156 try:
160 svnrepo.svnremoterepo(baseui, url).svn.uuid
157 svnrepo.svnremoterepo(baseui, url).svn.uuid
161 except Exception:
158 except Exception:
162 tb = traceback.format_exc()
159 tb = traceback.format_exc()
163 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
160 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
164 raise URLError(
161 raise URLError(
165 '"%s" is not a valid Subversion source url.' % (url, ))
162 '"%s" is not a valid Subversion source url.' % (url, ))
166 return True
163 return True
167
164
168 def is_path_valid_repository(self, wire, path):
165 def is_path_valid_repository(self, wire, path):
169
166
170 # NOTE(marcink): short circuit the check for SVN repo
167 # NOTE(marcink): short circuit the check for SVN repo
171 # the repos.open might be expensive to check, but we have one cheap
168 # the repos.open might be expensive to check, but we have one cheap
172 # precondition we can use: check for the 'format' file
169 # precondition we can use: check for the 'format' file
173
170
174 if not os.path.isfile(os.path.join(path, 'format')):
171 if not os.path.isfile(os.path.join(path, 'format')):
175 return False
172 return False
176
173
177 try:
174 try:
178 svn.repos.open(path)
175 svn.repos.open(path)
179 except svn.core.SubversionException:
176 except svn.core.SubversionException:
180 tb = traceback.format_exc()
177 tb = traceback.format_exc()
181 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
178 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
182 return False
179 return False
183 return True
180 return True
184
181
185 @reraise_safe_exceptions
182 @reraise_safe_exceptions
186 def verify(self, wire,):
183 def verify(self, wire,):
187 repo_path = wire['path']
184 repo_path = wire['path']
188 if not self.is_path_valid_repository(wire, repo_path):
185 if not self.is_path_valid_repository(wire, repo_path):
189 raise Exception(
186 raise Exception(
190 "Path %s is not a valid Subversion repository." % repo_path)
187 "Path %s is not a valid Subversion repository." % repo_path)
191
188
192 cmd = ['svnadmin', 'info', repo_path]
189 cmd = ['svnadmin', 'info', repo_path]
193 stdout, stderr = subprocessio.run_command(cmd)
190 stdout, stderr = subprocessio.run_command(cmd)
194 return stdout
191 return stdout
195
192
196 def lookup(self, wire, revision):
193 def lookup(self, wire, revision):
197 if revision not in [-1, None, 'HEAD']:
194 if revision not in [-1, None, 'HEAD']:
198 raise NotImplementedError
195 raise NotImplementedError
199 repo = self._factory.repo(wire)
196 repo = self._factory.repo(wire)
200 fs_ptr = svn.repos.fs(repo)
197 fs_ptr = svn.repos.fs(repo)
201 head = svn.fs.youngest_rev(fs_ptr)
198 head = svn.fs.youngest_rev(fs_ptr)
202 return head
199 return head
203
200
204 def lookup_interval(self, wire, start_ts, end_ts):
201 def lookup_interval(self, wire, start_ts, end_ts):
205 repo = self._factory.repo(wire)
202 repo = self._factory.repo(wire)
206 fsobj = svn.repos.fs(repo)
203 fsobj = svn.repos.fs(repo)
207 start_rev = None
204 start_rev = None
208 end_rev = None
205 end_rev = None
209 if start_ts:
206 if start_ts:
210 start_ts_svn = apr_time_t(start_ts)
207 start_ts_svn = apr_time_t(start_ts)
211 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
208 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
212 else:
209 else:
213 start_rev = 1
210 start_rev = 1
214 if end_ts:
211 if end_ts:
215 end_ts_svn = apr_time_t(end_ts)
212 end_ts_svn = apr_time_t(end_ts)
216 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
213 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
217 else:
214 else:
218 end_rev = svn.fs.youngest_rev(fsobj)
215 end_rev = svn.fs.youngest_rev(fsobj)
219 return start_rev, end_rev
216 return start_rev, end_rev
220
217
221 def revision_properties(self, wire, revision):
218 def revision_properties(self, wire, revision):
222 repo = self._factory.repo(wire)
219 repo = self._factory.repo(wire)
223 fs_ptr = svn.repos.fs(repo)
220 fs_ptr = svn.repos.fs(repo)
224 return svn.fs.revision_proplist(fs_ptr, revision)
221 return svn.fs.revision_proplist(fs_ptr, revision)
225
222
226 def revision_changes(self, wire, revision):
223 def revision_changes(self, wire, revision):
227
224
228 repo = self._factory.repo(wire)
225 repo = self._factory.repo(wire)
229 fsobj = svn.repos.fs(repo)
226 fsobj = svn.repos.fs(repo)
230 rev_root = svn.fs.revision_root(fsobj, revision)
227 rev_root = svn.fs.revision_root(fsobj, revision)
231
228
232 editor = svn.repos.ChangeCollector(fsobj, rev_root)
229 editor = svn.repos.ChangeCollector(fsobj, rev_root)
233 editor_ptr, editor_baton = svn.delta.make_editor(editor)
230 editor_ptr, editor_baton = svn.delta.make_editor(editor)
234 base_dir = ""
231 base_dir = ""
235 send_deltas = False
232 send_deltas = False
236 svn.repos.replay2(
233 svn.repos.replay2(
237 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
234 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
238 editor_ptr, editor_baton, None)
235 editor_ptr, editor_baton, None)
239
236
240 added = []
237 added = []
241 changed = []
238 changed = []
242 removed = []
239 removed = []
243
240
244 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
241 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
245 for path, change in editor.changes.iteritems():
242 for path, change in editor.changes.iteritems():
246 # TODO: Decide what to do with directory nodes. Subversion can add
243 # TODO: Decide what to do with directory nodes. Subversion can add
247 # empty directories.
244 # empty directories.
248
245
249 if change.item_kind == svn.core.svn_node_dir:
246 if change.item_kind == svn.core.svn_node_dir:
250 continue
247 continue
251 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
248 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
252 added.append(path)
249 added.append(path)
253 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
250 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
254 svn.repos.CHANGE_ACTION_REPLACE]:
251 svn.repos.CHANGE_ACTION_REPLACE]:
255 changed.append(path)
252 changed.append(path)
256 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
253 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
257 removed.append(path)
254 removed.append(path)
258 else:
255 else:
259 raise NotImplementedError(
256 raise NotImplementedError(
260 "Action %s not supported on path %s" % (
257 "Action %s not supported on path %s" % (
261 change.action, path))
258 change.action, path))
262
259
263 changes = {
260 changes = {
264 'added': added,
261 'added': added,
265 'changed': changed,
262 'changed': changed,
266 'removed': removed,
263 'removed': removed,
267 }
264 }
268 return changes
265 return changes
269
266
270 def node_history(self, wire, path, revision, limit):
267 def node_history(self, wire, path, revision, limit):
271 cross_copies = False
268 cross_copies = False
272 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
273 fsobj = svn.repos.fs(repo)
270 fsobj = svn.repos.fs(repo)
274 rev_root = svn.fs.revision_root(fsobj, revision)
271 rev_root = svn.fs.revision_root(fsobj, revision)
275
272
276 history_revisions = []
273 history_revisions = []
277 history = svn.fs.node_history(rev_root, path)
274 history = svn.fs.node_history(rev_root, path)
278 history = svn.fs.history_prev(history, cross_copies)
275 history = svn.fs.history_prev(history, cross_copies)
279 while history:
276 while history:
280 __, node_revision = svn.fs.history_location(history)
277 __, node_revision = svn.fs.history_location(history)
281 history_revisions.append(node_revision)
278 history_revisions.append(node_revision)
282 if limit and len(history_revisions) >= limit:
279 if limit and len(history_revisions) >= limit:
283 break
280 break
284 history = svn.fs.history_prev(history, cross_copies)
281 history = svn.fs.history_prev(history, cross_copies)
285 return history_revisions
282 return history_revisions
286
283
287 def node_properties(self, wire, path, revision):
284 def node_properties(self, wire, path, revision):
288 repo = self._factory.repo(wire)
285 repo = self._factory.repo(wire)
289 fsobj = svn.repos.fs(repo)
286 fsobj = svn.repos.fs(repo)
290 rev_root = svn.fs.revision_root(fsobj, revision)
287 rev_root = svn.fs.revision_root(fsobj, revision)
291 return svn.fs.node_proplist(rev_root, path)
288 return svn.fs.node_proplist(rev_root, path)
292
289
293 def file_annotate(self, wire, path, revision):
290 def file_annotate(self, wire, path, revision):
294 abs_path = 'file://' + urllib.pathname2url(
291 abs_path = 'file://' + urllib.pathname2url(
295 vcspath.join(wire['path'], path))
292 vcspath.join(wire['path'], path))
296 file_uri = svn.core.svn_path_canonicalize(abs_path)
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
297
294
298 start_rev = svn_opt_revision_value_t(0)
295 start_rev = svn_opt_revision_value_t(0)
299 peg_rev = svn_opt_revision_value_t(revision)
296 peg_rev = svn_opt_revision_value_t(revision)
300 end_rev = peg_rev
297 end_rev = peg_rev
301
298
302 annotations = []
299 annotations = []
303
300
304 def receiver(line_no, revision, author, date, line, pool):
301 def receiver(line_no, revision, author, date, line, pool):
305 annotations.append((line_no, revision, line))
302 annotations.append((line_no, revision, line))
306
303
307 # TODO: Cannot use blame5, missing typemap function in the swig code
304 # TODO: Cannot use blame5, missing typemap function in the swig code
308 try:
305 try:
309 svn.client.blame2(
306 svn.client.blame2(
310 file_uri, peg_rev, start_rev, end_rev,
307 file_uri, peg_rev, start_rev, end_rev,
311 receiver, svn.client.create_context())
308 receiver, svn.client.create_context())
312 except svn.core.SubversionException as exc:
309 except svn.core.SubversionException as exc:
313 log.exception("Error during blame operation.")
310 log.exception("Error during blame operation.")
314 raise Exception(
311 raise Exception(
315 "Blame not supported or file does not exist at path %s. "
312 "Blame not supported or file does not exist at path %s. "
316 "Error %s." % (path, exc))
313 "Error %s." % (path, exc))
317
314
318 return annotations
315 return annotations
319
316
320 def get_node_type(self, wire, path, rev=None):
317 def get_node_type(self, wire, path, rev=None):
321 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
322 fs_ptr = svn.repos.fs(repo)
319 fs_ptr = svn.repos.fs(repo)
323 if rev is None:
320 if rev is None:
324 rev = svn.fs.youngest_rev(fs_ptr)
321 rev = svn.fs.youngest_rev(fs_ptr)
325 root = svn.fs.revision_root(fs_ptr, rev)
322 root = svn.fs.revision_root(fs_ptr, rev)
326 node = svn.fs.check_path(root, path)
323 node = svn.fs.check_path(root, path)
327 return NODE_TYPE_MAPPING.get(node, None)
324 return NODE_TYPE_MAPPING.get(node, None)
328
325
329 def get_nodes(self, wire, path, revision=None):
326 def get_nodes(self, wire, path, revision=None):
330 repo = self._factory.repo(wire)
327 repo = self._factory.repo(wire)
331 fsobj = svn.repos.fs(repo)
328 fsobj = svn.repos.fs(repo)
332 if revision is None:
329 if revision is None:
333 revision = svn.fs.youngest_rev(fsobj)
330 revision = svn.fs.youngest_rev(fsobj)
334 root = svn.fs.revision_root(fsobj, revision)
331 root = svn.fs.revision_root(fsobj, revision)
335 entries = svn.fs.dir_entries(root, path)
332 entries = svn.fs.dir_entries(root, path)
336 result = []
333 result = []
337 for entry_path, entry_info in entries.iteritems():
334 for entry_path, entry_info in entries.iteritems():
338 result.append(
335 result.append(
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
336 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 return result
337 return result
341
338
342 def get_file_content(self, wire, path, rev=None):
339 def get_file_content(self, wire, path, rev=None):
343 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
344 fsobj = svn.repos.fs(repo)
341 fsobj = svn.repos.fs(repo)
345 if rev is None:
342 if rev is None:
346 rev = svn.fs.youngest_revision(fsobj)
343 rev = svn.fs.youngest_revision(fsobj)
347 root = svn.fs.revision_root(fsobj, rev)
344 root = svn.fs.revision_root(fsobj, rev)
348 content = svn.core.Stream(svn.fs.file_contents(root, path))
345 content = svn.core.Stream(svn.fs.file_contents(root, path))
349 return content.read()
346 return content.read()
350
347
351 def get_file_size(self, wire, path, revision=None):
348 def get_file_size(self, wire, path, revision=None):
352 repo = self._factory.repo(wire)
349 repo = self._factory.repo(wire)
353 fsobj = svn.repos.fs(repo)
350 fsobj = svn.repos.fs(repo)
354 if revision is None:
351 if revision is None:
355 revision = svn.fs.youngest_revision(fsobj)
352 revision = svn.fs.youngest_revision(fsobj)
356 root = svn.fs.revision_root(fsobj, revision)
353 root = svn.fs.revision_root(fsobj, revision)
357 size = svn.fs.file_length(root, path)
354 size = svn.fs.file_length(root, path)
358 return size
355 return size
359
356
360 def create_repository(self, wire, compatible_version=None):
357 def create_repository(self, wire, compatible_version=None):
361 log.info('Creating Subversion repository in path "%s"', wire['path'])
358 log.info('Creating Subversion repository in path "%s"', wire['path'])
362 self._factory.repo(wire, create=True,
359 self._factory.repo(wire, create=True,
363 compatible_version=compatible_version)
360 compatible_version=compatible_version)
364
361
365 def get_url_and_credentials(self, src_url):
362 def get_url_and_credentials(self, src_url):
366 obj = urlparse.urlparse(src_url)
363 obj = urlparse.urlparse(src_url)
367 username = obj.username or None
364 username = obj.username or None
368 password = obj.password or None
365 password = obj.password or None
369 return username, password, src_url
366 return username, password, src_url
370
367
371 def import_remote_repository(self, wire, src_url):
368 def import_remote_repository(self, wire, src_url):
372 repo_path = wire['path']
369 repo_path = wire['path']
373 if not self.is_path_valid_repository(wire, repo_path):
370 if not self.is_path_valid_repository(wire, repo_path):
374 raise Exception(
371 raise Exception(
375 "Path %s is not a valid Subversion repository." % repo_path)
372 "Path %s is not a valid Subversion repository." % repo_path)
376
373
377 username, password, src_url = self.get_url_and_credentials(src_url)
374 username, password, src_url = self.get_url_and_credentials(src_url)
378 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
375 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
379 '--trust-server-cert-failures=unknown-ca']
376 '--trust-server-cert-failures=unknown-ca']
380 if username and password:
377 if username and password:
381 rdump_cmd += ['--username', username, '--password', password]
378 rdump_cmd += ['--username', username, '--password', password]
382 rdump_cmd += [src_url]
379 rdump_cmd += [src_url]
383
380
384 rdump = subprocess.Popen(
381 rdump = subprocess.Popen(
385 rdump_cmd,
382 rdump_cmd,
386 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
383 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
387 load = subprocess.Popen(
384 load = subprocess.Popen(
388 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
385 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
389
386
390 # TODO: johbo: This can be a very long operation, might be better
387 # TODO: johbo: This can be a very long operation, might be better
391 # to track some kind of status and provide an api to check if the
388 # to track some kind of status and provide an api to check if the
392 # import is done.
389 # import is done.
393 rdump.wait()
390 rdump.wait()
394 load.wait()
391 load.wait()
395
392
396 log.debug('Dump process ended with return code: %s', rdump.returncode)
393 log.debug('Dump process ended with return code: %s', rdump.returncode)
397 if rdump.returncode != 0:
394 if rdump.returncode != 0:
398 errors = rdump.stderr.read()
395 errors = rdump.stderr.read()
399 log.error('svnrdump dump failed: statuscode %s: message: %s',
396 log.error('svnrdump dump failed: statuscode %s: message: %s',
400 rdump.returncode, errors)
397 rdump.returncode, errors)
401 reason = 'UNKNOWN'
398 reason = 'UNKNOWN'
402 if 'svnrdump: E230001:' in errors:
399 if 'svnrdump: E230001:' in errors:
403 reason = 'INVALID_CERTIFICATE'
400 reason = 'INVALID_CERTIFICATE'
404
401
405 if reason == 'UNKNOWN':
402 if reason == 'UNKNOWN':
406 reason = 'UNKNOWN:{}'.format(errors)
403 reason = 'UNKNOWN:{}'.format(errors)
407 raise Exception(
404 raise Exception(
408 'Failed to dump the remote repository from %s. Reason: %s' % (
405 'Failed to dump the remote repository from %s. Reason: %s' % (
409 src_url, reason))
406 src_url, reason))
410 if load.returncode != 0:
407 if load.returncode != 0:
411 raise Exception(
408 raise Exception(
412 'Failed to load the dump of the remote repository from %s.' %
409 'Failed to load the dump of the remote repository from %s.' %
413 (src_url, ))
410 (src_url, ))
414
411
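# The two subprocesses above amount to the shell pipeline
# `svnrdump dump <src_url> | svnadmin load <repo_path>`. A rough usage
# sketch with made-up values (the target path must already hold a valid
# Subversion repository, e.g. one created via create_repository):
#
#   remote.import_remote_repository(
#       {'path': '/srv/svn/mirror'},
#       'https://alice:s3cret@svn.example.com/project')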
415 def commit(self, wire, message, author, timestamp, updated, removed):
412 def commit(self, wire, message, author, timestamp, updated, removed):
416 assert isinstance(message, str)
413 assert isinstance(message, str)
417 assert isinstance(author, str)
414 assert isinstance(author, str)
418
415
419 repo = self._factory.repo(wire)
416 repo = self._factory.repo(wire)
420 fsobj = svn.repos.fs(repo)
417 fsobj = svn.repos.fs(repo)
421
418
422 rev = svn.fs.youngest_rev(fsobj)
419 rev = svn.fs.youngest_rev(fsobj)
423 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
420 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
424 txn_root = svn.fs.txn_root(txn)
421 txn_root = svn.fs.txn_root(txn)
425
422
426 for node in updated:
423 for node in updated:
427 TxnNodeProcessor(node, txn_root).update()
424 TxnNodeProcessor(node, txn_root).update()
428 for node in removed:
425 for node in removed:
429 TxnNodeProcessor(node, txn_root).remove()
426 TxnNodeProcessor(node, txn_root).remove()
430
427
431 commit_id = svn.repos.fs_commit_txn(repo, txn)
428 commit_id = svn.repos.fs_commit_txn(repo, txn)
432
429
433 if timestamp:
430 if timestamp:
434 apr_time = apr_time_t(timestamp)
431 apr_time = apr_time_t(timestamp)
435 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
432 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
436 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
433 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
437
434
438 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
435 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
439 return commit_id
436 return commit_id
440
437
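# Illustrative sketch of the node dicts `commit` expects (see TxnNodeProcessor
# further down): every node needs a str 'path'; updated nodes also carry a
# str 'content' and optional 'properties'. All values below are made up:
#
#   updated = [{'path': 'docs/readme.txt',
#               'content': 'hello subversion\n',
#               'properties': {'svn:eol-style': 'native'}}]
#   removed = [{'path': 'old/obsolete.txt'}]
#   commit_id = remote.commit(
#       wire, message='Update docs', author='Alice <alice@example.com>',
#       timestamp=1546300800, updated=updated, removed=removed)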
441 def diff(self, wire, rev1, rev2, path1=None, path2=None,
438 def diff(self, wire, rev1, rev2, path1=None, path2=None,
442 ignore_whitespace=False, context=3):
439 ignore_whitespace=False, context=3):
443
440
444 wire.update(cache=False)
441 wire.update(cache=False)
445 repo = self._factory.repo(wire)
442 repo = self._factory.repo(wire)
446 diff_creator = SvnDiffer(
443 diff_creator = SvnDiffer(
447 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
444 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
448 try:
445 try:
449 return diff_creator.generate_diff()
446 return diff_creator.generate_diff()
450 except svn.core.SubversionException as e:
447 except svn.core.SubversionException as e:
451 log.exception(
448 log.exception(
452 "Error during diff operation operation. "
449 "Error during diff operation operation. "
453 "Path might not exist %s, %s" % (path1, path2))
450 "Path might not exist %s, %s" % (path1, path2))
454 return ""
451 return ""
455
452
456 @reraise_safe_exceptions
453 @reraise_safe_exceptions
457 def is_large_file(self, wire, path):
454 def is_large_file(self, wire, path):
458 return False
455 return False
459
456
460 @reraise_safe_exceptions
457 @reraise_safe_exceptions
461 def run_svn_command(self, wire, cmd, **opts):
458 def run_svn_command(self, wire, cmd, **opts):
462 path = wire.get('path', None)
459 path = wire.get('path', None)
463
460
464 if path and os.path.isdir(path):
461 if path and os.path.isdir(path):
465 opts['cwd'] = path
462 opts['cwd'] = path
466
463
467 safe_call = False
464 safe_call = False
468 if '_safe' in opts:
465 if '_safe' in opts:
469 safe_call = True
466 safe_call = True
470
467
471 svnenv = os.environ.copy()
468 svnenv = os.environ.copy()
472 svnenv.update(opts.pop('extra_env', {}))
469 svnenv.update(opts.pop('extra_env', {}))
473
470
474 _opts = {'env': svnenv, 'shell': False}
471 _opts = {'env': svnenv, 'shell': False}
475
472
476 try:
473 try:
477 _opts.update(opts)
474 _opts.update(opts)
478 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
475 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
479
476
480 return ''.join(p), ''.join(p.error)
477 return ''.join(p), ''.join(p.error)
481 except (EnvironmentError, OSError) as err:
478 except (EnvironmentError, OSError) as err:
482 cmd = ' '.join(cmd) # human friendly CMD
479 cmd = ' '.join(cmd) # human friendly CMD
483 tb_err = ("Couldn't run svn command (%s).\n"
480 tb_err = ("Couldn't run svn command (%s).\n"
484 "Original error was:%s\n"
481 "Original error was:%s\n"
485 "Call options:%s\n"
482 "Call options:%s\n"
486 % (cmd, err, _opts))
483 % (cmd, err, _opts))
487 log.exception(tb_err)
484 log.exception(tb_err)
488 if safe_call:
485 if safe_call:
489 return '', err
486 return '', err
490 else:
487 else:
491 raise exceptions.VcsException()(tb_err)
488 raise exceptions.VcsException()(tb_err)
492
489
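# Usage sketch (command values are illustrative): the command list is handed
# to SubprocessIOChunker and the repository path becomes the working directory
# when it exists; per the error handling above, a '_safe' entry in the options
# makes environment errors come back as ('', err) instead of raising
# VcsException:
#
#   stdout, stderr = remote.run_svn_command(
#       wire, ['svn', 'info', '--xml', wire['path']])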
493 @reraise_safe_exceptions
490 @reraise_safe_exceptions
494 def install_hooks(self, wire, force=False):
491 def install_hooks(self, wire, force=False):
495 from vcsserver.hook_utils import install_svn_hooks
492 from vcsserver.hook_utils import install_svn_hooks
496 repo_path = wire['path']
493 repo_path = wire['path']
497 binary_dir = settings.BINARY_DIR
494 binary_dir = settings.BINARY_DIR
498 executable = None
495 executable = None
499 if binary_dir:
496 if binary_dir:
500 executable = os.path.join(binary_dir, 'python')
497 executable = os.path.join(binary_dir, 'python')
501 return install_svn_hooks(
498 return install_svn_hooks(
502 repo_path, executable=executable, force_create=force)
499 repo_path, executable=executable, force_create=force)
503
500
504 @reraise_safe_exceptions
501 @reraise_safe_exceptions
505 def get_hooks_info(self, wire):
502 def get_hooks_info(self, wire):
506 from vcsserver.hook_utils import (
503 from vcsserver.hook_utils import (
507 get_svn_pre_hook_version, get_svn_post_hook_version)
504 get_svn_pre_hook_version, get_svn_post_hook_version)
508 repo_path = wire['path']
505 repo_path = wire['path']
509 return {
506 return {
510 'pre_version': get_svn_pre_hook_version(repo_path),
507 'pre_version': get_svn_pre_hook_version(repo_path),
511 'post_version': get_svn_post_hook_version(repo_path),
508 'post_version': get_svn_post_hook_version(repo_path),
512 }
509 }
513
510
514
511
515 class SvnDiffer(object):
512 class SvnDiffer(object):
516 """
513 """
517 Utility to create diffs based on difflib and the Subversion api
514 Utility to create diffs based on difflib and the Subversion api
518 """
515 """
519
516
520 binary_content = False
517 binary_content = False
521
518
522 def __init__(
519 def __init__(
523 self, repo, src_rev, src_path, tgt_rev, tgt_path,
520 self, repo, src_rev, src_path, tgt_rev, tgt_path,
524 ignore_whitespace, context):
521 ignore_whitespace, context):
525 self.repo = repo
522 self.repo = repo
526 self.ignore_whitespace = ignore_whitespace
523 self.ignore_whitespace = ignore_whitespace
527 self.context = context
524 self.context = context
528
525
529 fsobj = svn.repos.fs(repo)
526 fsobj = svn.repos.fs(repo)
530
527
531 self.tgt_rev = tgt_rev
528 self.tgt_rev = tgt_rev
532 self.tgt_path = tgt_path or ''
529 self.tgt_path = tgt_path or ''
533 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
530 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
534 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
531 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
535
532
536 self.src_rev = src_rev
533 self.src_rev = src_rev
537 self.src_path = src_path or self.tgt_path
534 self.src_path = src_path or self.tgt_path
538 self.src_root = svn.fs.revision_root(fsobj, src_rev)
535 self.src_root = svn.fs.revision_root(fsobj, src_rev)
539 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
536 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
540
537
541 self._validate()
538 self._validate()
542
539
543 def _validate(self):
540 def _validate(self):
544 if (self.tgt_kind != svn.core.svn_node_none and
541 if (self.tgt_kind != svn.core.svn_node_none and
545 self.src_kind != svn.core.svn_node_none and
542 self.src_kind != svn.core.svn_node_none and
546 self.src_kind != self.tgt_kind):
543 self.src_kind != self.tgt_kind):
547 # TODO: johbo: proper error handling
544 # TODO: johbo: proper error handling
548 raise Exception(
545 raise Exception(
549 "Source and target are not compatible for diff generation. "
546 "Source and target are not compatible for diff generation. "
550 "Source type: %s, target type: %s" %
547 "Source type: %s, target type: %s" %
551 (self.src_kind, self.tgt_kind))
548 (self.src_kind, self.tgt_kind))
552
549
553 def generate_diff(self):
550 def generate_diff(self):
554 buf = StringIO.StringIO()
551 buf = StringIO.StringIO()
555 if self.tgt_kind == svn.core.svn_node_dir:
552 if self.tgt_kind == svn.core.svn_node_dir:
556 self._generate_dir_diff(buf)
553 self._generate_dir_diff(buf)
557 else:
554 else:
558 self._generate_file_diff(buf)
555 self._generate_file_diff(buf)
559 return buf.getvalue()
556 return buf.getvalue()
560
557
561 def _generate_dir_diff(self, buf):
558 def _generate_dir_diff(self, buf):
562 editor = DiffChangeEditor()
559 editor = DiffChangeEditor()
563 editor_ptr, editor_baton = svn.delta.make_editor(editor)
560 editor_ptr, editor_baton = svn.delta.make_editor(editor)
564 svn.repos.dir_delta2(
561 svn.repos.dir_delta2(
565 self.src_root,
562 self.src_root,
566 self.src_path,
563 self.src_path,
567 '', # src_entry
564 '', # src_entry
568 self.tgt_root,
565 self.tgt_root,
569 self.tgt_path,
566 self.tgt_path,
570 editor_ptr, editor_baton,
567 editor_ptr, editor_baton,
571 authorization_callback_allow_all,
568 authorization_callback_allow_all,
572 False, # text_deltas
569 False, # text_deltas
573 svn.core.svn_depth_infinity, # depth
570 svn.core.svn_depth_infinity, # depth
574 False, # entry_props
571 False, # entry_props
575 False, # ignore_ancestry
572 False, # ignore_ancestry
576 )
573 )
577
574
578 for path, __, change in sorted(editor.changes):
575 for path, __, change in sorted(editor.changes):
579 self._generate_node_diff(
576 self._generate_node_diff(
580 buf, change, path, self.tgt_path, path, self.src_path)
577 buf, change, path, self.tgt_path, path, self.src_path)
581
578
582 def _generate_file_diff(self, buf):
579 def _generate_file_diff(self, buf):
583 change = None
580 change = None
584 if self.src_kind == svn.core.svn_node_none:
581 if self.src_kind == svn.core.svn_node_none:
585 change = "add"
582 change = "add"
586 elif self.tgt_kind == svn.core.svn_node_none:
583 elif self.tgt_kind == svn.core.svn_node_none:
587 change = "delete"
584 change = "delete"
588 tgt_base, tgt_path = vcspath.split(self.tgt_path)
585 tgt_base, tgt_path = vcspath.split(self.tgt_path)
589 src_base, src_path = vcspath.split(self.src_path)
586 src_base, src_path = vcspath.split(self.src_path)
590 self._generate_node_diff(
587 self._generate_node_diff(
591 buf, change, tgt_path, tgt_base, src_path, src_base)
588 buf, change, tgt_path, tgt_base, src_path, src_base)
592
589
593 def _generate_node_diff(
590 def _generate_node_diff(
594 self, buf, change, tgt_path, tgt_base, src_path, src_base):
591 self, buf, change, tgt_path, tgt_base, src_path, src_base):
595
592
596 if self.src_rev == self.tgt_rev and tgt_base == src_base:
593 if self.src_rev == self.tgt_rev and tgt_base == src_base:
597 # keep behaviour consistent with git/hg: return an empty diff when
594 # keep behaviour consistent with git/hg: return an empty diff when
598 # comparing the same revision
595 # comparing the same revision
599 return
596 return
600
597
601 tgt_full_path = vcspath.join(tgt_base, tgt_path)
598 tgt_full_path = vcspath.join(tgt_base, tgt_path)
602 src_full_path = vcspath.join(src_base, src_path)
599 src_full_path = vcspath.join(src_base, src_path)
603
600
604 self.binary_content = False
601 self.binary_content = False
605 mime_type = self._get_mime_type(tgt_full_path)
602 mime_type = self._get_mime_type(tgt_full_path)
606
603
607 if mime_type and not mime_type.startswith('text'):
604 if mime_type and not mime_type.startswith('text'):
608 self.binary_content = True
605 self.binary_content = True
609 buf.write("=" * 67 + '\n')
606 buf.write("=" * 67 + '\n')
610 buf.write("Cannot display: file marked as a binary type.\n")
607 buf.write("Cannot display: file marked as a binary type.\n")
611 buf.write("svn:mime-type = %s\n" % mime_type)
608 buf.write("svn:mime-type = %s\n" % mime_type)
612 buf.write("Index: %s\n" % (tgt_path, ))
609 buf.write("Index: %s\n" % (tgt_path, ))
613 buf.write("=" * 67 + '\n')
610 buf.write("=" * 67 + '\n')
614 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
611 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
615 'tgt_path': tgt_path})
612 'tgt_path': tgt_path})
616
613
617 if change == 'add':
614 if change == 'add':
618 # TODO: johbo: SVN is missing a zero here compared to git
615 # TODO: johbo: SVN is missing a zero here compared to git
619 buf.write("new file mode 10644\n")
616 buf.write("new file mode 10644\n")
620
617
621 # TODO(marcink): introduce binary detection of svn patches
618 # TODO(marcink): introduce binary detection of svn patches
622 # if self.binary_content:
619 # if self.binary_content:
623 # buf.write('GIT binary patch\n')
620 # buf.write('GIT binary patch\n')
624
621
625 buf.write("--- /dev/null\t(revision 0)\n")
622 buf.write("--- /dev/null\t(revision 0)\n")
626 src_lines = []
623 src_lines = []
627 else:
624 else:
628 if change == 'delete':
625 if change == 'delete':
629 buf.write("deleted file mode 10644\n")
626 buf.write("deleted file mode 10644\n")
630
627
631 # TODO(marcink): introduce binary detection of svn patches
628 # TODO(marcink): introduce binary detection of svn patches
632 # if self.binary_content:
629 # if self.binary_content:
633 # buf.write('GIT binary patch\n')
630 # buf.write('GIT binary patch\n')
634
631
635 buf.write("--- a/%s\t(revision %s)\n" % (
632 buf.write("--- a/%s\t(revision %s)\n" % (
636 src_path, self.src_rev))
633 src_path, self.src_rev))
637 src_lines = self._svn_readlines(self.src_root, src_full_path)
634 src_lines = self._svn_readlines(self.src_root, src_full_path)
638
635
639 if change == 'delete':
636 if change == 'delete':
640 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
637 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
641 tgt_lines = []
638 tgt_lines = []
642 else:
639 else:
643 buf.write("+++ b/%s\t(revision %s)\n" % (
640 buf.write("+++ b/%s\t(revision %s)\n" % (
644 tgt_path, self.tgt_rev))
641 tgt_path, self.tgt_rev))
645 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
642 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
646
643
647 if not self.binary_content:
644 if not self.binary_content:
648 udiff = svn_diff.unified_diff(
645 udiff = svn_diff.unified_diff(
649 src_lines, tgt_lines, context=self.context,
646 src_lines, tgt_lines, context=self.context,
650 ignore_blank_lines=self.ignore_whitespace,
647 ignore_blank_lines=self.ignore_whitespace,
651 ignore_case=False,
648 ignore_case=False,
652 ignore_space_changes=self.ignore_whitespace)
649 ignore_space_changes=self.ignore_whitespace)
653 buf.writelines(udiff)
650 buf.writelines(udiff)
654
651
655 def _get_mime_type(self, path):
652 def _get_mime_type(self, path):
656 try:
653 try:
657 mime_type = svn.fs.node_prop(
654 mime_type = svn.fs.node_prop(
658 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
655 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
659 except svn.core.SubversionException:
656 except svn.core.SubversionException:
660 mime_type = svn.fs.node_prop(
657 mime_type = svn.fs.node_prop(
661 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
658 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
662 return mime_type
659 return mime_type
663
660
664 def _svn_readlines(self, fs_root, node_path):
661 def _svn_readlines(self, fs_root, node_path):
665 if self.binary_content:
662 if self.binary_content:
666 return []
663 return []
667 node_kind = svn.fs.check_path(fs_root, node_path)
664 node_kind = svn.fs.check_path(fs_root, node_path)
668 if node_kind not in (
665 if node_kind not in (
669 svn.core.svn_node_file, svn.core.svn_node_symlink):
666 svn.core.svn_node_file, svn.core.svn_node_symlink):
670 return []
667 return []
671 content = svn.core.Stream(
668 content = svn.core.Stream(
672 svn.fs.file_contents(fs_root, node_path)).read()
669 svn.fs.file_contents(fs_root, node_path)).read()
673 return content.splitlines(True)
670 return content.splitlines(True)
674
671
675
672
676
673
677 class DiffChangeEditor(svn.delta.Editor):
674 class DiffChangeEditor(svn.delta.Editor):
678 """
675 """
679 Records changes between two given revisions
676 Records changes between two given revisions
680 """
677 """
681
678
682 def __init__(self):
679 def __init__(self):
683 self.changes = []
680 self.changes = []
684
681
685 def delete_entry(self, path, revision, parent_baton, pool=None):
682 def delete_entry(self, path, revision, parent_baton, pool=None):
686 self.changes.append((path, None, 'delete'))
683 self.changes.append((path, None, 'delete'))
687
684
688 def add_file(
685 def add_file(
689 self, path, parent_baton, copyfrom_path, copyfrom_revision,
686 self, path, parent_baton, copyfrom_path, copyfrom_revision,
690 file_pool=None):
687 file_pool=None):
691 self.changes.append((path, 'file', 'add'))
688 self.changes.append((path, 'file', 'add'))
692
689
693 def open_file(self, path, parent_baton, base_revision, file_pool=None):
690 def open_file(self, path, parent_baton, base_revision, file_pool=None):
694 self.changes.append((path, 'file', 'change'))
691 self.changes.append((path, 'file', 'change'))
695
692
696
693
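# Illustrative note: this editor is driven by svn.repos.dir_delta2 (see
# SvnDiffer._generate_dir_diff above), which fills `changes` with
# (path, kind, action) tuples; made-up example of the collected result:
#
#   editor = DiffChangeEditor()
#   editor_ptr, editor_baton = svn.delta.make_editor(editor)
#   # ... after svn.repos.dir_delta2(...) has walked the two roots:
#   # editor.changes == [('docs/index.rst', 'file', 'change'),
#   #                    ('obsolete.txt', None, 'delete')]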
697 def authorization_callback_allow_all(root, path, pool):
694 def authorization_callback_allow_all(root, path, pool):
698 return True
695 return True
699
696
700
697
701 class TxnNodeProcessor(object):
698 class TxnNodeProcessor(object):
702 """
699 """
703 Utility to process the change of one node within a transaction root.
700 Utility to process the change of one node within a transaction root.
704
701
705 It encapsulates the knowledge of how to add, update or remove
702 It encapsulates the knowledge of how to add, update or remove
706 a node for a given transaction root. The purpose is to support the method
703 a node for a given transaction root. The purpose is to support the method
707 `SvnRemote.commit`.
704 `SvnRemote.commit`.
708 """
705 """
709
706
710 def __init__(self, node, txn_root):
707 def __init__(self, node, txn_root):
711 assert isinstance(node['path'], str)
708 assert isinstance(node['path'], str)
712
709
713 self.node = node
710 self.node = node
714 self.txn_root = txn_root
711 self.txn_root = txn_root
715
712
716 def update(self):
713 def update(self):
717 self._ensure_parent_dirs()
714 self._ensure_parent_dirs()
718 self._add_file_if_node_does_not_exist()
715 self._add_file_if_node_does_not_exist()
719 self._update_file_content()
716 self._update_file_content()
720 self._update_file_properties()
717 self._update_file_properties()
721
718
722 def remove(self):
719 def remove(self):
723 svn.fs.delete(self.txn_root, self.node['path'])
720 svn.fs.delete(self.txn_root, self.node['path'])
724 # TODO: Clean up directory if empty
721 # TODO: Clean up directory if empty
725
722
726 def _ensure_parent_dirs(self):
723 def _ensure_parent_dirs(self):
727 curdir = vcspath.dirname(self.node['path'])
724 curdir = vcspath.dirname(self.node['path'])
728 dirs_to_create = []
725 dirs_to_create = []
729 while not self._svn_path_exists(curdir):
726 while not self._svn_path_exists(curdir):
730 dirs_to_create.append(curdir)
727 dirs_to_create.append(curdir)
731 curdir = vcspath.dirname(curdir)
728 curdir = vcspath.dirname(curdir)
732
729
733 for curdir in reversed(dirs_to_create):
730 for curdir in reversed(dirs_to_create):
734 log.debug('Creating missing directory "%s"', curdir)
731 log.debug('Creating missing directory "%s"', curdir)
735 svn.fs.make_dir(self.txn_root, curdir)
732 svn.fs.make_dir(self.txn_root, curdir)
736
733
737 def _svn_path_exists(self, path):
734 def _svn_path_exists(self, path):
738 path_status = svn.fs.check_path(self.txn_root, path)
735 path_status = svn.fs.check_path(self.txn_root, path)
739 return path_status != svn.core.svn_node_none
736 return path_status != svn.core.svn_node_none
740
737
741 def _add_file_if_node_does_not_exist(self):
738 def _add_file_if_node_does_not_exist(self):
742 kind = svn.fs.check_path(self.txn_root, self.node['path'])
739 kind = svn.fs.check_path(self.txn_root, self.node['path'])
743 if kind == svn.core.svn_node_none:
740 if kind == svn.core.svn_node_none:
744 svn.fs.make_file(self.txn_root, self.node['path'])
741 svn.fs.make_file(self.txn_root, self.node['path'])
745
742
746 def _update_file_content(self):
743 def _update_file_content(self):
747 assert isinstance(self.node['content'], str)
744 assert isinstance(self.node['content'], str)
748 handler, baton = svn.fs.apply_textdelta(
745 handler, baton = svn.fs.apply_textdelta(
749 self.txn_root, self.node['path'], None, None)
746 self.txn_root, self.node['path'], None, None)
750 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
747 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
751
748
752 def _update_file_properties(self):
749 def _update_file_properties(self):
753 properties = self.node.get('properties', {})
750 properties = self.node.get('properties', {})
754 for key, value in properties.iteritems():
751 for key, value in properties.iteritems():
755 svn.fs.change_node_prop(
752 svn.fs.change_node_prop(
756 self.txn_root, self.node['path'], key, value)
753 self.txn_root, self.node['path'], key, value)
757
754
758
755
759 def apr_time_t(timestamp):
756 def apr_time_t(timestamp):
760 """
757 """
761 Convert a Python timestamp into APR timestamp type apr_time_t
758 Convert a Python timestamp into APR timestamp type apr_time_t
762 """
759 """
763 return timestamp * 1E6
760 return timestamp * 1E6
764
761
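# Example (assumed epoch value): APR expresses time in microseconds since the
# epoch, so a Unix timestamp is simply scaled by 1e6:
#
#   apr_time_t(1546300800)   # 2019-01-01 00:00:00 UTC -> 1.5463008e+15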
765
762
766 def svn_opt_revision_value_t(num):
763 def svn_opt_revision_value_t(num):
767 """
764 """
768 Put `num` into a `svn_opt_revision_value_t` structure.
765 Put `num` into a `svn_opt_revision_value_t` structure.
769 """
766 """
770 value = svn.core.svn_opt_revision_value_t()
767 value = svn.core.svn_opt_revision_value_t()
771 value.number = num
768 value.number = num
772 revision = svn.core.svn_opt_revision_t()
769 revision = svn.core.svn_opt_revision_t()
773 revision.kind = svn.core.svn_opt_revision_number
770 revision.kind = svn.core.svn_opt_revision_number
774 revision.value = value
771 revision.value = value
775 return revision
772 return revision
@@ -1,165 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.pull(
81 self.remote_git.pull(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire=None, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
107
108
102
109 class TestReraiseSafeExceptions(object):
103 class TestReraiseSafeExceptions(object):
104
110 def test_method_decorated_with_reraise_safe_exceptions(self):
105 def test_method_decorated_with_reraise_safe_exceptions(self):
111 factory = Mock()
106 factory = Mock()
112 git_remote = git.GitRemote(factory)
107 git_remote = git.GitRemote(factory)
113
108
114 def fake_function():
109 def fake_function():
115 return None
110 return None
116
111
117 decorator = git.reraise_safe_exceptions(fake_function)
112 decorator = git.reraise_safe_exceptions(fake_function)
118
113
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 for method_name, method in methods:
115 for method_name, method in methods:
121 if not method_name.startswith('_'):
116 if not method_name.startswith('_'):
122 assert method.im_func.__code__ == decorator.__code__
117 assert method.im_func.__code__ == decorator.__code__
123
118
124 @pytest.mark.parametrize('side_effect, expected_type', [
119 @pytest.mark.parametrize('side_effect, expected_type', [
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.HangupException(), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 ])
126 ])
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 @git.reraise_safe_exceptions
128 @git.reraise_safe_exceptions
134 def fake_method():
129 def fake_method():
135 raise side_effect
130 raise side_effect
136
131
137 with pytest.raises(Exception) as exc_info:
132 with pytest.raises(Exception) as exc_info:
138 fake_method()
133 fake_method()
139 assert type(exc_info.value) == Exception
134 assert type(exc_info.value) == Exception
140 assert exc_info.value._vcs_kind == expected_type
135 assert exc_info.value._vcs_kind == expected_type
141
136
142
137
143 class TestDulwichRepoWrapper(object):
138 class TestDulwichRepoWrapper(object):
144 def test_calls_close_on_delete(self):
139 def test_calls_close_on_delete(self):
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 with isdir_patcher:
141 with isdir_patcher:
147 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 del repo
144 del repo
150 close_mock.assert_called_once_with()
145 close_mock.assert_called_once_with()
151
146
152
147
153 class TestGitFactory(object):
148 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
149 def test_create_repo_returns_dulwich_wrapper(self):
155
150
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 mock.side_effect = {'repo_objects': ''}
152 mock.side_effect = {'repo_objects': ''}
158 factory = git.GitFactory()
153 factory = git.GitFactory()
159 wire = {
154 wire = {
160 'path': '/tmp/abcde'
155 'path': '/tmp/abcde'
161 }
156 }
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 with isdir_patcher:
158 with isdir_patcher:
164 result = factory._create_repo(wire, True)
159 result = factory._create_repo(wire, True)
165 assert isinstance(result, git.Repo)
160 assert isinstance(result, git.Repo)