exception-handling: better handling of remote exception and logging....
Author: marcink
Revision: r171:c608ea73 (branch: default)
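The change below centralizes one idiom: when a backend call raises an unexpected error, the server logs it and re-raises it as a neutral vcsserver exception type while keeping the original arguments and traceback. A minimal, self-contained sketch of that idiom (Python 2 syntax, matching the project; the names here are illustrative, not project API):

import sys


class NeutralError(Exception):
    """Stand-in for a vcsserver exception type."""


def raise_from_original(new_type):
    # Re-raise the exception currently being handled as new_type,
    # keeping its args and its traceback (Python 2 three-argument raise).
    _, exc_value, exc_traceback = sys.exc_info()
    try:
        raise new_type(*exc_value.args), None, exc_traceback
    finally:
        del exc_traceback  # avoid keeping a reference cycle alive


try:
    int('not-a-number')  # stand-in for a failing backend call
except Exception:
    try:
        raise_from_original(NeutralError)
    except NeutralError as exc:
        print exc.args  # original ValueError args, traceback preserved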
@@ -1,82 +1,98 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
19 import traceback
18 import logging
20 import logging
19 import urlparse
21 import urlparse
20
22
21 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
22
24
23
25
24 class RepoFactory(object):
26 class RepoFactory(object):
25 """
27 """
26 Utility to create instances of repository
28 Utility to create instances of repository
27
29
28 It provides internal caching of the `repo` object based on
30 It provides internal caching of the `repo` object based on
29 the :term:`call context`.
31 the :term:`call context`.
30 """
32 """
31
33
32 def __init__(self, repo_cache):
34 def __init__(self, repo_cache):
33 self._cache = repo_cache
35 self._cache = repo_cache
34
36
35 def _create_config(self, path, config):
37 def _create_config(self, path, config):
36 config = {}
38 config = {}
37 return config
39 return config
38
40
39 def _create_repo(self, wire, create):
41 def _create_repo(self, wire, create):
40 raise NotImplementedError()
42 raise NotImplementedError()
41
43
42 def repo(self, wire, create=False):
44 def repo(self, wire, create=False):
43 """
45 """
44 Get a repository instance for the given path.
46 Get a repository instance for the given path.
45
47
46 Uses internally the low level beaker API since the decorators introduce
48 Uses internally the low level beaker API since the decorators introduce
47 significant overhead.
49 significant overhead.
48 """
50 """
49 def create_new_repo():
51 def create_new_repo():
50 return self._create_repo(wire, create)
52 return self._create_repo(wire, create)
51
53
52 return self._repo(wire, create_new_repo)
54 return self._repo(wire, create_new_repo)
53
55
54 def _repo(self, wire, createfunc):
56 def _repo(self, wire, createfunc):
55 context = wire.get('context', None)
57 context = wire.get('context', None)
56 cache = wire.get('cache', True)
58 cache = wire.get('cache', True)
57
59
58 if context and cache:
60 if context and cache:
59 cache_key = (context, wire['path'])
61 cache_key = (context, wire['path'])
60 log.debug(
62 log.debug(
61 'FETCH %s@%s repo object from cache. Context: %s',
63 'FETCH %s@%s repo object from cache. Context: %s',
62 self.__class__.__name__, wire['path'], context)
64 self.__class__.__name__, wire['path'], context)
63 return self._cache.get(key=cache_key, createfunc=createfunc)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
64 else:
66 else:
65 log.debug(
67 log.debug(
66 'INIT %s@%s repo object based on wire %s. Context: %s',
68 'INIT %s@%s repo object based on wire %s. Context: %s',
67 self.__class__.__name__, wire['path'], wire, context)
69 self.__class__.__name__, wire['path'], wire, context)
68 return createfunc()
70 return createfunc()
69
71
70
72
71 def obfuscate_qs(query_string):
73 def obfuscate_qs(query_string):
72 if query_string is None:
74 if query_string is None:
73 return None
75 return None
74
76
75 parsed = []
77 parsed = []
76 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
77 if k in ['auth_token', 'api_key']:
79 if k in ['auth_token', 'api_key']:
78 v = "*****"
80 v = "*****"
79 parsed.append((k, v))
81 parsed.append((k, v))
80
82
81 return '&'.join('{}{}'.format(
83 return '&'.join('{}{}'.format(
82 k, '={}'.format(v) if v else '') for k, v in parsed)
84 k, '={}'.format(v) if v else '') for k, v in parsed)
85
86
87 def raise_from_original(new_type):
88 """
89 Raise a new exception type with original args and traceback.
90 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
92
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
95 try:
96 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
98 del exc_traceback
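In the git.py hunk that follows, the reraise_safe_exceptions decorator gains a catch-all branch: any error that is not already a vcsserver exception (the code checks for a _vcs_kind attribute) is logged with its traceback and then re-raised through the shared helper as exceptions.UnhandledException. A self-contained sketch of that behaviour, using stand-in classes rather than the real vcsserver.exceptions module:

import logging
import sys
from functools import wraps

logging.basicConfig()
log = logging.getLogger(__name__)


class UnhandledException(Exception):
    # Stand-in for exceptions.UnhandledException; the _vcs_kind marker is
    # an assumption based on the hasattr() check in the decorator below.
    _vcs_kind = 'unhandled'


def raise_from_original(new_type):
    # Same helper as the one added to vcsserver.base above.
    _, exc_value, exc_traceback = sys.exc_info()
    try:
        raise new_type(*exc_value.args), None, exc_traceback
    finally:
        del exc_traceback


def reraise_safe_exceptions(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                # unexpected backend error: log it, then convert it
                log.exception("Unhandled exception in remote call")
                raise_from_original(UnhandledException)
            raise  # already a vcsserver exception, pass it through
    return wrapper


@reraise_safe_exceptions
def broken_remote_call():
    raise RuntimeError('backend failure')  # hypothetical backend error


try:
    broken_remote_call()
except UnhandledException as exc:
    print exc.args  # ('backend failure',), original traceback kept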
@@ -1,581 +1,586 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41
41
42
42
43 DIR_STAT = stat.S_IFDIR
43 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
44 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
45 GIT_LINK = objects.S_IFGITLINK
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def reraise_safe_exceptions(func):
50 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
51 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
52 @wraps(func)
53 def wrapper(*args, **kwargs):
53 def wrapper(*args, **kwargs):
54 try:
54 try:
55 return func(*args, **kwargs)
55 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
57 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
58 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
59 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
60 raise exceptions.VcsException(e.message)
61 except Exception as e:
62 if not hasattr(e, '_vcs_kind'):
63 log.exception("Unhandled exception in git remote call")
64 raise_from_original(exceptions.UnhandledException)
65 raise
61 return wrapper
66 return wrapper
62
67
63
68
64 class Repo(DulwichRepo):
69 class Repo(DulwichRepo):
65 """
70 """
66 A wrapper for dulwich Repo class.
71 A wrapper for dulwich Repo class.
67
72
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
73 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 "Too many open files" error. We need to close all opened file descriptors
74 "Too many open files" error. We need to close all opened file descriptors
70 once the repo object is destroyed.
75 once the repo object is destroyed.
71
76
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
77 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 to 0.12.0 +
78 to 0.12.0 +
74 """
79 """
75 def __del__(self):
80 def __del__(self):
76 if hasattr(self, 'object_store'):
81 if hasattr(self, 'object_store'):
77 self.close()
82 self.close()
78
83
79
84
80 class GitFactory(RepoFactory):
85 class GitFactory(RepoFactory):
81
86
82 def _create_repo(self, wire, create):
87 def _create_repo(self, wire, create):
83 repo_path = str_to_dulwich(wire['path'])
88 repo_path = str_to_dulwich(wire['path'])
84 return Repo(repo_path)
89 return Repo(repo_path)
85
90
86
91
87 class GitRemote(object):
92 class GitRemote(object):
88
93
89 def __init__(self, factory):
94 def __init__(self, factory):
90 self._factory = factory
95 self._factory = factory
91
96
92 self._bulk_methods = {
97 self._bulk_methods = {
93 "author": self.commit_attribute,
98 "author": self.commit_attribute,
94 "date": self.get_object_attrs,
99 "date": self.get_object_attrs,
95 "message": self.commit_attribute,
100 "message": self.commit_attribute,
96 "parents": self.commit_attribute,
101 "parents": self.commit_attribute,
97 "_commit": self.revision,
102 "_commit": self.revision,
98 }
103 }
99
104
100 def _assign_ref(self, wire, ref, commit_id):
105 def _assign_ref(self, wire, ref, commit_id):
101 repo = self._factory.repo(wire)
106 repo = self._factory.repo(wire)
102 repo[ref] = commit_id
107 repo[ref] = commit_id
103
108
104 @reraise_safe_exceptions
109 @reraise_safe_exceptions
105 def add_object(self, wire, content):
110 def add_object(self, wire, content):
106 repo = self._factory.repo(wire)
111 repo = self._factory.repo(wire)
107 blob = objects.Blob()
112 blob = objects.Blob()
108 blob.set_raw_string(content)
113 blob.set_raw_string(content)
109 repo.object_store.add_object(blob)
114 repo.object_store.add_object(blob)
110 return blob.id
115 return blob.id
111
116
112 @reraise_safe_exceptions
117 @reraise_safe_exceptions
113 def assert_correct_path(self, wire):
118 def assert_correct_path(self, wire):
114 try:
119 try:
115 self._factory.repo(wire)
120 self._factory.repo(wire)
116 except NotGitRepository as e:
121 except NotGitRepository as e:
117 # Exception can contain unicode which we convert
122 # Exception can contain unicode which we convert
118 raise exceptions.AbortException(repr(e))
123 raise exceptions.AbortException(repr(e))
119
124
120 @reraise_safe_exceptions
125 @reraise_safe_exceptions
121 def bare(self, wire):
126 def bare(self, wire):
122 repo = self._factory.repo(wire)
127 repo = self._factory.repo(wire)
123 return repo.bare
128 return repo.bare
124
129
125 @reraise_safe_exceptions
130 @reraise_safe_exceptions
126 def blob_as_pretty_string(self, wire, sha):
131 def blob_as_pretty_string(self, wire, sha):
127 repo = self._factory.repo(wire)
132 repo = self._factory.repo(wire)
128 return repo[sha].as_pretty_string()
133 return repo[sha].as_pretty_string()
129
134
130 @reraise_safe_exceptions
135 @reraise_safe_exceptions
131 def blob_raw_length(self, wire, sha):
136 def blob_raw_length(self, wire, sha):
132 repo = self._factory.repo(wire)
137 repo = self._factory.repo(wire)
133 blob = repo[sha]
138 blob = repo[sha]
134 return blob.raw_length()
139 return blob.raw_length()
135
140
136 @reraise_safe_exceptions
141 @reraise_safe_exceptions
137 def bulk_request(self, wire, rev, pre_load):
142 def bulk_request(self, wire, rev, pre_load):
138 result = {}
143 result = {}
139 for attr in pre_load:
144 for attr in pre_load:
140 try:
145 try:
141 method = self._bulk_methods[attr]
146 method = self._bulk_methods[attr]
142 args = [wire, rev]
147 args = [wire, rev]
143 if attr == "date":
148 if attr == "date":
144 args.extend(["commit_time", "commit_timezone"])
149 args.extend(["commit_time", "commit_timezone"])
145 elif attr in ["author", "message", "parents"]:
150 elif attr in ["author", "message", "parents"]:
146 args.append(attr)
151 args.append(attr)
147 result[attr] = method(*args)
152 result[attr] = method(*args)
148 except KeyError:
153 except KeyError:
149 raise exceptions.VcsException(
154 raise exceptions.VcsException(
150 "Unknown bulk attribute: %s" % attr)
155 "Unknown bulk attribute: %s" % attr)
151 return result
156 return result
152
157
153 def _build_opener(self, url):
158 def _build_opener(self, url):
154 handlers = []
159 handlers = []
155 url_obj = url_parser(url)
160 url_obj = url_parser(url)
156 _, authinfo = url_obj.authinfo()
161 _, authinfo = url_obj.authinfo()
157
162
158 if authinfo:
163 if authinfo:
159 # create a password manager
164 # create a password manager
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
165 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 passmgr.add_password(*authinfo)
166 passmgr.add_password(*authinfo)
162
167
163 handlers.extend((httpbasicauthhandler(passmgr),
168 handlers.extend((httpbasicauthhandler(passmgr),
164 httpdigestauthhandler(passmgr)))
169 httpdigestauthhandler(passmgr)))
165
170
166 return urllib2.build_opener(*handlers)
171 return urllib2.build_opener(*handlers)
167
172
168 @reraise_safe_exceptions
173 @reraise_safe_exceptions
169 def check_url(self, url, config):
174 def check_url(self, url, config):
170 url_obj = url_parser(url)
175 url_obj = url_parser(url)
171 test_uri, _ = url_obj.authinfo()
176 test_uri, _ = url_obj.authinfo()
172 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
177 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
173 url_obj.query = obfuscate_qs(url_obj.query)
178 url_obj.query = obfuscate_qs(url_obj.query)
174 cleaned_uri = str(url_obj)
179 cleaned_uri = str(url_obj)
175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
180 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
176
181
177 if not test_uri.endswith('info/refs'):
182 if not test_uri.endswith('info/refs'):
178 test_uri = test_uri.rstrip('/') + '/info/refs'
183 test_uri = test_uri.rstrip('/') + '/info/refs'
179
184
180 o = self._build_opener(url)
185 o = self._build_opener(url)
181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
186 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
182
187
183 q = {"service": 'git-upload-pack'}
188 q = {"service": 'git-upload-pack'}
184 qs = '?%s' % urllib.urlencode(q)
189 qs = '?%s' % urllib.urlencode(q)
185 cu = "%s%s" % (test_uri, qs)
190 cu = "%s%s" % (test_uri, qs)
186 req = urllib2.Request(cu, None, {})
191 req = urllib2.Request(cu, None, {})
187
192
188 try:
193 try:
189 log.debug("Trying to open URL %s", cleaned_uri)
194 log.debug("Trying to open URL %s", cleaned_uri)
190 resp = o.open(req)
195 resp = o.open(req)
191 if resp.code != 200:
196 if resp.code != 200:
192 raise exceptions.URLError('Return Code is not 200')
197 raise exceptions.URLError('Return Code is not 200')
193 except Exception as e:
198 except Exception as e:
194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
199 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
195 # means it cannot be cloned
200 # means it cannot be cloned
196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
201 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
197
202
198 # now detect if it's proper git repo
203 # now detect if it's proper git repo
199 gitdata = resp.read()
204 gitdata = resp.read()
200 if 'service=git-upload-pack' in gitdata:
205 if 'service=git-upload-pack' in gitdata:
201 pass
206 pass
202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
207 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
203 # old style git can return some other format !
208 # old style git can return some other format !
204 pass
209 pass
205 else:
210 else:
206 raise exceptions.URLError(
211 raise exceptions.URLError(
207 "url [%s] does not look like an git" % (cleaned_uri,))
212 "url [%s] does not look like an git" % (cleaned_uri,))
208
213
209 return True
214 return True
210
215
211 @reraise_safe_exceptions
216 @reraise_safe_exceptions
212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
217 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
213 remote_refs = self.fetch(wire, url, apply_refs=False)
218 remote_refs = self.fetch(wire, url, apply_refs=False)
214 repo = self._factory.repo(wire)
219 repo = self._factory.repo(wire)
215 if isinstance(valid_refs, list):
220 if isinstance(valid_refs, list):
216 valid_refs = tuple(valid_refs)
221 valid_refs = tuple(valid_refs)
217
222
218 for k in remote_refs:
223 for k in remote_refs:
219 # only parse heads/tags and skip so called deferred tags
224 # only parse heads/tags and skip so called deferred tags
220 if k.startswith(valid_refs) and not k.endswith(deferred):
225 if k.startswith(valid_refs) and not k.endswith(deferred):
221 repo[k] = remote_refs[k]
226 repo[k] = remote_refs[k]
222
227
223 if update_after_clone:
228 if update_after_clone:
224 # we want to checkout HEAD
229 # we want to checkout HEAD
225 repo["HEAD"] = remote_refs["HEAD"]
230 repo["HEAD"] = remote_refs["HEAD"]
226 index.build_index_from_tree(repo.path, repo.index_path(),
231 index.build_index_from_tree(repo.path, repo.index_path(),
227 repo.object_store, repo["HEAD"].tree)
232 repo.object_store, repo["HEAD"].tree)
228
233
229 # TODO: this is quite complex, check if that can be simplified
234 # TODO: this is quite complex, check if that can be simplified
230 @reraise_safe_exceptions
235 @reraise_safe_exceptions
231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
236 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
232 repo = self._factory.repo(wire)
237 repo = self._factory.repo(wire)
233 object_store = repo.object_store
238 object_store = repo.object_store
234
239
235 # Create tree and populates it with blobs
240 # Create tree and populates it with blobs
236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
241 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
237
242
238 for node in updated:
243 for node in updated:
239 # Compute subdirs if needed
244 # Compute subdirs if needed
240 dirpath, nodename = vcspath.split(node['path'])
245 dirpath, nodename = vcspath.split(node['path'])
241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
246 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
242 parent = commit_tree
247 parent = commit_tree
243 ancestors = [('', parent)]
248 ancestors = [('', parent)]
244
249
245 # Tries to dig for the deepest existing tree
250 # Tries to dig for the deepest existing tree
246 while dirnames:
251 while dirnames:
247 curdir = dirnames.pop(0)
252 curdir = dirnames.pop(0)
248 try:
253 try:
249 dir_id = parent[curdir][1]
254 dir_id = parent[curdir][1]
250 except KeyError:
255 except KeyError:
251 # put curdir back into dirnames and stops
256 # put curdir back into dirnames and stops
252 dirnames.insert(0, curdir)
257 dirnames.insert(0, curdir)
253 break
258 break
254 else:
259 else:
255 # If found, updates parent
260 # If found, updates parent
256 parent = repo[dir_id]
261 parent = repo[dir_id]
257 ancestors.append((curdir, parent))
262 ancestors.append((curdir, parent))
258 # Now parent is deepest existing tree and we need to create
263 # Now parent is deepest existing tree and we need to create
259 # subtrees for dirnames (in reverse order)
264 # subtrees for dirnames (in reverse order)
260 # [this only applies for nodes from added]
265 # [this only applies for nodes from added]
261 new_trees = []
266 new_trees = []
262
267
263 blob = objects.Blob.from_string(node['content'])
268 blob = objects.Blob.from_string(node['content'])
264
269
265 if dirnames:
270 if dirnames:
266 # If there are trees which should be created we need to build
271 # If there are trees which should be created we need to build
267 # them now (in reverse order)
272 # them now (in reverse order)
268 reversed_dirnames = list(reversed(dirnames))
273 reversed_dirnames = list(reversed(dirnames))
269 curtree = objects.Tree()
274 curtree = objects.Tree()
270 curtree[node['node_path']] = node['mode'], blob.id
275 curtree[node['node_path']] = node['mode'], blob.id
271 new_trees.append(curtree)
276 new_trees.append(curtree)
272 for dirname in reversed_dirnames[:-1]:
277 for dirname in reversed_dirnames[:-1]:
273 newtree = objects.Tree()
278 newtree = objects.Tree()
274 newtree[dirname] = (DIR_STAT, curtree.id)
279 newtree[dirname] = (DIR_STAT, curtree.id)
275 new_trees.append(newtree)
280 new_trees.append(newtree)
276 curtree = newtree
281 curtree = newtree
277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
282 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
278 else:
283 else:
279 parent.add(
284 parent.add(
280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
285 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
281
286
282 new_trees.append(parent)
287 new_trees.append(parent)
283 # Update ancestors
288 # Update ancestors
284 reversed_ancestors = reversed(
289 reversed_ancestors = reversed(
285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
290 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
286 for parent, tree, path in reversed_ancestors:
291 for parent, tree, path in reversed_ancestors:
287 parent[path] = (DIR_STAT, tree.id)
292 parent[path] = (DIR_STAT, tree.id)
288 object_store.add_object(tree)
293 object_store.add_object(tree)
289
294
290 object_store.add_object(blob)
295 object_store.add_object(blob)
291 for tree in new_trees:
296 for tree in new_trees:
292 object_store.add_object(tree)
297 object_store.add_object(tree)
293
298
294 for node_path in removed:
299 for node_path in removed:
295 paths = node_path.split('/')
300 paths = node_path.split('/')
296 tree = commit_tree
301 tree = commit_tree
297 trees = [tree]
302 trees = [tree]
298 # Traverse deep into the forest...
303 # Traverse deep into the forest...
299 for path in paths:
304 for path in paths:
300 try:
305 try:
301 obj = repo[tree[path][1]]
306 obj = repo[tree[path][1]]
302 if isinstance(obj, objects.Tree):
307 if isinstance(obj, objects.Tree):
303 trees.append(obj)
308 trees.append(obj)
304 tree = obj
309 tree = obj
305 except KeyError:
310 except KeyError:
306 break
311 break
307 # Cut down the blob and all rotten trees on the way back...
312 # Cut down the blob and all rotten trees on the way back...
308 for path, tree in reversed(zip(paths, trees)):
313 for path, tree in reversed(zip(paths, trees)):
309 del tree[path]
314 del tree[path]
310 if tree:
315 if tree:
311 # This tree still has elements - don't remove it or any
316 # This tree still has elements - don't remove it or any
312 # of it's parents
317 # of it's parents
313 break
318 break
314
319
315 object_store.add_object(commit_tree)
320 object_store.add_object(commit_tree)
316
321
317 # Create commit
322 # Create commit
318 commit = objects.Commit()
323 commit = objects.Commit()
319 commit.tree = commit_tree.id
324 commit.tree = commit_tree.id
320 for k, v in commit_data.iteritems():
325 for k, v in commit_data.iteritems():
321 setattr(commit, k, v)
326 setattr(commit, k, v)
322 object_store.add_object(commit)
327 object_store.add_object(commit)
323
328
324 ref = 'refs/heads/%s' % branch
329 ref = 'refs/heads/%s' % branch
325 repo.refs[ref] = commit.id
330 repo.refs[ref] = commit.id
326
331
327 return commit.id
332 return commit.id
328
333
329 @reraise_safe_exceptions
334 @reraise_safe_exceptions
330 def fetch(self, wire, url, apply_refs=True, refs=None):
335 def fetch(self, wire, url, apply_refs=True, refs=None):
331 if url != 'default' and '://' not in url:
336 if url != 'default' and '://' not in url:
332 client = LocalGitClient(url)
337 client = LocalGitClient(url)
333 else:
338 else:
334 url_obj = url_parser(url)
339 url_obj = url_parser(url)
335 o = self._build_opener(url)
340 o = self._build_opener(url)
336 url, _ = url_obj.authinfo()
341 url, _ = url_obj.authinfo()
337 client = HttpGitClient(base_url=url, opener=o)
342 client = HttpGitClient(base_url=url, opener=o)
338 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
339
344
340 determine_wants = repo.object_store.determine_wants_all
345 determine_wants = repo.object_store.determine_wants_all
341 if refs:
346 if refs:
342 def determine_wants_requested(references):
347 def determine_wants_requested(references):
343 return [references[r] for r in references if r in refs]
348 return [references[r] for r in references if r in refs]
344 determine_wants = determine_wants_requested
349 determine_wants = determine_wants_requested
345
350
346 try:
351 try:
347 remote_refs = client.fetch(
352 remote_refs = client.fetch(
348 path=url, target=repo, determine_wants=determine_wants)
353 path=url, target=repo, determine_wants=determine_wants)
349 except NotGitRepository as e:
354 except NotGitRepository as e:
350 log.warning(
355 log.warning(
351 'Trying to fetch from "%s" failed, not a Git repository.', url)
356 'Trying to fetch from "%s" failed, not a Git repository.', url)
352 # Exception can contain unicode which we convert
357 # Exception can contain unicode which we convert
353 raise exceptions.AbortException(repr(e))
358 raise exceptions.AbortException(repr(e))
354
359
355 # mikhail: client.fetch() returns all the remote refs, but fetches only
360 # mikhail: client.fetch() returns all the remote refs, but fetches only
356 # refs filtered by `determine_wants` function. We need to filter result
361 # refs filtered by `determine_wants` function. We need to filter result
357 # as well
362 # as well
358 if refs:
363 if refs:
359 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
364 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
360
365
361 if apply_refs:
366 if apply_refs:
362 # TODO: johbo: Needs proper test coverage with a git repository
367 # TODO: johbo: Needs proper test coverage with a git repository
363 # that contains a tag object, so that we would end up with
368 # that contains a tag object, so that we would end up with
364 # a peeled ref at this point.
369 # a peeled ref at this point.
365 PEELED_REF_MARKER = '^{}'
370 PEELED_REF_MARKER = '^{}'
366 for k in remote_refs:
371 for k in remote_refs:
367 if k.endswith(PEELED_REF_MARKER):
372 if k.endswith(PEELED_REF_MARKER):
368 log.info("Skipping peeled reference %s", k)
373 log.info("Skipping peeled reference %s", k)
369 continue
374 continue
370 repo[k] = remote_refs[k]
375 repo[k] = remote_refs[k]
371
376
372 if refs:
377 if refs:
373 # mikhail: explicitly set the head to the last ref.
378 # mikhail: explicitly set the head to the last ref.
374 repo['HEAD'] = remote_refs[refs[-1]]
379 repo['HEAD'] = remote_refs[refs[-1]]
375
380
376 # TODO: mikhail: should we return remote_refs here to be
381 # TODO: mikhail: should we return remote_refs here to be
377 # consistent?
382 # consistent?
378 else:
383 else:
379 return remote_refs
384 return remote_refs
380
385
381 @reraise_safe_exceptions
386 @reraise_safe_exceptions
382 def get_remote_refs(self, wire, url):
387 def get_remote_refs(self, wire, url):
383 repo = Repo(url)
388 repo = Repo(url)
384 return repo.get_refs()
389 return repo.get_refs()
385
390
386 @reraise_safe_exceptions
391 @reraise_safe_exceptions
387 def get_description(self, wire):
392 def get_description(self, wire):
388 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
389 return repo.get_description()
394 return repo.get_description()
390
395
391 @reraise_safe_exceptions
396 @reraise_safe_exceptions
392 def get_file_history(self, wire, file_path, commit_id, limit):
397 def get_file_history(self, wire, file_path, commit_id, limit):
393 repo = self._factory.repo(wire)
398 repo = self._factory.repo(wire)
394 include = [commit_id]
399 include = [commit_id]
395 paths = [file_path]
400 paths = [file_path]
396
401
397 walker = repo.get_walker(include, paths=paths, max_entries=limit)
402 walker = repo.get_walker(include, paths=paths, max_entries=limit)
398 return [x.commit.id for x in walker]
403 return [x.commit.id for x in walker]
399
404
400 @reraise_safe_exceptions
405 @reraise_safe_exceptions
401 def get_missing_revs(self, wire, rev1, rev2, path2):
406 def get_missing_revs(self, wire, rev1, rev2, path2):
402 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
403 LocalGitClient(thin_packs=False).fetch(path2, repo)
408 LocalGitClient(thin_packs=False).fetch(path2, repo)
404
409
405 wire_remote = wire.copy()
410 wire_remote = wire.copy()
406 wire_remote['path'] = path2
411 wire_remote['path'] = path2
407 repo_remote = self._factory.repo(wire_remote)
412 repo_remote = self._factory.repo(wire_remote)
408 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
413 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
409
414
410 revs = [
415 revs = [
411 x.commit.id
416 x.commit.id
412 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
417 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
413 return revs
418 return revs
414
419
415 @reraise_safe_exceptions
420 @reraise_safe_exceptions
416 def get_object(self, wire, sha):
421 def get_object(self, wire, sha):
417 repo = self._factory.repo(wire)
422 repo = self._factory.repo(wire)
418 obj = repo.get_object(sha)
423 obj = repo.get_object(sha)
419 commit_id = obj.id
424 commit_id = obj.id
420
425
421 if isinstance(obj, Tag):
426 if isinstance(obj, Tag):
422 commit_id = obj.object[1]
427 commit_id = obj.object[1]
423
428
424 return {
429 return {
425 'id': obj.id,
430 'id': obj.id,
426 'type': obj.type_name,
431 'type': obj.type_name,
427 'commit_id': commit_id
432 'commit_id': commit_id
428 }
433 }
429
434
430 @reraise_safe_exceptions
435 @reraise_safe_exceptions
431 def get_object_attrs(self, wire, sha, *attrs):
436 def get_object_attrs(self, wire, sha, *attrs):
432 repo = self._factory.repo(wire)
437 repo = self._factory.repo(wire)
433 obj = repo.get_object(sha)
438 obj = repo.get_object(sha)
434 return list(getattr(obj, a) for a in attrs)
439 return list(getattr(obj, a) for a in attrs)
435
440
436 @reraise_safe_exceptions
441 @reraise_safe_exceptions
437 def get_refs(self, wire):
442 def get_refs(self, wire):
438 repo = self._factory.repo(wire)
443 repo = self._factory.repo(wire)
439 result = {}
444 result = {}
440 for ref, sha in repo.refs.as_dict().items():
445 for ref, sha in repo.refs.as_dict().items():
441 peeled_sha = repo.get_peeled(ref)
446 peeled_sha = repo.get_peeled(ref)
442 result[ref] = peeled_sha
447 result[ref] = peeled_sha
443 return result
448 return result
444
449
445 @reraise_safe_exceptions
450 @reraise_safe_exceptions
446 def get_refs_path(self, wire):
451 def get_refs_path(self, wire):
447 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
448 return repo.refs.path
453 return repo.refs.path
449
454
450 @reraise_safe_exceptions
455 @reraise_safe_exceptions
451 def head(self, wire):
456 def head(self, wire):
452 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
453 return repo.head()
458 return repo.head()
454
459
455 @reraise_safe_exceptions
460 @reraise_safe_exceptions
456 def init(self, wire):
461 def init(self, wire):
457 repo_path = str_to_dulwich(wire['path'])
462 repo_path = str_to_dulwich(wire['path'])
458 self.repo = Repo.init(repo_path)
463 self.repo = Repo.init(repo_path)
459
464
460 @reraise_safe_exceptions
465 @reraise_safe_exceptions
461 def init_bare(self, wire):
466 def init_bare(self, wire):
462 repo_path = str_to_dulwich(wire['path'])
467 repo_path = str_to_dulwich(wire['path'])
463 self.repo = Repo.init_bare(repo_path)
468 self.repo = Repo.init_bare(repo_path)
464
469
465 @reraise_safe_exceptions
470 @reraise_safe_exceptions
466 def revision(self, wire, rev):
471 def revision(self, wire, rev):
467 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
468 obj = repo[rev]
473 obj = repo[rev]
469 obj_data = {
474 obj_data = {
470 'id': obj.id,
475 'id': obj.id,
471 }
476 }
472 try:
477 try:
473 obj_data['tree'] = obj.tree
478 obj_data['tree'] = obj.tree
474 except AttributeError:
479 except AttributeError:
475 pass
480 pass
476 return obj_data
481 return obj_data
477
482
478 @reraise_safe_exceptions
483 @reraise_safe_exceptions
479 def commit_attribute(self, wire, rev, attr):
484 def commit_attribute(self, wire, rev, attr):
480 repo = self._factory.repo(wire)
485 repo = self._factory.repo(wire)
481 obj = repo[rev]
486 obj = repo[rev]
482 return getattr(obj, attr)
487 return getattr(obj, attr)
483
488
484 @reraise_safe_exceptions
489 @reraise_safe_exceptions
485 def set_refs(self, wire, key, value):
490 def set_refs(self, wire, key, value):
486 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
487 repo.refs[key] = value
492 repo.refs[key] = value
488
493
489 @reraise_safe_exceptions
494 @reraise_safe_exceptions
490 def remove_ref(self, wire, key):
495 def remove_ref(self, wire, key):
491 repo = self._factory.repo(wire)
496 repo = self._factory.repo(wire)
492 del repo.refs[key]
497 del repo.refs[key]
493
498
494 @reraise_safe_exceptions
499 @reraise_safe_exceptions
495 def tree_changes(self, wire, source_id, target_id):
500 def tree_changes(self, wire, source_id, target_id):
496 repo = self._factory.repo(wire)
501 repo = self._factory.repo(wire)
497 source = repo[source_id].tree if source_id else None
502 source = repo[source_id].tree if source_id else None
498 target = repo[target_id].tree
503 target = repo[target_id].tree
499 result = repo.object_store.tree_changes(source, target)
504 result = repo.object_store.tree_changes(source, target)
500 return list(result)
505 return list(result)
501
506
502 @reraise_safe_exceptions
507 @reraise_safe_exceptions
503 def tree_items(self, wire, tree_id):
508 def tree_items(self, wire, tree_id):
504 repo = self._factory.repo(wire)
509 repo = self._factory.repo(wire)
505 tree = repo[tree_id]
510 tree = repo[tree_id]
506
511
507 result = []
512 result = []
508 for item in tree.iteritems():
513 for item in tree.iteritems():
509 item_sha = item.sha
514 item_sha = item.sha
510 item_mode = item.mode
515 item_mode = item.mode
511
516
512 if FILE_MODE(item_mode) == GIT_LINK:
517 if FILE_MODE(item_mode) == GIT_LINK:
513 item_type = "link"
518 item_type = "link"
514 else:
519 else:
515 item_type = repo[item_sha].type_name
520 item_type = repo[item_sha].type_name
516
521
517 result.append((item.path, item_mode, item_sha, item_type))
522 result.append((item.path, item_mode, item_sha, item_type))
518 return result
523 return result
519
524
520 @reraise_safe_exceptions
525 @reraise_safe_exceptions
521 def update_server_info(self, wire):
526 def update_server_info(self, wire):
522 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
523 update_server_info(repo)
528 update_server_info(repo)
524
529
525 @reraise_safe_exceptions
530 @reraise_safe_exceptions
526 def discover_git_version(self):
531 def discover_git_version(self):
527 stdout, _ = self.run_git_command(
532 stdout, _ = self.run_git_command(
528 {}, ['--version'], _bare=True, _safe=True)
533 {}, ['--version'], _bare=True, _safe=True)
529 prefix = 'git version'
534 prefix = 'git version'
530 if stdout.startswith(prefix):
535 if stdout.startswith(prefix):
531 stdout = stdout[len(prefix):]
536 stdout = stdout[len(prefix):]
532 return stdout.strip()
537 return stdout.strip()
533
538
534 @reraise_safe_exceptions
539 @reraise_safe_exceptions
535 def run_git_command(self, wire, cmd, **opts):
540 def run_git_command(self, wire, cmd, **opts):
536 path = wire.get('path', None)
541 path = wire.get('path', None)
537
542
538 if path and os.path.isdir(path):
543 if path and os.path.isdir(path):
539 opts['cwd'] = path
544 opts['cwd'] = path
540
545
541 if '_bare' in opts:
546 if '_bare' in opts:
542 _copts = []
547 _copts = []
543 del opts['_bare']
548 del opts['_bare']
544 else:
549 else:
545 _copts = ['-c', 'core.quotepath=false', ]
550 _copts = ['-c', 'core.quotepath=false', ]
546 safe_call = False
551 safe_call = False
547 if '_safe' in opts:
552 if '_safe' in opts:
548 # no exc on failure
553 # no exc on failure
549 del opts['_safe']
554 del opts['_safe']
550 safe_call = True
555 safe_call = True
551
556
552 gitenv = os.environ.copy()
557 gitenv = os.environ.copy()
553 gitenv.update(opts.pop('extra_env', {}))
558 gitenv.update(opts.pop('extra_env', {}))
554 # need to clean fix GIT_DIR !
559 # need to clean fix GIT_DIR !
555 if 'GIT_DIR' in gitenv:
560 if 'GIT_DIR' in gitenv:
556 del gitenv['GIT_DIR']
561 del gitenv['GIT_DIR']
557 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
562 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
558
563
559 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
564 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
560
565
561 try:
566 try:
562 _opts = {'env': gitenv, 'shell': False}
567 _opts = {'env': gitenv, 'shell': False}
563 _opts.update(opts)
568 _opts.update(opts)
564 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
569 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
565
570
566 return ''.join(p), ''.join(p.error)
571 return ''.join(p), ''.join(p.error)
567 except (EnvironmentError, OSError) as err:
572 except (EnvironmentError, OSError) as err:
568 tb_err = ("Couldn't run git command (%s).\n"
573 tb_err = ("Couldn't run git command (%s).\n"
569 "Original error was:%s\n" % (cmd, err))
574 "Original error was:%s\n" % (cmd, err))
570 log.exception(tb_err)
575 log.exception(tb_err)
571 if safe_call:
576 if safe_call:
572 return '', err
577 return '', err
573 else:
578 else:
574 raise exceptions.VcsException(tb_err)
579 raise exceptions.VcsException(tb_err)
575
580
576
581
577 def str_to_dulwich(value):
582 def str_to_dulwich(value):
578 """
583 """
579 Dulwich 0.10.1a requires `unicode` objects to be passed in.
584 Dulwich 0.10.1a requires `unicode` objects to be passed in.
580 """
585 """
581 return value.decode(settings.WIRE_ENCODING)
586 return value.decode(settings.WIRE_ENCODING)
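The hg.py hunks below drop that module's private copy of raise_from_original (and the now-unused import sys) and import the shared helper from vcsserver.base instead; the Mercurial decorator already had the catch-all branch that git.py gains above. The hasattr(e, '_vcs_kind') check in both decorators means exceptions that are already vcsserver types pass through untouched, so nested decorated calls do not wrap them a second time. A small sketch of that guard (stand-in classes, not project code):

class LookupException(Exception):
    _vcs_kind = 'lookup'  # marker checked by the decorators


class UnhandledException(Exception):
    _vcs_kind = 'unhandled'


def translate_unexpected(func):
    # Simplified version of the catch-all branch used in git.py and hg.py.
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                raise UnhandledException(*e.args)
            raise  # already translated: pass through as-is
    return wrapper


@translate_unexpected
def inner():
    raise LookupException('missing commit')


@translate_unexpected
def outer():
    return inner()


try:
    outer()
except LookupException as exc:
    print exc.args  # ('missing commit',) and not double-wrapped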
@@ -1,723 +1,711 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import sys
22 import urllib
21 import urllib
23 import urllib2
22 import urllib2
24
23
25 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
26 from mercurial import commands
28 from mercurial import unionrepo
27 from mercurial import unionrepo
29
28
30 from vcsserver import exceptions
29 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs
30 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
31 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
32 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
33 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
34 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
35 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
36 InterventionRequired, RequirementError)
38
37
39 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
40
39
41
40
42 def make_ui_from_config(repo_config):
41 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
42 baseui = ui.ui()
44
43
45 # clean the baseui object
44 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
45 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
46 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
47 baseui._tcfg = hgconfig.config()
49
48
50 for section, option, value in repo_config:
49 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
50 baseui.setconfig(section, option, value)
52
51
53 # make our hgweb quiet so it doesn't print output
52 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
53 baseui.setconfig('ui', 'quiet', 'true')
55
54
56 # force mercurial to only use 1 thread, otherwise it may try to set a
55 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
56 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
57 baseui.setconfig('worker', 'numcpus', 1)
59
58
60 # If there is no config for the largefiles extension, we explicitly disable
59 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from repositories hgrc file. Recent
60 # it here. This overrides settings from repositories hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
61 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
62 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
63 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
64 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
65 baseui.setconfig('extensions', 'largefiles', '!')
67
66
68 return baseui
67 return baseui
69
68
70
69
71 def reraise_safe_exceptions(func):
70 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
71 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
72 def wrapper(*args, **kwargs):
74 try:
73 try:
75 return func(*args, **kwargs)
74 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
75 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
76 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
77 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
78 raise_from_original(exceptions.LookupException)
80 except RequirementError:
79 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
80 raise_from_original(exceptions.RequirementException)
82 except RepoError:
81 except RepoError:
83 raise_from_original(exceptions.VcsException)
82 raise_from_original(exceptions.VcsException)
84 except LookupError:
83 except LookupError:
85 raise_from_original(exceptions.LookupException)
84 raise_from_original(exceptions.LookupException)
86 except Exception as e:
85 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
86 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
87 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
88 raise_from_original(exceptions.UnhandledException)
90 raise
89 raise
91 return wrapper
90 return wrapper
92
91
93
92
94 def raise_from_original(new_type):
95 """
96 Raise a new exception type with original args and traceback.
97 """
98 _, original, traceback = sys.exc_info()
99 try:
100 raise new_type(*original.args), None, traceback
101 finally:
102 del traceback
103
104
105 class MercurialFactory(RepoFactory):
93 class MercurialFactory(RepoFactory):
106
94
107 def _create_config(self, config, hooks=True):
95 def _create_config(self, config, hooks=True):
108 if not hooks:
96 if not hooks:
109 hooks_to_clean = frozenset((
97 hooks_to_clean = frozenset((
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
98 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
99 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 new_config = []
100 new_config = []
113 for section, option, value in config:
101 for section, option, value in config:
114 if section == 'hooks' and option in hooks_to_clean:
102 if section == 'hooks' and option in hooks_to_clean:
115 continue
103 continue
116 new_config.append((section, option, value))
104 new_config.append((section, option, value))
117 config = new_config
105 config = new_config
118
106
119 baseui = make_ui_from_config(config)
107 baseui = make_ui_from_config(config)
120 return baseui
108 return baseui
121
109
122 def _create_repo(self, wire, create):
110 def _create_repo(self, wire, create):
123 baseui = self._create_config(wire["config"])
111 baseui = self._create_config(wire["config"])
124 return localrepository(baseui, wire["path"], create)
112 return localrepository(baseui, wire["path"], create)
125
113
126
114
127 class HgRemote(object):
115 class HgRemote(object):
128
116
129 def __init__(self, factory):
117 def __init__(self, factory):
130 self._factory = factory
118 self._factory = factory
131
119
132 self._bulk_methods = {
120 self._bulk_methods = {
133 "affected_files": self.ctx_files,
121 "affected_files": self.ctx_files,
134 "author": self.ctx_user,
122 "author": self.ctx_user,
135 "branch": self.ctx_branch,
123 "branch": self.ctx_branch,
136 "children": self.ctx_children,
124 "children": self.ctx_children,
137 "date": self.ctx_date,
125 "date": self.ctx_date,
138 "message": self.ctx_description,
126 "message": self.ctx_description,
139 "parents": self.ctx_parents,
127 "parents": self.ctx_parents,
140 "status": self.ctx_status,
128 "status": self.ctx_status,
141 "_file_paths": self.ctx_list,
129 "_file_paths": self.ctx_list,
142 }
130 }
143
131
144 @reraise_safe_exceptions
132 @reraise_safe_exceptions
145 def discover_hg_version(self):
133 def discover_hg_version(self):
146 from mercurial import util
134 from mercurial import util
147 return util.version()
135 return util.version()
148
136
149 @reraise_safe_exceptions
137 @reraise_safe_exceptions
150 def archive_repo(self, archive_path, mtime, file_info, kind):
138 def archive_repo(self, archive_path, mtime, file_info, kind):
151 if kind == "tgz":
139 if kind == "tgz":
152 archiver = archival.tarit(archive_path, mtime, "gz")
140 archiver = archival.tarit(archive_path, mtime, "gz")
153 elif kind == "tbz2":
141 elif kind == "tbz2":
154 archiver = archival.tarit(archive_path, mtime, "bz2")
142 archiver = archival.tarit(archive_path, mtime, "bz2")
155 elif kind == 'zip':
143 elif kind == 'zip':
156 archiver = archival.zipit(archive_path, mtime)
144 archiver = archival.zipit(archive_path, mtime)
157 else:
145 else:
158 raise exceptions.ArchiveException(
146 raise exceptions.ArchiveException(
159 'Remote does not support: "%s".' % kind)
147 'Remote does not support: "%s".' % kind)
160
148
161 for f_path, f_mode, f_is_link, f_content in file_info:
149 for f_path, f_mode, f_is_link, f_content in file_info:
162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
150 archiver.addfile(f_path, f_mode, f_is_link, f_content)
163 archiver.done()
151 archiver.done()
164
152
165 @reraise_safe_exceptions
153 @reraise_safe_exceptions
166 def bookmarks(self, wire):
154 def bookmarks(self, wire):
167 repo = self._factory.repo(wire)
155 repo = self._factory.repo(wire)
168 return dict(repo._bookmarks)
156 return dict(repo._bookmarks)
169
157
170 @reraise_safe_exceptions
158 @reraise_safe_exceptions
171 def branches(self, wire, normal, closed):
159 def branches(self, wire, normal, closed):
172 repo = self._factory.repo(wire)
160 repo = self._factory.repo(wire)
173 iter_branches = repo.branchmap().iterbranches()
161 iter_branches = repo.branchmap().iterbranches()
174 bt = {}
162 bt = {}
175 for branch_name, _heads, tip, is_closed in iter_branches:
163 for branch_name, _heads, tip, is_closed in iter_branches:
176 if normal and not is_closed:
164 if normal and not is_closed:
177 bt[branch_name] = tip
165 bt[branch_name] = tip
178 if closed and is_closed:
166 if closed and is_closed:
179 bt[branch_name] = tip
167 bt[branch_name] = tip
180
168
181 return bt
169 return bt
182
170
183 @reraise_safe_exceptions
171 @reraise_safe_exceptions
184 def bulk_request(self, wire, rev, pre_load):
172 def bulk_request(self, wire, rev, pre_load):
185 result = {}
173 result = {}
186 for attr in pre_load:
174 for attr in pre_load:
187 try:
175 try:
188 method = self._bulk_methods[attr]
176 method = self._bulk_methods[attr]
189 result[attr] = method(wire, rev)
177 result[attr] = method(wire, rev)
190 except KeyError:
178 except KeyError:
191 raise exceptions.VcsException(
179 raise exceptions.VcsException(
192 'Unknown bulk attribute: "%s"' % attr)
180 'Unknown bulk attribute: "%s"' % attr)
193 return result
181 return result
194
182
195 @reraise_safe_exceptions
183 @reraise_safe_exceptions
196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
184 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
185 baseui = self._factory._create_config(wire["config"], hooks=hooks)
198 clone(baseui, source, dest, noupdate=not update_after_clone)
186 clone(baseui, source, dest, noupdate=not update_after_clone)
199
187
200 @reraise_safe_exceptions
188 @reraise_safe_exceptions
201 def commitctx(
189 def commitctx(
202 self, wire, message, parents, commit_time, commit_timezone,
190 self, wire, message, parents, commit_time, commit_timezone,
203 user, files, extra, removed, updated):
191 user, files, extra, removed, updated):
204
192
205 def _filectxfn(_repo, memctx, path):
193 def _filectxfn(_repo, memctx, path):
206 """
194 """
207 Marks given path as added/changed/removed in a given _repo. This is
195 Marks given path as added/changed/removed in a given _repo. This is
208 for internal mercurial commit function.
196 for internal mercurial commit function.
209 """
197 """
210
198
211 # check if this path is removed
199 # check if this path is removed
212 if path in removed:
200 if path in removed:
213 # returning None is a way to mark node for removal
201 # returning None is a way to mark node for removal
214 return None
202 return None
215
203
216 # check if this path is added
204 # check if this path is added
217 for node in updated:
205 for node in updated:
218 if node['path'] == path:
206 if node['path'] == path:
219 return memfilectx(
207 return memfilectx(
220 _repo,
208 _repo,
221 path=node['path'],
209 path=node['path'],
222 data=node['content'],
210 data=node['content'],
223 islink=False,
211 islink=False,
224 isexec=bool(node['mode'] & stat.S_IXUSR),
212 isexec=bool(node['mode'] & stat.S_IXUSR),
225 copied=False,
213 copied=False,
226 memctx=memctx)
214 memctx=memctx)
227
215
228 raise exceptions.AbortException(
216 raise exceptions.AbortException(
229 "Given path haven't been marked as added, "
217 "Given path haven't been marked as added, "
230 "changed or removed (%s)" % path)
218 "changed or removed (%s)" % path)
231
219
232 repo = self._factory.repo(wire)
220 repo = self._factory.repo(wire)
233
221
234 commit_ctx = memctx(
222 commit_ctx = memctx(
235 repo=repo,
223 repo=repo,
236 parents=parents,
224 parents=parents,
237 text=message,
225 text=message,
238 files=files,
226 files=files,
239 filectxfn=_filectxfn,
227 filectxfn=_filectxfn,
240 user=user,
228 user=user,
241 date=(commit_time, commit_timezone),
229 date=(commit_time, commit_timezone),
242 extra=extra)
230 extra=extra)
243
231
244 n = repo.commitctx(commit_ctx)
232 n = repo.commitctx(commit_ctx)
245 new_id = hex(n)
233 new_id = hex(n)
246
234
247 return new_id
235 return new_id
248
236
249 @reraise_safe_exceptions
237 @reraise_safe_exceptions
250 def ctx_branch(self, wire, revision):
238 def ctx_branch(self, wire, revision):
251 repo = self._factory.repo(wire)
239 repo = self._factory.repo(wire)
252 ctx = repo[revision]
240 ctx = repo[revision]
253 return ctx.branch()
241 return ctx.branch()
254
242
255 @reraise_safe_exceptions
243 @reraise_safe_exceptions
256 def ctx_children(self, wire, revision):
244 def ctx_children(self, wire, revision):
257 repo = self._factory.repo(wire)
245 repo = self._factory.repo(wire)
258 ctx = repo[revision]
246 ctx = repo[revision]
259 return [child.rev() for child in ctx.children()]
247 return [child.rev() for child in ctx.children()]
260
248
261 @reraise_safe_exceptions
249 @reraise_safe_exceptions
262 def ctx_date(self, wire, revision):
250 def ctx_date(self, wire, revision):
263 repo = self._factory.repo(wire)
251 repo = self._factory.repo(wire)
264 ctx = repo[revision]
252 ctx = repo[revision]
265 return ctx.date()
253 return ctx.date()
266
254
267 @reraise_safe_exceptions
255 @reraise_safe_exceptions
268 def ctx_description(self, wire, revision):
256 def ctx_description(self, wire, revision):
269 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
270 ctx = repo[revision]
258 ctx = repo[revision]
271 return ctx.description()
259 return ctx.description()
272
260
273 @reraise_safe_exceptions
261 @reraise_safe_exceptions
274 def ctx_diff(
262 def ctx_diff(
275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
263 self, wire, revision, git=True, ignore_whitespace=True, context=3):
276 repo = self._factory.repo(wire)
264 repo = self._factory.repo(wire)
277 ctx = repo[revision]
265 ctx = repo[revision]
278 result = ctx.diff(
266 result = ctx.diff(
279 git=git, ignore_whitespace=ignore_whitespace, context=context)
267 git=git, ignore_whitespace=ignore_whitespace, context=context)
280 return list(result)
268 return list(result)
281
269
282 @reraise_safe_exceptions
270 @reraise_safe_exceptions
283 def ctx_files(self, wire, revision):
271 def ctx_files(self, wire, revision):
284 repo = self._factory.repo(wire)
272 repo = self._factory.repo(wire)
285 ctx = repo[revision]
273 ctx = repo[revision]
286 return ctx.files()
274 return ctx.files()
287
275
288 @reraise_safe_exceptions
276 @reraise_safe_exceptions
289 def ctx_list(self, path, revision):
277 def ctx_list(self, path, revision):
290 repo = self._factory.repo(path)
278 repo = self._factory.repo(path)
291 ctx = repo[revision]
279 ctx = repo[revision]
292 return list(ctx)
280 return list(ctx)
293
281
294 @reraise_safe_exceptions
282 @reraise_safe_exceptions
295 def ctx_parents(self, wire, revision):
283 def ctx_parents(self, wire, revision):
296 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
297 ctx = repo[revision]
285 ctx = repo[revision]
298 return [parent.rev() for parent in ctx.parents()]
286 return [parent.rev() for parent in ctx.parents()]
299
287
300 @reraise_safe_exceptions
288 @reraise_safe_exceptions
301 def ctx_substate(self, wire, revision):
289 def ctx_substate(self, wire, revision):
302 repo = self._factory.repo(wire)
290 repo = self._factory.repo(wire)
303 ctx = repo[revision]
291 ctx = repo[revision]
304 return ctx.substate
292 return ctx.substate
305
293
306 @reraise_safe_exceptions
294 @reraise_safe_exceptions
307 def ctx_status(self, wire, revision):
295 def ctx_status(self, wire, revision):
308 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
309 ctx = repo[revision]
297 ctx = repo[revision]
310 status = repo[ctx.p1().node()].status(other=ctx.node())
298 status = repo[ctx.p1().node()].status(other=ctx.node())
311 # object of status (odd, custom named tuple in mercurial) is not
299 # object of status (odd, custom named tuple in mercurial) is not
312 # correctly serializable via Pyro, we make it a list, as the underlying
300 # correctly serializable via Pyro, we make it a list, as the underlying
313 # API expects this to be a list
301 # API expects this to be a list
314 return list(status)
302 return list(status)
315
303
316 @reraise_safe_exceptions
304 @reraise_safe_exceptions
317 def ctx_user(self, wire, revision):
305 def ctx_user(self, wire, revision):
318 repo = self._factory.repo(wire)
306 repo = self._factory.repo(wire)
319 ctx = repo[revision]
307 ctx = repo[revision]
320 return ctx.user()
308 return ctx.user()
321
309
322 @reraise_safe_exceptions
310 @reraise_safe_exceptions
323 def check_url(self, url, config):
311 def check_url(self, url, config):
324 _proto = None
312 _proto = None
325 if '+' in url[:url.find('://')]:
313 if '+' in url[:url.find('://')]:
326 _proto = url[0:url.find('+')]
314 _proto = url[0:url.find('+')]
327 url = url[url.find('+') + 1:]
315 url = url[url.find('+') + 1:]
328 handlers = []
316 handlers = []
329 url_obj = url_parser(url)
317 url_obj = url_parser(url)
330 test_uri, authinfo = url_obj.authinfo()
318 test_uri, authinfo = url_obj.authinfo()
331 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
319 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
332 url_obj.query = obfuscate_qs(url_obj.query)
320 url_obj.query = obfuscate_qs(url_obj.query)
333
321
334 cleaned_uri = str(url_obj)
322 cleaned_uri = str(url_obj)
335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
323 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
336
324
337 if authinfo:
325 if authinfo:
338 # create a password manager
326 # create a password manager
339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
327 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
340 passmgr.add_password(*authinfo)
328 passmgr.add_password(*authinfo)
341
329
342 handlers.extend((httpbasicauthhandler(passmgr),
330 handlers.extend((httpbasicauthhandler(passmgr),
343 httpdigestauthhandler(passmgr)))
331 httpdigestauthhandler(passmgr)))
344
332
345 o = urllib2.build_opener(*handlers)
333 o = urllib2.build_opener(*handlers)
346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
334 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
347 ('Accept', 'application/mercurial-0.1')]
335 ('Accept', 'application/mercurial-0.1')]
348
336
349 q = {"cmd": 'between'}
337 q = {"cmd": 'between'}
350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
338 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
351 qs = '?%s' % urllib.urlencode(q)
339 qs = '?%s' % urllib.urlencode(q)
352 cu = "%s%s" % (test_uri, qs)
340 cu = "%s%s" % (test_uri, qs)
353 req = urllib2.Request(cu, None, {})
341 req = urllib2.Request(cu, None, {})
354
342
355 try:
343 try:
356 log.debug("Trying to open URL %s", cleaned_uri)
344 log.debug("Trying to open URL %s", cleaned_uri)
357 resp = o.open(req)
345 resp = o.open(req)
358 if resp.code != 200:
346 if resp.code != 200:
359 raise exceptions.URLError('Return Code is not 200')
347 raise exceptions.URLError('Return Code is not 200')
360 except Exception as e:
348 except Exception as e:
361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
349 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
362 # means it cannot be cloned
350 # means it cannot be cloned
363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
351 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
364
352
365 # now check if it's a proper hg repo, but don't do it for svn
353 # now check if it's a proper hg repo, but don't do it for svn
366 try:
354 try:
367 if _proto == 'svn':
355 if _proto == 'svn':
368 pass
356 pass
369 else:
357 else:
370 # check for pure hg repos
358 # check for pure hg repos
371 log.debug(
359 log.debug(
372 "Verifying if URL is a Mercurial repository: %s",
360 "Verifying if URL is a Mercurial repository: %s",
373 cleaned_uri)
361 cleaned_uri)
374 httppeer(make_ui_from_config(config), url).lookup('tip')
362 httppeer(make_ui_from_config(config), url).lookup('tip')
375 except Exception as e:
363 except Exception as e:
376 log.warning("URL is not a valid Mercurial repository: %s",
364 log.warning("URL is not a valid Mercurial repository: %s",
377 cleaned_uri)
365 cleaned_uri)
378 raise exceptions.URLError(
366 raise exceptions.URLError(
379 "url [%s] does not look like an hg repo org_exc: %s"
367 "url [%s] does not look like an hg repo org_exc: %s"
380 % (cleaned_uri, e))
368 % (cleaned_uri, e))
381
369
382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
370 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
383 return True
371 return True
384
372
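A hedged usage sketch of check_url(); the remote instance, URL and credentials are placeholders. The point is that the password and query string are masked before the URL ever reaches the log.

url = 'https://john:secret@code.example.com/repo?auth_token=abc'
try:
    # logged roughly as "https://john:*****@code.example.com/repo" with an obfuscated query
    remote.check_url(url, config)
except exceptions.URLError as error:
    print('not reachable or not a Mercurial repository: %s' % error)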
385 @reraise_safe_exceptions
373 @reraise_safe_exceptions
386 def diff(
374 def diff(
387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
375 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
388 context):
376 context):
389 repo = self._factory.repo(wire)
377 repo = self._factory.repo(wire)
390
378
391 if file_filter:
379 if file_filter:
392 match_filter = match(file_filter[0], '', [file_filter[1]])
380 match_filter = match(file_filter[0], '', [file_filter[1]])
393 else:
381 else:
394 match_filter = file_filter
382 match_filter = file_filter
395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
383 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
396
384
397 try:
385 try:
398 return "".join(patch.diff(
386 return "".join(patch.diff(
399 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
387 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
400 except RepoLookupError:
388 except RepoLookupError:
401 raise exceptions.LookupException()
389 raise exceptions.LookupException()
402
390
403 @reraise_safe_exceptions
391 @reraise_safe_exceptions
404 def file_history(self, wire, revision, path, limit):
392 def file_history(self, wire, revision, path, limit):
405 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
406
394
407 ctx = repo[revision]
395 ctx = repo[revision]
408 fctx = ctx.filectx(path)
396 fctx = ctx.filectx(path)
409
397
410 def history_iter():
398 def history_iter():
411 limit_rev = fctx.rev()
399 limit_rev = fctx.rev()
412 for obj in reversed(list(fctx.filelog())):
400 for obj in reversed(list(fctx.filelog())):
413 obj = fctx.filectx(obj)
401 obj = fctx.filectx(obj)
414 if limit_rev >= obj.rev():
402 if limit_rev >= obj.rev():
415 yield obj
403 yield obj
416
404
417 history = []
405 history = []
418 for cnt, obj in enumerate(history_iter()):
406 for cnt, obj in enumerate(history_iter()):
419 if limit and cnt >= limit:
407 if limit and cnt >= limit:
420 break
408 break
421 history.append(hex(obj.node()))
409 history.append(hex(obj.node()))
422
410
423 return [x for x in history]
411 return [x for x in history]
424
412
425 @reraise_safe_exceptions
413 @reraise_safe_exceptions
426 def file_history_untill(self, wire, revision, path, limit):
414 def file_history_untill(self, wire, revision, path, limit):
427 repo = self._factory.repo(wire)
415 repo = self._factory.repo(wire)
428 ctx = repo[revision]
416 ctx = repo[revision]
429 fctx = ctx.filectx(path)
417 fctx = ctx.filectx(path)
430
418
431 file_log = list(fctx.filelog())
419 file_log = list(fctx.filelog())
432 if limit:
420 if limit:
433 # Limit to the last n items
421 # Limit to the last n items
434 file_log = file_log[-limit:]
422 file_log = file_log[-limit:]
435
423
436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
424 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
437
425
438 @reraise_safe_exceptions
426 @reraise_safe_exceptions
439 def fctx_annotate(self, wire, revision, path):
427 def fctx_annotate(self, wire, revision, path):
440 repo = self._factory.repo(wire)
428 repo = self._factory.repo(wire)
441 ctx = repo[revision]
429 ctx = repo[revision]
442 fctx = ctx.filectx(path)
430 fctx = ctx.filectx(path)
443
431
444 result = []
432 result = []
445 for i, annotate_data in enumerate(fctx.annotate()):
433 for i, annotate_data in enumerate(fctx.annotate()):
446 ln_no = i + 1
434 ln_no = i + 1
447 node_info, content = annotate_data
435 node_info, content = annotate_data
448 sha = hex(node_info[0].node())
436 sha = hex(node_info[0].node())
449 result.append((ln_no, sha, content))
437 result.append((ln_no, sha, content))
450 return result
438 return result
451
439
452 @reraise_safe_exceptions
440 @reraise_safe_exceptions
453 def fctx_data(self, wire, revision, path):
441 def fctx_data(self, wire, revision, path):
454 repo = self._factory.repo(wire)
442 repo = self._factory.repo(wire)
455 ctx = repo[revision]
443 ctx = repo[revision]
456 fctx = ctx.filectx(path)
444 fctx = ctx.filectx(path)
457 return fctx.data()
445 return fctx.data()
458
446
459 @reraise_safe_exceptions
447 @reraise_safe_exceptions
460 def fctx_flags(self, wire, revision, path):
448 def fctx_flags(self, wire, revision, path):
461 repo = self._factory.repo(wire)
449 repo = self._factory.repo(wire)
462 ctx = repo[revision]
450 ctx = repo[revision]
463 fctx = ctx.filectx(path)
451 fctx = ctx.filectx(path)
464 return fctx.flags()
452 return fctx.flags()
465
453
466 @reraise_safe_exceptions
454 @reraise_safe_exceptions
467 def fctx_size(self, wire, revision, path):
455 def fctx_size(self, wire, revision, path):
468 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
469 ctx = repo[revision]
457 ctx = repo[revision]
470 fctx = ctx.filectx(path)
458 fctx = ctx.filectx(path)
471 return fctx.size()
459 return fctx.size()
472
460
473 @reraise_safe_exceptions
461 @reraise_safe_exceptions
474 def get_all_commit_ids(self, wire, name):
462 def get_all_commit_ids(self, wire, name):
475 repo = self._factory.repo(wire)
463 repo = self._factory.repo(wire)
476 revs = repo.filtered(name).changelog.index
464 revs = repo.filtered(name).changelog.index
477 return map(lambda x: hex(x[7]), revs)[:-1]
465 return map(lambda x: hex(x[7]), revs)[:-1]
478
466
479 @reraise_safe_exceptions
467 @reraise_safe_exceptions
480 def get_config_value(self, wire, section, name, untrusted=False):
468 def get_config_value(self, wire, section, name, untrusted=False):
481 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
482 return repo.ui.config(section, name, untrusted=untrusted)
470 return repo.ui.config(section, name, untrusted=untrusted)
483
471
484 @reraise_safe_exceptions
472 @reraise_safe_exceptions
485 def get_config_bool(self, wire, section, name, untrusted=False):
473 def get_config_bool(self, wire, section, name, untrusted=False):
486 repo = self._factory.repo(wire)
474 repo = self._factory.repo(wire)
487 return repo.ui.configbool(section, name, untrusted=untrusted)
475 return repo.ui.configbool(section, name, untrusted=untrusted)
488
476
489 @reraise_safe_exceptions
477 @reraise_safe_exceptions
490 def get_config_list(self, wire, section, name, untrusted=False):
478 def get_config_list(self, wire, section, name, untrusted=False):
491 repo = self._factory.repo(wire)
479 repo = self._factory.repo(wire)
492 return repo.ui.configlist(section, name, untrusted=untrusted)
480 return repo.ui.configlist(section, name, untrusted=untrusted)
493
481
494 @reraise_safe_exceptions
482 @reraise_safe_exceptions
495 def is_large_file(self, wire, path):
483 def is_large_file(self, wire, path):
496 return largefiles.lfutil.isstandin(path)
484 return largefiles.lfutil.isstandin(path)
497
485
498 @reraise_safe_exceptions
486 @reraise_safe_exceptions
499 def in_store(self, wire, sha):
487 def in_store(self, wire, sha):
500 repo = self._factory.repo(wire)
488 repo = self._factory.repo(wire)
501 return largefiles.lfutil.instore(repo, sha)
489 return largefiles.lfutil.instore(repo, sha)
502
490
503 @reraise_safe_exceptions
491 @reraise_safe_exceptions
504 def in_user_cache(self, wire, sha):
492 def in_user_cache(self, wire, sha):
505 repo = self._factory.repo(wire)
493 repo = self._factory.repo(wire)
506 return largefiles.lfutil.inusercache(repo.ui, sha)
494 return largefiles.lfutil.inusercache(repo.ui, sha)
507
495
508 @reraise_safe_exceptions
496 @reraise_safe_exceptions
509 def store_path(self, wire, sha):
497 def store_path(self, wire, sha):
510 repo = self._factory.repo(wire)
498 repo = self._factory.repo(wire)
511 return largefiles.lfutil.storepath(repo, sha)
499 return largefiles.lfutil.storepath(repo, sha)
512
500
513 @reraise_safe_exceptions
501 @reraise_safe_exceptions
514 def link(self, wire, sha, path):
502 def link(self, wire, sha, path):
515 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
516 largefiles.lfutil.link(
504 largefiles.lfutil.link(
517 largefiles.lfutil.usercachepath(repo.ui, sha), path)
505 largefiles.lfutil.usercachepath(repo.ui, sha), path)
518
506
519 @reraise_safe_exceptions
507 @reraise_safe_exceptions
520 def localrepository(self, wire, create=False):
508 def localrepository(self, wire, create=False):
521 self._factory.repo(wire, create=create)
509 self._factory.repo(wire, create=create)
522
510
523 @reraise_safe_exceptions
511 @reraise_safe_exceptions
524 def lookup(self, wire, revision, both):
512 def lookup(self, wire, revision, both):
525 # TODO Paris: Ugly hack to "deserialize" long for msgpack
513 # TODO Paris: Ugly hack to "deserialize" long for msgpack
526 if isinstance(revision, float):
514 if isinstance(revision, float):
527 revision = long(revision)
515 revision = long(revision)
528 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
529 try:
517 try:
530 ctx = repo[revision]
518 ctx = repo[revision]
531 except RepoLookupError:
519 except RepoLookupError:
532 raise exceptions.LookupException(revision)
520 raise exceptions.LookupException(revision)
533 except LookupError as e:
521 except LookupError as e:
534 raise exceptions.LookupException(e.name)
522 raise exceptions.LookupException(e.name)
535
523
536 if not both:
524 if not both:
537 return ctx.hex()
525 return ctx.hex()
538
526
539 ctx = repo[ctx.hex()]
527 ctx = repo[ctx.hex()]
540 return ctx.hex(), ctx.rev()
528 return ctx.hex(), ctx.rev()
541
529
542 @reraise_safe_exceptions
530 @reraise_safe_exceptions
543 def pull(self, wire, url, commit_ids=None):
531 def pull(self, wire, url, commit_ids=None):
544 repo = self._factory.repo(wire)
532 repo = self._factory.repo(wire)
545 remote = peer(repo, {}, url)
533 remote = peer(repo, {}, url)
546 if commit_ids:
534 if commit_ids:
547 commit_ids = [bin(commit_id) for commit_id in commit_ids]
535 commit_ids = [bin(commit_id) for commit_id in commit_ids]
548
536
549 return exchange.pull(
537 return exchange.pull(
550 repo, remote, heads=commit_ids, force=None).cgresult
538 repo, remote, heads=commit_ids, force=None).cgresult
551
539
552 @reraise_safe_exceptions
540 @reraise_safe_exceptions
553 def revision(self, wire, rev):
541 def revision(self, wire, rev):
554 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
555 ctx = repo[rev]
543 ctx = repo[rev]
556 return ctx.rev()
544 return ctx.rev()
557
545
558 @reraise_safe_exceptions
546 @reraise_safe_exceptions
559 def rev_range(self, wire, filter):
547 def rev_range(self, wire, filter):
560 repo = self._factory.repo(wire)
548 repo = self._factory.repo(wire)
561 revisions = [rev for rev in revrange(repo, filter)]
549 revisions = [rev for rev in revrange(repo, filter)]
562 return revisions
550 return revisions
563
551
564 @reraise_safe_exceptions
552 @reraise_safe_exceptions
565 def rev_range_hash(self, wire, node):
553 def rev_range_hash(self, wire, node):
566 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
567
555
568 def get_revs(repo, rev_opt):
556 def get_revs(repo, rev_opt):
569 if rev_opt:
557 if rev_opt:
570 revs = revrange(repo, rev_opt)
558 revs = revrange(repo, rev_opt)
571 if len(revs) == 0:
559 if len(revs) == 0:
572 return (nullrev, nullrev)
560 return (nullrev, nullrev)
573 return max(revs), min(revs)
561 return max(revs), min(revs)
574 else:
562 else:
575 return len(repo) - 1, 0
563 return len(repo) - 1, 0
576
564
577 stop, start = get_revs(repo, [node + ':'])
565 stop, start = get_revs(repo, [node + ':'])
578 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
566 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
579 return revs
567 return revs
580
568
581 @reraise_safe_exceptions
569 @reraise_safe_exceptions
582 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
570 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
583 other_path = kwargs.pop('other_path', None)
571 other_path = kwargs.pop('other_path', None)
584
572
585 # case when we want to compare two independent repositories
573 # case when we want to compare two independent repositories
586 if other_path and other_path != wire["path"]:
574 if other_path and other_path != wire["path"]:
587 baseui = self._factory._create_config(wire["config"])
575 baseui = self._factory._create_config(wire["config"])
588 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
576 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
589 else:
577 else:
590 repo = self._factory.repo(wire)
578 repo = self._factory.repo(wire)
591 return list(repo.revs(rev_spec, *args))
579 return list(repo.revs(rev_spec, *args))
592
580
593 @reraise_safe_exceptions
581 @reraise_safe_exceptions
594 def strip(self, wire, revision, update, backup):
582 def strip(self, wire, revision, update, backup):
595 repo = self._factory.repo(wire)
583 repo = self._factory.repo(wire)
596 ctx = repo[revision]
584 ctx = repo[revision]
597 hgext_strip(
585 hgext_strip(
598 repo.baseui, repo, ctx.node(), update=update, backup=backup)
586 repo.baseui, repo, ctx.node(), update=update, backup=backup)
599
587
600 @reraise_safe_exceptions
588 @reraise_safe_exceptions
601 def tag(self, wire, name, revision, message, local, user,
589 def tag(self, wire, name, revision, message, local, user,
602 tag_time, tag_timezone):
590 tag_time, tag_timezone):
603 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
604 ctx = repo[revision]
592 ctx = repo[revision]
605 node = ctx.node()
593 node = ctx.node()
606
594
607 date = (tag_time, tag_timezone)
595 date = (tag_time, tag_timezone)
608 try:
596 try:
609 repo.tag(name, node, message, local, user, date)
597 repo.tag(name, node, message, local, user, date)
610 except Abort as e:
598 except Abort as e:
611 log.exception("Tag operation aborted")
599 log.exception("Tag operation aborted")
612 # Exception can contain unicode which we convert
600 # Exception can contain unicode which we convert
613 raise exceptions.AbortException(repr(e))
601 raise exceptions.AbortException(repr(e))
614
602
615 @reraise_safe_exceptions
603 @reraise_safe_exceptions
616 def tags(self, wire):
604 def tags(self, wire):
617 repo = self._factory.repo(wire)
605 repo = self._factory.repo(wire)
618 return repo.tags()
606 return repo.tags()
619
607
620 @reraise_safe_exceptions
608 @reraise_safe_exceptions
621 def update(self, wire, node=None, clean=False):
609 def update(self, wire, node=None, clean=False):
622 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
623 baseui = self._factory._create_config(wire['config'])
611 baseui = self._factory._create_config(wire['config'])
624 commands.update(baseui, repo, node=node, clean=clean)
612 commands.update(baseui, repo, node=node, clean=clean)
625
613
626 @reraise_safe_exceptions
614 @reraise_safe_exceptions
627 def identify(self, wire):
615 def identify(self, wire):
628 repo = self._factory.repo(wire)
616 repo = self._factory.repo(wire)
629 baseui = self._factory._create_config(wire['config'])
617 baseui = self._factory._create_config(wire['config'])
630 output = io.BytesIO()
618 output = io.BytesIO()
631 baseui.write = output.write
619 baseui.write = output.write
632 # This is required to get a full node id
620 # This is required to get a full node id
633 baseui.debugflag = True
621 baseui.debugflag = True
634 commands.identify(baseui, repo, id=True)
622 commands.identify(baseui, repo, id=True)
635
623
636 return output.getvalue()
624 return output.getvalue()
637
625
638 @reraise_safe_exceptions
626 @reraise_safe_exceptions
639 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
627 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
640 hooks=True):
628 hooks=True):
641 repo = self._factory.repo(wire)
629 repo = self._factory.repo(wire)
642 baseui = self._factory._create_config(wire['config'], hooks=hooks)
630 baseui = self._factory._create_config(wire['config'], hooks=hooks)
643
631
644 # Mercurial internally has a lot of logic that only checks whether
632 # Mercurial internally has a lot of logic that only checks whether
645 # an option is defined, so we pass only the options that are actually set
633 # an option is defined, so we pass only the options that are actually set
646 opts = {}
634 opts = {}
647 if bookmark:
635 if bookmark:
648 opts['bookmark'] = bookmark
636 opts['bookmark'] = bookmark
649 if branch:
637 if branch:
650 opts['branch'] = branch
638 opts['branch'] = branch
651 if revision:
639 if revision:
652 opts['rev'] = revision
640 opts['rev'] = revision
653
641
654 commands.pull(baseui, repo, source, **opts)
642 commands.pull(baseui, repo, source, **opts)
655
643
656 @reraise_safe_exceptions
644 @reraise_safe_exceptions
657 def heads(self, wire, branch=None):
645 def heads(self, wire, branch=None):
658 repo = self._factory.repo(wire)
646 repo = self._factory.repo(wire)
659 baseui = self._factory._create_config(wire['config'])
647 baseui = self._factory._create_config(wire['config'])
660 output = io.BytesIO()
648 output = io.BytesIO()
661
649
662 def write(data, **unused_kwargs):
650 def write(data, **unused_kwargs):
663 output.write(data)
651 output.write(data)
664
652
665 baseui.write = write
653 baseui.write = write
666 if branch:
654 if branch:
667 args = [branch]
655 args = [branch]
668 else:
656 else:
669 args = []
657 args = []
670 commands.heads(baseui, repo, template='{node} ', *args)
658 commands.heads(baseui, repo, template='{node} ', *args)
671
659
672 return output.getvalue()
660 return output.getvalue()
673
661
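Because of the '{node} ' template above, heads() returns one space-separated string of full 40-character node hashes; a caller might split it up as sketched here (the hashes are invented).

raw = remote.heads(wire, branch='default')   # e.g. 'b1f2...<40 chars> 9c0d...<40 chars> '
branch_heads = raw.strip().split(' ')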
674 @reraise_safe_exceptions
662 @reraise_safe_exceptions
675 def ancestor(self, wire, revision1, revision2):
663 def ancestor(self, wire, revision1, revision2):
676 repo = self._factory.repo(wire)
664 repo = self._factory.repo(wire)
677 changelog = repo.changelog
665 changelog = repo.changelog
678 lookup = repo.lookup
666 lookup = repo.lookup
679 a = changelog.ancestor(lookup(revision1), lookup(revision2))
667 a = changelog.ancestor(lookup(revision1), lookup(revision2))
680 return hex(a)
668 return hex(a)
681
669
682 @reraise_safe_exceptions
670 @reraise_safe_exceptions
683 def push(self, wire, revisions, dest_path, hooks=True,
671 def push(self, wire, revisions, dest_path, hooks=True,
684 push_branches=False):
672 push_branches=False):
685 repo = self._factory.repo(wire)
673 repo = self._factory.repo(wire)
686 baseui = self._factory._create_config(wire['config'], hooks=hooks)
674 baseui = self._factory._create_config(wire['config'], hooks=hooks)
687 commands.push(baseui, repo, dest=dest_path, rev=revisions,
675 commands.push(baseui, repo, dest=dest_path, rev=revisions,
688 new_branch=push_branches)
676 new_branch=push_branches)
689
677
690 @reraise_safe_exceptions
678 @reraise_safe_exceptions
691 def merge(self, wire, revision):
679 def merge(self, wire, revision):
692 repo = self._factory.repo(wire)
680 repo = self._factory.repo(wire)
693 baseui = self._factory._create_config(wire['config'])
681 baseui = self._factory._create_config(wire['config'])
694 repo.ui.setconfig('ui', 'merge', 'internal:dump')
682 repo.ui.setconfig('ui', 'merge', 'internal:dump')
695
683
696 # In case subrepositories are used, mercurial prompts the user in
684 # In case subrepositories are used, mercurial prompts the user in
697 # case of merge conflicts or different subrepository sources. By
685 # case of merge conflicts or different subrepository sources. By
698 # setting the interactive flag to `False` mercurial doesn't prompt the
686 # setting the interactive flag to `False` mercurial doesn't prompt the
699 # user but instead uses a default value.
687 # user but instead uses a default value.
700 repo.ui.setconfig('ui', 'interactive', False)
688 repo.ui.setconfig('ui', 'interactive', False)
701
689
702 commands.merge(baseui, repo, rev=revision)
690 commands.merge(baseui, repo, rev=revision)
703
691
704 @reraise_safe_exceptions
692 @reraise_safe_exceptions
705 def commit(self, wire, message, username):
693 def commit(self, wire, message, username):
706 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
707 baseui = self._factory._create_config(wire['config'])
695 baseui = self._factory._create_config(wire['config'])
708 repo.ui.setconfig('ui', 'username', username)
696 repo.ui.setconfig('ui', 'username', username)
709 commands.commit(baseui, repo, message=message)
697 commands.commit(baseui, repo, message=message)
710
698
711 @reraise_safe_exceptions
699 @reraise_safe_exceptions
712 def rebase(self, wire, source=None, dest=None, abort=False):
700 def rebase(self, wire, source=None, dest=None, abort=False):
713 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
714 baseui = self._factory._create_config(wire['config'])
702 baseui = self._factory._create_config(wire['config'])
715 repo.ui.setconfig('ui', 'merge', 'internal:dump')
703 repo.ui.setconfig('ui', 'merge', 'internal:dump')
716 rebase.rebase(
704 rebase.rebase(
717 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
705 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
718
706
719 @reraise_safe_exceptions
707 @reraise_safe_exceptions
720 def bookmark(self, wire, bookmark, revision=None):
708 def bookmark(self, wire, bookmark, revision=None):
721 repo = self._factory.repo(wire)
709 repo = self._factory.repo(wire)
722 baseui = self._factory._create_config(wire['config'])
710 baseui = self._factory._create_config(wire['config'])
723 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
711 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,395 +1,404 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2017 RodeCode GmbH
4 # Copyright (C) 2014-2017 RodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
21 import sys
22 import json
23 import logging
20 import collections
24 import collections
21 import importlib
25 import importlib
22 import io
23 import json
24 import subprocess
26 import subprocess
25 import sys
27
26 from httplib import HTTPConnection
28 from httplib import HTTPConnection
27
29
28
30
29 import mercurial.scmutil
31 import mercurial.scmutil
30 import mercurial.node
32 import mercurial.node
31 import Pyro4
33 import Pyro4
32 import simplejson as json
34 import simplejson as json
33
35
34 from vcsserver import exceptions
36 from vcsserver import exceptions
35
37
38 log = logging.getLogger(__name__)
39
36
40
37 class HooksHttpClient(object):
41 class HooksHttpClient(object):
38 connection = None
42 connection = None
39
43
40 def __init__(self, hooks_uri):
44 def __init__(self, hooks_uri):
41 self.hooks_uri = hooks_uri
45 self.hooks_uri = hooks_uri
42
46
43 def __call__(self, method, extras):
47 def __call__(self, method, extras):
44 connection = HTTPConnection(self.hooks_uri)
48 connection = HTTPConnection(self.hooks_uri)
45 body = self._serialize(method, extras)
49 body = self._serialize(method, extras)
46 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
47 response = connection.getresponse()
51 response = connection.getresponse()
48 return json.loads(response.read())
52 return json.loads(response.read())
49
53
50 def _serialize(self, hook_name, extras):
54 def _serialize(self, hook_name, extras):
51 data = {
55 data = {
52 'method': hook_name,
56 'method': hook_name,
53 'extras': extras
57 'extras': extras
54 }
58 }
55 return json.dumps(data)
59 return json.dumps(data)
56
60
57
61
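A sketch of the round trip HooksHttpClient performs; the host/port and the extras content are made up, but the request body follows _serialize() above and the response keys follow what _call_hook() and _handle_exception() read further down.

client = HooksHttpClient('127.0.0.1:9200')
# client('pre_push', {'username': 'admin', 'repository': 'repo1'}) POSTs to '/':
#   {"method": "pre_push", "extras": {"username": "admin", "repository": "repo1"}}
# and expects a JSON reply along the lines of:
#   {"status": 0, "output": "", "exception": null, "exception_args": [],
#    "exception_traceback": null}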
58 class HooksDummyClient(object):
62 class HooksDummyClient(object):
59 def __init__(self, hooks_module):
63 def __init__(self, hooks_module):
60 self._hooks_module = importlib.import_module(hooks_module)
64 self._hooks_module = importlib.import_module(hooks_module)
61
65
62 def __call__(self, hook_name, extras):
66 def __call__(self, hook_name, extras):
63 with self._hooks_module.Hooks() as hooks:
67 with self._hooks_module.Hooks() as hooks:
64 return getattr(hooks, hook_name)(extras)
68 return getattr(hooks, hook_name)(extras)
65
69
66
70
67 class HooksPyro4Client(object):
71 class HooksPyro4Client(object):
68 def __init__(self, hooks_uri):
72 def __init__(self, hooks_uri):
69 self.hooks_uri = hooks_uri
73 self.hooks_uri = hooks_uri
70
74
71 def __call__(self, hook_name, extras):
75 def __call__(self, hook_name, extras):
72 with Pyro4.Proxy(self.hooks_uri) as hooks:
76 with Pyro4.Proxy(self.hooks_uri) as hooks:
73 return getattr(hooks, hook_name)(extras)
77 return getattr(hooks, hook_name)(extras)
74
78
75
79
76 class RemoteMessageWriter(object):
80 class RemoteMessageWriter(object):
77 """Writer base class."""
81 """Writer base class."""
78 def write(self, message):
82 def write(self, message):
79 raise NotImplementedError()
83 raise NotImplementedError()
80
84
81
85
82 class HgMessageWriter(RemoteMessageWriter):
86 class HgMessageWriter(RemoteMessageWriter):
83 """Writer that knows how to send messages to mercurial clients."""
87 """Writer that knows how to send messages to mercurial clients."""
84
88
85 def __init__(self, ui):
89 def __init__(self, ui):
86 self.ui = ui
90 self.ui = ui
87
91
88 def write(self, message):
92 def write(self, message):
89 # TODO: Check why the quiet flag is set by default.
93 # TODO: Check why the quiet flag is set by default.
90 old = self.ui.quiet
94 old = self.ui.quiet
91 self.ui.quiet = False
95 self.ui.quiet = False
92 self.ui.status(message.encode('utf-8'))
96 self.ui.status(message.encode('utf-8'))
93 self.ui.quiet = old
97 self.ui.quiet = old
94
98
95
99
96 class GitMessageWriter(RemoteMessageWriter):
100 class GitMessageWriter(RemoteMessageWriter):
97 """Writer that knows how to send messages to git clients."""
101 """Writer that knows how to send messages to git clients."""
98
102
99 def __init__(self, stdout=None):
103 def __init__(self, stdout=None):
100 self.stdout = stdout or sys.stdout
104 self.stdout = stdout or sys.stdout
101
105
102 def write(self, message):
106 def write(self, message):
103 self.stdout.write(message.encode('utf-8'))
107 self.stdout.write(message.encode('utf-8'))
104
108
105
109
106 def _handle_exception(result):
110 def _handle_exception(result):
107 exception_class = result.get('exception')
111 exception_class = result.get('exception')
112 exception_traceback = result.get('exception_traceback')
113
114 if exception_traceback:
115 log.error('Got traceback from remote call:%s', exception_traceback)
116
108 if exception_class == 'HTTPLockedRC':
117 if exception_class == 'HTTPLockedRC':
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
118 raise exceptions.RepositoryLockedException(*result['exception_args'])
110 elif exception_class == 'RepositoryError':
119 elif exception_class == 'RepositoryError':
111 raise exceptions.VcsException(*result['exception_args'])
120 raise exceptions.VcsException(*result['exception_args'])
112 elif exception_class:
121 elif exception_class:
113 raise Exception('Got remote exception "%s" with args "%s"' %
122 raise Exception('Got remote exception "%s" with args "%s"' %
114 (exception_class, result['exception_args']))
123 (exception_class, result['exception_args']))
115
124
116
125
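An illustrative (fabricated) result dictionary showing the new behaviour: the remote traceback is logged first, then the exception class name is mapped back onto a local exception type.

result = {
    'status': 1,
    'output': '',
    'exception': 'HTTPLockedRC',
    'exception_args': ['repo1', 'locked by admin'],
    'exception_traceback': 'Traceback (most recent call last): ...',
}
# _handle_exception(result) logs the traceback via log.error() and then raises
# exceptions.RepositoryLockedException('repo1', 'locked by admin')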
117 def _get_hooks_client(extras):
126 def _get_hooks_client(extras):
118 if 'hooks_uri' in extras:
127 if 'hooks_uri' in extras:
119 protocol = extras.get('hooks_protocol')
128 protocol = extras.get('hooks_protocol')
120 return (
129 return (
121 HooksHttpClient(extras['hooks_uri'])
130 HooksHttpClient(extras['hooks_uri'])
122 if protocol == 'http'
131 if protocol == 'http'
123 else HooksPyro4Client(extras['hooks_uri'])
132 else HooksPyro4Client(extras['hooks_uri'])
124 )
133 )
125 else:
134 else:
126 return HooksDummyClient(extras['hooks_module'])
135 return HooksDummyClient(extras['hooks_module'])
127
136
128
137
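How the client selection above plays out for the three supported configurations; the URIs and the module name are placeholders.

# HTTP callback daemon
_get_hooks_client({'hooks_uri': '127.0.0.1:9200', 'hooks_protocol': 'http'})   # -> HooksHttpClient
# Pyro4 callback daemon (any hooks_uri with a protocol other than 'http')
_get_hooks_client({'hooks_uri': 'PYRO:hooks@localhost:9999'})                  # -> HooksPyro4Client
# direct, in-process hooks module (no hooks_uri at all)
_get_hooks_client({'hooks_module': 'rhodecode.lib.hooks_daemon'})              # -> HooksDummyClient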
129 def _call_hook(hook_name, extras, writer):
138 def _call_hook(hook_name, extras, writer):
130 hooks = _get_hooks_client(extras)
139 hooks = _get_hooks_client(extras)
131 result = hooks(hook_name, extras)
140 result = hooks(hook_name, extras)
132 writer.write(result['output'])
141 writer.write(result['output'])
133 _handle_exception(result)
142 _handle_exception(result)
134
143
135 return result['status']
144 return result['status']
136
145
137
146
138 def _extras_from_ui(ui):
147 def _extras_from_ui(ui):
139 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
148 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
140 return extras
149 return extras
141
150
142
151
143 def repo_size(ui, repo, **kwargs):
152 def repo_size(ui, repo, **kwargs):
144 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
153 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
145
154
146
155
147 def pre_pull(ui, repo, **kwargs):
156 def pre_pull(ui, repo, **kwargs):
148 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
157 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
149
158
150
159
151 def post_pull(ui, repo, **kwargs):
160 def post_pull(ui, repo, **kwargs):
152 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
161 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
153
162
154
163
155 def pre_push(ui, repo, node=None, **kwargs):
164 def pre_push(ui, repo, node=None, **kwargs):
156 extras = _extras_from_ui(ui)
165 extras = _extras_from_ui(ui)
157
166
158 rev_data = []
167 rev_data = []
159 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
168 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
160 branches = collections.defaultdict(list)
169 branches = collections.defaultdict(list)
161 for commit_id, branch in _rev_range_hash(repo, node, with_branch=True):
170 for commit_id, branch in _rev_range_hash(repo, node, with_branch=True):
162 branches[branch].append(commit_id)
171 branches[branch].append(commit_id)
163
172
164 for branch, commits in branches.iteritems():
173 for branch, commits in branches.iteritems():
165 old_rev = kwargs.get('node_last') or commits[0]
174 old_rev = kwargs.get('node_last') or commits[0]
166 rev_data.append({
175 rev_data.append({
167 'old_rev': old_rev,
176 'old_rev': old_rev,
168 'new_rev': commits[-1],
177 'new_rev': commits[-1],
169 'ref': '',
178 'ref': '',
170 'type': 'branch',
179 'type': 'branch',
171 'name': branch,
180 'name': branch,
172 })
181 })
173
182
174 extras['commit_ids'] = rev_data
183 extras['commit_ids'] = rev_data
175 return _call_hook('pre_push', extras, HgMessageWriter(ui))
184 return _call_hook('pre_push', extras, HgMessageWriter(ui))
176
185
177
186
178 def _rev_range_hash(repo, node, with_branch=False):
187 def _rev_range_hash(repo, node, with_branch=False):
179
188
180 commits = []
189 commits = []
181 for rev in xrange(repo[node], len(repo)):
190 for rev in xrange(repo[node], len(repo)):
182 ctx = repo[rev]
191 ctx = repo[rev]
183 commit_id = mercurial.node.hex(ctx.node())
192 commit_id = mercurial.node.hex(ctx.node())
184 branch = ctx.branch()
193 branch = ctx.branch()
185 if with_branch:
194 if with_branch:
186 commits.append((commit_id, branch))
195 commits.append((commit_id, branch))
187 else:
196 else:
188 commits.append(commit_id)
197 commits.append(commit_id)
189
198
190 return commits
199 return commits
191
200
192
201
193 def post_push(ui, repo, node, **kwargs):
202 def post_push(ui, repo, node, **kwargs):
194 commit_ids = _rev_range_hash(repo, node)
203 commit_ids = _rev_range_hash(repo, node)
195
204
196 extras = _extras_from_ui(ui)
205 extras = _extras_from_ui(ui)
197 extras['commit_ids'] = commit_ids
206 extras['commit_ids'] = commit_ids
198
207
199 return _call_hook('post_push', extras, HgMessageWriter(ui))
208 return _call_hook('post_push', extras, HgMessageWriter(ui))
200
209
201
210
202 # backward compat
211 # backward compat
203 log_pull_action = post_pull
212 log_pull_action = post_pull
204
213
205 # backward compat
214 # backward compat
206 log_push_action = post_push
215 log_push_action = post_push
207
216
208
217
209 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
218 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
210 """
219 """
211 Old hook name: keep here for backward compatibility.
220 Old hook name: keep here for backward compatibility.
212
221
213 This is only required when the installed git hooks are not upgraded.
222 This is only required when the installed git hooks are not upgraded.
214 """
223 """
215 pass
224 pass
216
225
217
226
218 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
227 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
219 """
228 """
220 Old hook name: keep here for backward compatibility.
229 Old hook name: keep here for backward compatibility.
221
230
222 This is only required when the installed git hooks are not upgraded.
231 This is only required when the installed git hooks are not upgraded.
223 """
232 """
224 pass
233 pass
225
234
226
235
227 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
236 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
228
237
229
238
230 def git_pre_pull(extras):
239 def git_pre_pull(extras):
231 """
240 """
232 Pre pull hook.
241 Pre pull hook.
233
242
234 :param extras: dictionary containing the keys defined in simplevcs
243 :param extras: dictionary containing the keys defined in simplevcs
235 :type extras: dict
244 :type extras: dict
236
245
237 :return: status code of the hook. 0 for success.
246 :return: status code of the hook. 0 for success.
238 :rtype: int
247 :rtype: int
239 """
248 """
240 if 'pull' not in extras['hooks']:
249 if 'pull' not in extras['hooks']:
241 return HookResponse(0, '')
250 return HookResponse(0, '')
242
251
243 stdout = io.BytesIO()
252 stdout = io.BytesIO()
244 try:
253 try:
245 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
254 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
246 except Exception as error:
255 except Exception as error:
247 status = 128
256 status = 128
248 stdout.write('ERROR: %s\n' % str(error))
257 stdout.write('ERROR: %s\n' % str(error))
249
258
250 return HookResponse(status, stdout.getvalue())
259 return HookResponse(status, stdout.getvalue())
251
260
252
261
253 def git_post_pull(extras):
262 def git_post_pull(extras):
254 """
263 """
255 Post pull hook.
264 Post pull hook.
256
265
257 :param extras: dictionary containing the keys defined in simplevcs
266 :param extras: dictionary containing the keys defined in simplevcs
258 :type extras: dict
267 :type extras: dict
259
268
260 :return: status code of the hook. 0 for success.
269 :return: status code of the hook. 0 for success.
261 :rtype: int
270 :rtype: int
262 """
271 """
263 if 'pull' not in extras['hooks']:
272 if 'pull' not in extras['hooks']:
264 return HookResponse(0, '')
273 return HookResponse(0, '')
265
274
266 stdout = io.BytesIO()
275 stdout = io.BytesIO()
267 try:
276 try:
268 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
277 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
269 except Exception as error:
278 except Exception as error:
270 status = 128
279 status = 128
271 stdout.write('ERROR: %s\n' % error)
280 stdout.write('ERROR: %s\n' % error)
272
281
273 return HookResponse(status, stdout.getvalue())
282 return HookResponse(status, stdout.getvalue())
274
283
275
284
276 def _parse_git_ref_lines(revision_lines):
285 def _parse_git_ref_lines(revision_lines):
277 rev_data = []
286 rev_data = []
278 for revision_line in revision_lines or []:
287 for revision_line in revision_lines or []:
279 old_rev, new_rev, ref = revision_line.strip().split(' ')
288 old_rev, new_rev, ref = revision_line.strip().split(' ')
280 ref_data = ref.split('/', 2)
289 ref_data = ref.split('/', 2)
281 if ref_data[1] in ('tags', 'heads'):
290 if ref_data[1] in ('tags', 'heads'):
282 rev_data.append({
291 rev_data.append({
283 'old_rev': old_rev,
292 'old_rev': old_rev,
284 'new_rev': new_rev,
293 'new_rev': new_rev,
285 'ref': ref,
294 'ref': ref,
286 'type': ref_data[1],
295 'type': ref_data[1],
287 'name': ref_data[2],
296 'name': ref_data[2],
288 })
297 })
289 return rev_data
298 return rev_data
290
299
291
300
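One fabricated pre-receive line and the dictionary _parse_git_ref_lines() derives from it (a real line carries two 40-character SHAs and a ref name):

line = ('a' * 40) + ' ' + ('b' * 40) + ' refs/heads/master'
_parse_git_ref_lines([line])
# -> [{'old_rev': 'aaa...', 'new_rev': 'bbb...', 'ref': 'refs/heads/master',
#      'type': 'heads', 'name': 'master'}]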
292 def git_pre_receive(unused_repo_path, revision_lines, env):
301 def git_pre_receive(unused_repo_path, revision_lines, env):
293 """
302 """
294 Pre push hook.
303 Pre push hook.
295
304
296 :param extras: dictionary containing the keys defined in simplevcs
305 :param extras: dictionary containing the keys defined in simplevcs
297 :type extras: dict
306 :type extras: dict
298
307
299 :return: status code of the hook. 0 for success.
308 :return: status code of the hook. 0 for success.
300 :rtype: int
309 :rtype: int
301 """
310 """
302 extras = json.loads(env['RC_SCM_DATA'])
311 extras = json.loads(env['RC_SCM_DATA'])
303 rev_data = _parse_git_ref_lines(revision_lines)
312 rev_data = _parse_git_ref_lines(revision_lines)
304 if 'push' not in extras['hooks']:
313 if 'push' not in extras['hooks']:
305 return 0
314 return 0
306 extras['commit_ids'] = rev_data
315 extras['commit_ids'] = rev_data
307 return _call_hook('pre_push', extras, GitMessageWriter())
316 return _call_hook('pre_push', extras, GitMessageWriter())
308
317
309
318
310 def _run_command(arguments):
319 def _run_command(arguments):
311 """
320 """
312 Run the specified command and return the stdout.
321 Run the specified command and return the stdout.
313
322
314 :param arguments: sequence of program arguments (including the program name)
323 :param arguments: sequence of program arguments (including the program name)
315 :type arguments: list[str]
324 :type arguments: list[str]
316 """
325 """
317 # TODO(skreft): refactor this method and all the other similar ones.
326 # TODO(skreft): refactor this method and all the other similar ones.
318 # Probably this should be using subprocessio.
327 # Probably this should be using subprocessio.
319 process = subprocess.Popen(
328 process = subprocess.Popen(
320 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
329 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
321 stdout, _ = process.communicate()
330 stdout, _ = process.communicate()
322
331
323 if process.returncode != 0:
332 if process.returncode != 0:
324 raise Exception(
333 raise Exception(
325 'Command %s exited with exit code %s' % (arguments,
334 'Command %s exited with exit code %s' % (arguments,
326 process.returncode))
335 process.returncode))
327
336
328 return stdout
337 return stdout
329
338
330
339
331 def git_post_receive(unused_repo_path, revision_lines, env):
340 def git_post_receive(unused_repo_path, revision_lines, env):
332 """
341 """
333 Post push hook.
342 Post push hook.
334
343
335 :param extras: dictionary containing the keys defined in simplevcs
344 :param extras: dictionary containing the keys defined in simplevcs
336 :type extras: dict
345 :type extras: dict
337
346
338 :return: status code of the hook. 0 for success.
347 :return: status code of the hook. 0 for success.
339 :rtype: int
348 :rtype: int
340 """
349 """
341 extras = json.loads(env['RC_SCM_DATA'])
350 extras = json.loads(env['RC_SCM_DATA'])
342 if 'push' not in extras['hooks']:
351 if 'push' not in extras['hooks']:
343 return 0
352 return 0
344
353
345 rev_data = _parse_git_ref_lines(revision_lines)
354 rev_data = _parse_git_ref_lines(revision_lines)
346
355
347 git_revs = []
356 git_revs = []
348
357
349 # N.B.(skreft): it is ok to just call git, as git before calling a
358 # N.B.(skreft): it is ok to just call git, as git before calling a
350 # subcommand sets the PATH environment variable so that it points to the
359 # subcommand sets the PATH environment variable so that it points to the
351 # correct version of the git executable.
360 # correct version of the git executable.
352 empty_commit_id = '0' * 40
361 empty_commit_id = '0' * 40
353 for push_ref in rev_data:
362 for push_ref in rev_data:
354 type_ = push_ref['type']
363 type_ = push_ref['type']
355 if type_ == 'heads':
364 if type_ == 'heads':
356 if push_ref['old_rev'] == empty_commit_id:
365 if push_ref['old_rev'] == empty_commit_id:
357
366
358 # Fix up head revision if needed
367 # Fix up head revision if needed
359 cmd = ['git', 'show', 'HEAD']
368 cmd = ['git', 'show', 'HEAD']
360 try:
369 try:
361 _run_command(cmd)
370 _run_command(cmd)
362 except Exception:
371 except Exception:
363 cmd = ['git', 'symbolic-ref', 'HEAD',
372 cmd = ['git', 'symbolic-ref', 'HEAD',
364 'refs/heads/%s' % push_ref['name']]
373 'refs/heads/%s' % push_ref['name']]
365 print("Setting default branch to %s" % push_ref['name'])
374 print("Setting default branch to %s" % push_ref['name'])
366 _run_command(cmd)
375 _run_command(cmd)
367
376
368 cmd = ['git', 'for-each-ref', '--format=%(refname)',
377 cmd = ['git', 'for-each-ref', '--format=%(refname)',
369 'refs/heads/*']
378 'refs/heads/*']
370 heads = _run_command(cmd)
379 heads = _run_command(cmd)
371 heads = heads.replace(push_ref['ref'], '')
380 heads = heads.replace(push_ref['ref'], '')
372 heads = ' '.join(head for head in heads.splitlines() if head)
381 heads = ' '.join(head for head in heads.splitlines() if head)
373 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
382 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
374 '--', push_ref['new_rev'], '--not', heads]
383 '--', push_ref['new_rev'], '--not', heads]
375 git_revs.extend(_run_command(cmd).splitlines())
384 git_revs.extend(_run_command(cmd).splitlines())
376 elif push_ref['new_rev'] == empty_commit_id:
385 elif push_ref['new_rev'] == empty_commit_id:
377 # delete branch case
386 # delete branch case
378 git_revs.append('delete_branch=>%s' % push_ref['name'])
387 git_revs.append('delete_branch=>%s' % push_ref['name'])
379 else:
388 else:
380 cmd = ['git', 'log',
389 cmd = ['git', 'log',
381 '{old_rev}..{new_rev}'.format(**push_ref),
390 '{old_rev}..{new_rev}'.format(**push_ref),
382 '--reverse', '--pretty=format:%H']
391 '--reverse', '--pretty=format:%H']
383 git_revs.extend(_run_command(cmd).splitlines())
392 git_revs.extend(_run_command(cmd).splitlines())
384 elif type_ == 'tags':
393 elif type_ == 'tags':
385 git_revs.append('tag=>%s' % push_ref['name'])
394 git_revs.append('tag=>%s' % push_ref['name'])
386
395
387 extras['commit_ids'] = git_revs
396 extras['commit_ids'] = git_revs
388
397
389 if 'repo_size' in extras['hooks']:
398 if 'repo_size' in extras['hooks']:
390 try:
399 try:
391 _call_hook('repo_size', extras, GitMessageWriter())
400 _call_hook('repo_size', extras, GitMessageWriter())
392 except:
401 except:
393 pass
402 pass
394
403
395 return _call_hook('post_push', extras, GitMessageWriter())
404 return _call_hook('post_push', extras, GitMessageWriter())
@@ -1,651 +1,640 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26
26
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.delta
29 import svn.delta
30 import svn.diff
30 import svn.diff
31 import svn.fs
31 import svn.fs
32 import svn.repos
32 import svn.repos
33
33
34 from vcsserver import svn_diff
34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 # Set of svn compatible version flags.
42 # Set of svn compatible version flags.
43 # Compare with subversion/svnadmin/svnadmin.c
43 # Compare with subversion/svnadmin/svnadmin.c
44 svn_compatible_versions = set([
44 svn_compatible_versions = set([
45 'pre-1.4-compatible',
45 'pre-1.4-compatible',
46 'pre-1.5-compatible',
46 'pre-1.5-compatible',
47 'pre-1.6-compatible',
47 'pre-1.6-compatible',
48 'pre-1.8-compatible',
48 'pre-1.8-compatible',
49 ])
49 ])
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except Exception as e:
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
58 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in hg remote call")
59 log.exception("Unhandled exception in hg remote call")
60 raise_from_original(exceptions.UnhandledException)
60 raise_from_original(exceptions.UnhandledException)
61 raise
61 raise
62 return wrapper
62 return wrapper
63
63
64
64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
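A hedged sketch of how raise_from_original() is meant to be used, following the same pattern as the reraise_safe_exceptions wrapper above: the exception type is swapped while the original arguments and traceback are preserved. Note the helper now lives in vcsserver.base, as the changed import shows.

try:
    svn.repos.open('/no/such/repo')          # raises svn.core.SubversionException
except Exception:
    # re-raised as UnhandledException; the traceback still points at svn.repos.open
    raise_from_original(exceptions.UnhandledException)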
76 class SubversionFactory(RepoFactory):
65 class SubversionFactory(RepoFactory):
77
66
78 def _create_repo(self, wire, create, compatible_version):
67 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
68 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
69 if create:
81 fs_config = {}
70 fs_config = {}
82 if compatible_version:
71 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
72 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
73 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
74 .format(compatible_version))
86 log.debug('Create SVN repo with compatible version "%s"',
75 log.debug('Create SVN repo with compatible version "%s"',
87 compatible_version)
76 compatible_version)
88 fs_config[compatible_version] = '1'
77 fs_config[compatible_version] = '1'
89 repo = svn.repos.create(path, "", "", None, fs_config)
78 repo = svn.repos.create(path, "", "", None, fs_config)
90 else:
79 else:
91 repo = svn.repos.open(path)
80 repo = svn.repos.open(path)
92 return repo
81 return repo
93
82
94 def repo(self, wire, create=False, compatible_version=None):
83 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
84 def create_new_repo():
96 return self._create_repo(wire, create, compatible_version)
85 return self._create_repo(wire, create, compatible_version)
97
86
98 return self._repo(wire, create_new_repo)
87 return self._repo(wire, create_new_repo)
99
88
100
89
101
90
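# Illustrative sketch, not part of this changeset: driving the factory
# directly. The cache object and the repository path below are hypothetical;
# in the vcsserver they come from the configured call context.
def _demo_create_repo(repo_cache):
    factory = SubversionFactory(repo_cache)
    wire = {'path': '/tmp/example-svn-repo'}
    # an entry from svn_compatible_versions pins the filesystem format of
    # the newly created repository; omit it to use the newest format
    return factory.repo(wire, create=True,
                        compatible_version='pre-1.8-compatible')
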
102 NODE_TYPE_MAPPING = {
91 NODE_TYPE_MAPPING = {
103 svn.core.svn_node_file: 'file',
92 svn.core.svn_node_file: 'file',
104 svn.core.svn_node_dir: 'dir',
93 svn.core.svn_node_dir: 'dir',
105 }
94 }
106
95
107
96
108 class SvnRemote(object):
97 class SvnRemote(object):
109
98
110 def __init__(self, factory, hg_factory=None):
99 def __init__(self, factory, hg_factory=None):
111 self._factory = factory
100 self._factory = factory
112 # TODO: Remove once we do not use internal Mercurial objects anymore
101 # TODO: Remove once we do not use internal Mercurial objects anymore
113 # for subversion
102 # for subversion
114 self._hg_factory = hg_factory
103 self._hg_factory = hg_factory
115
104
116 @reraise_safe_exceptions
105 @reraise_safe_exceptions
117 def discover_svn_version(self):
106 def discover_svn_version(self):
118 try:
107 try:
119 import svn.core
108 import svn.core
120 svn_ver = svn.core.SVN_VERSION
109 svn_ver = svn.core.SVN_VERSION
121 except ImportError:
110 except ImportError:
122 svn_ver = None
111 svn_ver = None
123 return svn_ver
112 return svn_ver
124
113
125 def check_url(self, url, config_items):
114 def check_url(self, url, config_items):
126 # this can throw an exception if hgsubversion is not installed; we detect that
115 # this can throw an exception if hgsubversion is not installed; we detect that
127 from hgsubversion import svnrepo
116 from hgsubversion import svnrepo
128
117
129 baseui = self._hg_factory._create_config(config_items)
118 baseui = self._hg_factory._create_config(config_items)
130 # the uuid function returns a valid UUID only for a proper repo,
119 # the uuid function returns a valid UUID only for a proper repo,
131 # otherwise it throws an exception
120 # otherwise it throws an exception
132 try:
121 try:
133 svnrepo.svnremoterepo(baseui, url).svn.uuid
122 svnrepo.svnremoterepo(baseui, url).svn.uuid
134 except:
123 except:
135 log.debug("Invalid svn url: %s", url)
124 log.debug("Invalid svn url: %s", url)
136 raise URLError(
125 raise URLError(
137 '"%s" is not a valid Subversion source url.' % (url, ))
126 '"%s" is not a valid Subversion source url.' % (url, ))
138 return True
127 return True
139
128
140 def is_path_valid_repository(self, wire, path):
129 def is_path_valid_repository(self, wire, path):
141 try:
130 try:
142 svn.repos.open(path)
131 svn.repos.open(path)
143 except svn.core.SubversionException:
132 except svn.core.SubversionException:
144 log.debug("Invalid Subversion path %s", path)
133 log.debug("Invalid Subversion path %s", path)
145 return False
134 return False
146 return True
135 return True
147
136
148 def lookup(self, wire, revision):
137 def lookup(self, wire, revision):
149 if revision not in [-1, None, 'HEAD']:
138 if revision not in [-1, None, 'HEAD']:
150 raise NotImplementedError
139 raise NotImplementedError
151 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
152 fs_ptr = svn.repos.fs(repo)
141 fs_ptr = svn.repos.fs(repo)
153 head = svn.fs.youngest_rev(fs_ptr)
142 head = svn.fs.youngest_rev(fs_ptr)
154 return head
143 return head
155
144
156 def lookup_interval(self, wire, start_ts, end_ts):
145 def lookup_interval(self, wire, start_ts, end_ts):
157 repo = self._factory.repo(wire)
146 repo = self._factory.repo(wire)
158 fsobj = svn.repos.fs(repo)
147 fsobj = svn.repos.fs(repo)
159 start_rev = None
148 start_rev = None
160 end_rev = None
149 end_rev = None
161 if start_ts:
150 if start_ts:
162 start_ts_svn = apr_time_t(start_ts)
151 start_ts_svn = apr_time_t(start_ts)
163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
152 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
164 else:
153 else:
165 start_rev = 1
154 start_rev = 1
166 if end_ts:
155 if end_ts:
167 end_ts_svn = apr_time_t(end_ts)
156 end_ts_svn = apr_time_t(end_ts)
168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
157 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
169 else:
158 else:
170 end_rev = svn.fs.youngest_rev(fsobj)
159 end_rev = svn.fs.youngest_rev(fsobj)
171 return start_rev, end_rev
160 return start_rev, end_rev
172
161
173 def revision_properties(self, wire, revision):
162 def revision_properties(self, wire, revision):
174 repo = self._factory.repo(wire)
163 repo = self._factory.repo(wire)
175 fs_ptr = svn.repos.fs(repo)
164 fs_ptr = svn.repos.fs(repo)
176 return svn.fs.revision_proplist(fs_ptr, revision)
165 return svn.fs.revision_proplist(fs_ptr, revision)
177
166
178 def revision_changes(self, wire, revision):
167 def revision_changes(self, wire, revision):
179
168
180 repo = self._factory.repo(wire)
169 repo = self._factory.repo(wire)
181 fsobj = svn.repos.fs(repo)
170 fsobj = svn.repos.fs(repo)
182 rev_root = svn.fs.revision_root(fsobj, revision)
171 rev_root = svn.fs.revision_root(fsobj, revision)
183
172
184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
173 editor = svn.repos.ChangeCollector(fsobj, rev_root)
185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
174 editor_ptr, editor_baton = svn.delta.make_editor(editor)
186 base_dir = ""
175 base_dir = ""
187 send_deltas = False
176 send_deltas = False
188 svn.repos.replay2(
177 svn.repos.replay2(
189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
178 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
190 editor_ptr, editor_baton, None)
179 editor_ptr, editor_baton, None)
191
180
192 added = []
181 added = []
193 changed = []
182 changed = []
194 removed = []
183 removed = []
195
184
196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
185 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
197 for path, change in editor.changes.iteritems():
186 for path, change in editor.changes.iteritems():
198 # TODO: Decide what to do with directory nodes. Subversion can add
187 # TODO: Decide what to do with directory nodes. Subversion can add
199 # empty directories.
188 # empty directories.
200
189
201 if change.item_kind == svn.core.svn_node_dir:
190 if change.item_kind == svn.core.svn_node_dir:
202 continue
191 continue
203 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
192 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
204 added.append(path)
193 added.append(path)
205 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
194 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
206 svn.repos.CHANGE_ACTION_REPLACE]:
195 svn.repos.CHANGE_ACTION_REPLACE]:
207 changed.append(path)
196 changed.append(path)
208 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
197 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
209 removed.append(path)
198 removed.append(path)
210 else:
199 else:
211 raise NotImplementedError(
200 raise NotImplementedError(
212 "Action %s not supported on path %s" % (
201 "Action %s not supported on path %s" % (
213 change.action, path))
202 change.action, path))
214
203
215 changes = {
204 changes = {
216 'added': added,
205 'added': added,
217 'changed': changed,
206 'changed': changed,
218 'removed': removed,
207 'removed': removed,
219 }
208 }
220 return changes
209 return changes
221
210
222 def node_history(self, wire, path, revision, limit):
211 def node_history(self, wire, path, revision, limit):
223 cross_copies = False
212 cross_copies = False
224 repo = self._factory.repo(wire)
213 repo = self._factory.repo(wire)
225 fsobj = svn.repos.fs(repo)
214 fsobj = svn.repos.fs(repo)
226 rev_root = svn.fs.revision_root(fsobj, revision)
215 rev_root = svn.fs.revision_root(fsobj, revision)
227
216
228 history_revisions = []
217 history_revisions = []
229 history = svn.fs.node_history(rev_root, path)
218 history = svn.fs.node_history(rev_root, path)
230 history = svn.fs.history_prev(history, cross_copies)
219 history = svn.fs.history_prev(history, cross_copies)
231 while history:
220 while history:
232 __, node_revision = svn.fs.history_location(history)
221 __, node_revision = svn.fs.history_location(history)
233 history_revisions.append(node_revision)
222 history_revisions.append(node_revision)
234 if limit and len(history_revisions) >= limit:
223 if limit and len(history_revisions) >= limit:
235 break
224 break
236 history = svn.fs.history_prev(history, cross_copies)
225 history = svn.fs.history_prev(history, cross_copies)
237 return history_revisions
226 return history_revisions
238
227
239 def node_properties(self, wire, path, revision):
228 def node_properties(self, wire, path, revision):
240 repo = self._factory.repo(wire)
229 repo = self._factory.repo(wire)
241 fsobj = svn.repos.fs(repo)
230 fsobj = svn.repos.fs(repo)
242 rev_root = svn.fs.revision_root(fsobj, revision)
231 rev_root = svn.fs.revision_root(fsobj, revision)
243 return svn.fs.node_proplist(rev_root, path)
232 return svn.fs.node_proplist(rev_root, path)
244
233
245 def file_annotate(self, wire, path, revision):
234 def file_annotate(self, wire, path, revision):
246 abs_path = 'file://' + urllib.pathname2url(
235 abs_path = 'file://' + urllib.pathname2url(
247 vcspath.join(wire['path'], path))
236 vcspath.join(wire['path'], path))
248 file_uri = svn.core.svn_path_canonicalize(abs_path)
237 file_uri = svn.core.svn_path_canonicalize(abs_path)
249
238
250 start_rev = svn_opt_revision_value_t(0)
239 start_rev = svn_opt_revision_value_t(0)
251 peg_rev = svn_opt_revision_value_t(revision)
240 peg_rev = svn_opt_revision_value_t(revision)
252 end_rev = peg_rev
241 end_rev = peg_rev
253
242
254 annotations = []
243 annotations = []
255
244
256 def receiver(line_no, revision, author, date, line, pool):
245 def receiver(line_no, revision, author, date, line, pool):
257 annotations.append((line_no, revision, line))
246 annotations.append((line_no, revision, line))
258
247
259 # TODO: Cannot use blame5, missing typemap function in the swig code
248 # TODO: Cannot use blame5, missing typemap function in the swig code
260 try:
249 try:
261 svn.client.blame2(
250 svn.client.blame2(
262 file_uri, peg_rev, start_rev, end_rev,
251 file_uri, peg_rev, start_rev, end_rev,
263 receiver, svn.client.create_context())
252 receiver, svn.client.create_context())
264 except svn.core.SubversionException as exc:
253 except svn.core.SubversionException as exc:
265 log.exception("Error during blame operation.")
254 log.exception("Error during blame operation.")
266 raise Exception(
255 raise Exception(
267 "Blame not supported or file does not exist at path %s. "
256 "Blame not supported or file does not exist at path %s. "
268 "Error %s." % (path, exc))
257 "Error %s." % (path, exc))
269
258
270 return annotations
259 return annotations
271
260
272 def get_node_type(self, wire, path, rev=None):
261 def get_node_type(self, wire, path, rev=None):
273 repo = self._factory.repo(wire)
262 repo = self._factory.repo(wire)
274 fs_ptr = svn.repos.fs(repo)
263 fs_ptr = svn.repos.fs(repo)
275 if rev is None:
264 if rev is None:
276 rev = svn.fs.youngest_rev(fs_ptr)
265 rev = svn.fs.youngest_rev(fs_ptr)
277 root = svn.fs.revision_root(fs_ptr, rev)
266 root = svn.fs.revision_root(fs_ptr, rev)
278 node = svn.fs.check_path(root, path)
267 node = svn.fs.check_path(root, path)
279 return NODE_TYPE_MAPPING.get(node, None)
268 return NODE_TYPE_MAPPING.get(node, None)
280
269
281 def get_nodes(self, wire, path, revision=None):
270 def get_nodes(self, wire, path, revision=None):
282 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
283 fsobj = svn.repos.fs(repo)
272 fsobj = svn.repos.fs(repo)
284 if revision is None:
273 if revision is None:
285 revision = svn.fs.youngest_rev(fsobj)
274 revision = svn.fs.youngest_rev(fsobj)
286 root = svn.fs.revision_root(fsobj, revision)
275 root = svn.fs.revision_root(fsobj, revision)
287 entries = svn.fs.dir_entries(root, path)
276 entries = svn.fs.dir_entries(root, path)
288 result = []
277 result = []
289 for entry_path, entry_info in entries.iteritems():
278 for entry_path, entry_info in entries.iteritems():
290 result.append(
279 result.append(
291 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
280 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
292 return result
281 return result
293
282
294 def get_file_content(self, wire, path, rev=None):
283 def get_file_content(self, wire, path, rev=None):
295 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
296 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
297 if rev is None:
286 if rev is None:
298 rev = svn.fs.youngest_revision(fsobj)
287 rev = svn.fs.youngest_revision(fsobj)
299 root = svn.fs.revision_root(fsobj, rev)
288 root = svn.fs.revision_root(fsobj, rev)
300 content = svn.core.Stream(svn.fs.file_contents(root, path))
289 content = svn.core.Stream(svn.fs.file_contents(root, path))
301 return content.read()
290 return content.read()
302
291
303 def get_file_size(self, wire, path, revision=None):
292 def get_file_size(self, wire, path, revision=None):
304 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
305 fsobj = svn.repos.fs(repo)
294 fsobj = svn.repos.fs(repo)
306 if revision is None:
295 if revision is None:
307 revision = svn.fs.youngest_revision(fsobj)
296 revision = svn.fs.youngest_revision(fsobj)
308 root = svn.fs.revision_root(fsobj, revision)
297 root = svn.fs.revision_root(fsobj, revision)
309 size = svn.fs.file_length(root, path)
298 size = svn.fs.file_length(root, path)
310 return size
299 return size
311
300
312 def create_repository(self, wire, compatible_version=None):
301 def create_repository(self, wire, compatible_version=None):
313 log.info('Creating Subversion repository in path "%s"', wire['path'])
302 log.info('Creating Subversion repository in path "%s"', wire['path'])
314 self._factory.repo(wire, create=True,
303 self._factory.repo(wire, create=True,
315 compatible_version=compatible_version)
304 compatible_version=compatible_version)
316
305
317 def import_remote_repository(self, wire, src_url):
306 def import_remote_repository(self, wire, src_url):
318 repo_path = wire['path']
307 repo_path = wire['path']
319 if not self.is_path_valid_repository(wire, repo_path):
308 if not self.is_path_valid_repository(wire, repo_path):
320 raise Exception(
309 raise Exception(
321 "Path %s is not a valid Subversion repository." % repo_path)
310 "Path %s is not a valid Subversion repository." % repo_path)
322 # TODO: johbo: URL checks ?
311 # TODO: johbo: URL checks ?
323 rdump = subprocess.Popen(
312 rdump = subprocess.Popen(
324 ['svnrdump', 'dump', '--non-interactive', src_url],
313 ['svnrdump', 'dump', '--non-interactive', src_url],
325 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
314 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
326 load = subprocess.Popen(
315 load = subprocess.Popen(
327 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
316 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
328
317
329 # TODO: johbo: This can be a very long operation, might be better
318 # TODO: johbo: This can be a very long operation, might be better
330 # to track some kind of status and provide an api to check if the
319 # to track some kind of status and provide an api to check if the
331 # import is done.
320 # import is done.
332 rdump.wait()
321 rdump.wait()
333 load.wait()
322 load.wait()
334
323
335 if rdump.returncode != 0:
324 if rdump.returncode != 0:
336 errors = rdump.stderr.read()
325 errors = rdump.stderr.read()
337 log.error('svnrdump dump failed: status code %s, message: %s',
326 log.error('svnrdump dump failed: status code %s, message: %s',
338 rdump.returncode, errors)
327 rdump.returncode, errors)
339 reason = 'UNKNOWN'
328 reason = 'UNKNOWN'
340 if 'svnrdump: E230001:' in errors:
329 if 'svnrdump: E230001:' in errors:
341 reason = 'INVALID_CERTIFICATE'
330 reason = 'INVALID_CERTIFICATE'
342 raise Exception(
331 raise Exception(
343 'Failed to dump the remote repository from %s.' % src_url,
332 'Failed to dump the remote repository from %s.' % src_url,
344 reason)
333 reason)
345 if load.returncode != 0:
334 if load.returncode != 0:
346 raise Exception(
335 raise Exception(
347 'Failed to load the dump of remote repository from %s.' %
336 'Failed to load the dump of remote repository from %s.' %
348 (src_url, ))
337 (src_url, ))
349
338
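# For reference, not part of this changeset: the two subprocess calls above
# form the pipeline
#     svnrdump dump --non-interactive <src_url> | svnadmin load <repo_path>
# with svnrdump's stderr captured so the failure can be classified (the
# E230001 marker is mapped to the INVALID_CERTIFICATE reason above).
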
350 def commit(self, wire, message, author, timestamp, updated, removed):
339 def commit(self, wire, message, author, timestamp, updated, removed):
351 assert isinstance(message, str)
340 assert isinstance(message, str)
352 assert isinstance(author, str)
341 assert isinstance(author, str)
353
342
354 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
355 fsobj = svn.repos.fs(repo)
344 fsobj = svn.repos.fs(repo)
356
345
357 rev = svn.fs.youngest_rev(fsobj)
346 rev = svn.fs.youngest_rev(fsobj)
358 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
347 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
359 txn_root = svn.fs.txn_root(txn)
348 txn_root = svn.fs.txn_root(txn)
360
349
361 for node in updated:
350 for node in updated:
362 TxnNodeProcessor(node, txn_root).update()
351 TxnNodeProcessor(node, txn_root).update()
363 for node in removed:
352 for node in removed:
364 TxnNodeProcessor(node, txn_root).remove()
353 TxnNodeProcessor(node, txn_root).remove()
365
354
366 commit_id = svn.repos.fs_commit_txn(repo, txn)
355 commit_id = svn.repos.fs_commit_txn(repo, txn)
367
356
368 if timestamp:
357 if timestamp:
369 apr_time = apr_time_t(timestamp)
358 apr_time = apr_time_t(timestamp)
370 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
359 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
371 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
360 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
372
361
373 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
362 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
374 return commit_id
363 return commit_id
375
364
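# Illustrative sketch, not part of this changeset: the node dicts consumed by
# `commit` through TxnNodeProcessor. Paths, contents and the property are
# hypothetical; 'properties' is optional and removed nodes only need 'path'.
def _demo_commit(remote, wire):
    updated = [{
        'path': 'docs/readme.txt',
        'content': 'hello subversion\n',
        'properties': {'svn:eol-style': 'native'},
    }]
    removed = [{'path': 'old/obsolete.txt'}]
    return remote.commit(
        wire, message='Update readme', author='Example Author',
        timestamp=1483228800, updated=updated, removed=removed)
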
376 def diff(self, wire, rev1, rev2, path1=None, path2=None,
365 def diff(self, wire, rev1, rev2, path1=None, path2=None,
377 ignore_whitespace=False, context=3):
366 ignore_whitespace=False, context=3):
378
367
379 wire.update(cache=False)
368 wire.update(cache=False)
380 repo = self._factory.repo(wire)
369 repo = self._factory.repo(wire)
381 diff_creator = SvnDiffer(
370 diff_creator = SvnDiffer(
382 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
371 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
383 try:
372 try:
384 return diff_creator.generate_diff()
373 return diff_creator.generate_diff()
385 except svn.core.SubversionException as e:
374 except svn.core.SubversionException as e:
386 log.exception(
375 log.exception(
387 "Error during diff operation. "
376 "Error during diff operation. "
388 "Paths might not exist: %s, %s" % (path1, path2))
377 "Paths might not exist: %s, %s" % (path1, path2))
389 return ""
378 return ""
390
379
391
380
392 class SvnDiffer(object):
381 class SvnDiffer(object):
393 """
382 """
394 Utility to create diffs based on difflib and the Subversion api
383 Utility to create diffs based on difflib and the Subversion api
395 """
384 """
396
385
397 binary_content = False
386 binary_content = False
398
387
399 def __init__(
388 def __init__(
400 self, repo, src_rev, src_path, tgt_rev, tgt_path,
389 self, repo, src_rev, src_path, tgt_rev, tgt_path,
401 ignore_whitespace, context):
390 ignore_whitespace, context):
402 self.repo = repo
391 self.repo = repo
403 self.ignore_whitespace = ignore_whitespace
392 self.ignore_whitespace = ignore_whitespace
404 self.context = context
393 self.context = context
405
394
406 fsobj = svn.repos.fs(repo)
395 fsobj = svn.repos.fs(repo)
407
396
408 self.tgt_rev = tgt_rev
397 self.tgt_rev = tgt_rev
409 self.tgt_path = tgt_path or ''
398 self.tgt_path = tgt_path or ''
410 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
399 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
411 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
400 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
412
401
413 self.src_rev = src_rev
402 self.src_rev = src_rev
414 self.src_path = src_path or self.tgt_path
403 self.src_path = src_path or self.tgt_path
415 self.src_root = svn.fs.revision_root(fsobj, src_rev)
404 self.src_root = svn.fs.revision_root(fsobj, src_rev)
416 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
405 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
417
406
418 self._validate()
407 self._validate()
419
408
420 def _validate(self):
409 def _validate(self):
421 if (self.tgt_kind != svn.core.svn_node_none and
410 if (self.tgt_kind != svn.core.svn_node_none and
422 self.src_kind != svn.core.svn_node_none and
411 self.src_kind != svn.core.svn_node_none and
423 self.src_kind != self.tgt_kind):
412 self.src_kind != self.tgt_kind):
424 # TODO: johbo: proper error handling
413 # TODO: johbo: proper error handling
425 raise Exception(
414 raise Exception(
426 "Source and target are not compatible for diff generation. "
415 "Source and target are not compatible for diff generation. "
427 "Source type: %s, target type: %s" %
416 "Source type: %s, target type: %s" %
428 (self.src_kind, self.tgt_kind))
417 (self.src_kind, self.tgt_kind))
429
418
430 def generate_diff(self):
419 def generate_diff(self):
431 buf = StringIO.StringIO()
420 buf = StringIO.StringIO()
432 if self.tgt_kind == svn.core.svn_node_dir:
421 if self.tgt_kind == svn.core.svn_node_dir:
433 self._generate_dir_diff(buf)
422 self._generate_dir_diff(buf)
434 else:
423 else:
435 self._generate_file_diff(buf)
424 self._generate_file_diff(buf)
436 return buf.getvalue()
425 return buf.getvalue()
437
426
438 def _generate_dir_diff(self, buf):
427 def _generate_dir_diff(self, buf):
439 editor = DiffChangeEditor()
428 editor = DiffChangeEditor()
440 editor_ptr, editor_baton = svn.delta.make_editor(editor)
429 editor_ptr, editor_baton = svn.delta.make_editor(editor)
441 svn.repos.dir_delta2(
430 svn.repos.dir_delta2(
442 self.src_root,
431 self.src_root,
443 self.src_path,
432 self.src_path,
444 '', # src_entry
433 '', # src_entry
445 self.tgt_root,
434 self.tgt_root,
446 self.tgt_path,
435 self.tgt_path,
447 editor_ptr, editor_baton,
436 editor_ptr, editor_baton,
448 authorization_callback_allow_all,
437 authorization_callback_allow_all,
449 False, # text_deltas
438 False, # text_deltas
450 svn.core.svn_depth_infinity, # depth
439 svn.core.svn_depth_infinity, # depth
451 False, # entry_props
440 False, # entry_props
452 False, # ignore_ancestry
441 False, # ignore_ancestry
453 )
442 )
454
443
455 for path, __, change in sorted(editor.changes):
444 for path, __, change in sorted(editor.changes):
456 self._generate_node_diff(
445 self._generate_node_diff(
457 buf, change, path, self.tgt_path, path, self.src_path)
446 buf, change, path, self.tgt_path, path, self.src_path)
458
447
459 def _generate_file_diff(self, buf):
448 def _generate_file_diff(self, buf):
460 change = None
449 change = None
461 if self.src_kind == svn.core.svn_node_none:
450 if self.src_kind == svn.core.svn_node_none:
462 change = "add"
451 change = "add"
463 elif self.tgt_kind == svn.core.svn_node_none:
452 elif self.tgt_kind == svn.core.svn_node_none:
464 change = "delete"
453 change = "delete"
465 tgt_base, tgt_path = vcspath.split(self.tgt_path)
454 tgt_base, tgt_path = vcspath.split(self.tgt_path)
466 src_base, src_path = vcspath.split(self.src_path)
455 src_base, src_path = vcspath.split(self.src_path)
467 self._generate_node_diff(
456 self._generate_node_diff(
468 buf, change, tgt_path, tgt_base, src_path, src_base)
457 buf, change, tgt_path, tgt_base, src_path, src_base)
469
458
470 def _generate_node_diff(
459 def _generate_node_diff(
471 self, buf, change, tgt_path, tgt_base, src_path, src_base):
460 self, buf, change, tgt_path, tgt_base, src_path, src_base):
472
461
473 if self.src_rev == self.tgt_rev and tgt_base == src_base:
462 if self.src_rev == self.tgt_rev and tgt_base == src_base:
474 # return an empty diff when comparing the same revision, to keep
463 # return an empty diff when comparing the same revision, to keep
475 # the behaviour consistent with git/hg
464 # the behaviour consistent with git/hg
476 return
465 return
477
466
478 tgt_full_path = vcspath.join(tgt_base, tgt_path)
467 tgt_full_path = vcspath.join(tgt_base, tgt_path)
479 src_full_path = vcspath.join(src_base, src_path)
468 src_full_path = vcspath.join(src_base, src_path)
480
469
481 self.binary_content = False
470 self.binary_content = False
482 mime_type = self._get_mime_type(tgt_full_path)
471 mime_type = self._get_mime_type(tgt_full_path)
483
472
484 if mime_type and not mime_type.startswith('text'):
473 if mime_type and not mime_type.startswith('text'):
485 self.binary_content = True
474 self.binary_content = True
486 buf.write("=" * 67 + '\n')
475 buf.write("=" * 67 + '\n')
487 buf.write("Cannot display: file marked as a binary type.\n")
476 buf.write("Cannot display: file marked as a binary type.\n")
488 buf.write("svn:mime-type = %s\n" % mime_type)
477 buf.write("svn:mime-type = %s\n" % mime_type)
489 buf.write("Index: %s\n" % (tgt_path, ))
478 buf.write("Index: %s\n" % (tgt_path, ))
490 buf.write("=" * 67 + '\n')
479 buf.write("=" * 67 + '\n')
491 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
480 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
492 'tgt_path': tgt_path})
481 'tgt_path': tgt_path})
493
482
494 if change == 'add':
483 if change == 'add':
495 # TODO: johbo: SVN is missing a zero here compared to git
484 # TODO: johbo: SVN is missing a zero here compared to git
496 buf.write("new file mode 10644\n")
485 buf.write("new file mode 10644\n")
497
486
498 # TODO(marcink): introduce binary detection of svn patches
487 # TODO(marcink): introduce binary detection of svn patches
499 # if self.binary_content:
488 # if self.binary_content:
500 # buf.write('GIT binary patch\n')
489 # buf.write('GIT binary patch\n')
501
490
502 buf.write("--- /dev/null\t(revision 0)\n")
491 buf.write("--- /dev/null\t(revision 0)\n")
503 src_lines = []
492 src_lines = []
504 else:
493 else:
505 if change == 'delete':
494 if change == 'delete':
506 buf.write("deleted file mode 10644\n")
495 buf.write("deleted file mode 10644\n")
507
496
508 # TODO(marcink): introduce binary detection of svn patches
497 # TODO(marcink): introduce binary detection of svn patches
509 # if self.binary_content:
498 # if self.binary_content:
510 # buf.write('GIT binary patch\n')
499 # buf.write('GIT binary patch\n')
511
500
512 buf.write("--- a/%s\t(revision %s)\n" % (
501 buf.write("--- a/%s\t(revision %s)\n" % (
513 src_path, self.src_rev))
502 src_path, self.src_rev))
514 src_lines = self._svn_readlines(self.src_root, src_full_path)
503 src_lines = self._svn_readlines(self.src_root, src_full_path)
515
504
516 if change == 'delete':
505 if change == 'delete':
517 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
506 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
518 tgt_lines = []
507 tgt_lines = []
519 else:
508 else:
520 buf.write("+++ b/%s\t(revision %s)\n" % (
509 buf.write("+++ b/%s\t(revision %s)\n" % (
521 tgt_path, self.tgt_rev))
510 tgt_path, self.tgt_rev))
522 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
511 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
523
512
524 if not self.binary_content:
513 if not self.binary_content:
525 udiff = svn_diff.unified_diff(
514 udiff = svn_diff.unified_diff(
526 src_lines, tgt_lines, context=self.context,
515 src_lines, tgt_lines, context=self.context,
527 ignore_blank_lines=self.ignore_whitespace,
516 ignore_blank_lines=self.ignore_whitespace,
528 ignore_case=False,
517 ignore_case=False,
529 ignore_space_changes=self.ignore_whitespace)
518 ignore_space_changes=self.ignore_whitespace)
530 buf.writelines(udiff)
519 buf.writelines(udiff)
531
520
532 def _get_mime_type(self, path):
521 def _get_mime_type(self, path):
533 try:
522 try:
534 mime_type = svn.fs.node_prop(
523 mime_type = svn.fs.node_prop(
535 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
524 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
536 except svn.core.SubversionException:
525 except svn.core.SubversionException:
537 mime_type = svn.fs.node_prop(
526 mime_type = svn.fs.node_prop(
538 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
527 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
539 return mime_type
528 return mime_type
540
529
541 def _svn_readlines(self, fs_root, node_path):
530 def _svn_readlines(self, fs_root, node_path):
542 if self.binary_content:
531 if self.binary_content:
543 return []
532 return []
544 node_kind = svn.fs.check_path(fs_root, node_path)
533 node_kind = svn.fs.check_path(fs_root, node_path)
545 if node_kind not in (
534 if node_kind not in (
546 svn.core.svn_node_file, svn.core.svn_node_symlink):
535 svn.core.svn_node_file, svn.core.svn_node_symlink):
547 return []
536 return []
548 content = svn.core.Stream(
537 content = svn.core.Stream(
549 svn.fs.file_contents(fs_root, node_path)).read()
538 svn.fs.file_contents(fs_root, node_path)).read()
550 return content.splitlines(True)
539 return content.splitlines(True)
551
540
552
541
553 class DiffChangeEditor(svn.delta.Editor):
542 class DiffChangeEditor(svn.delta.Editor):
554 """
543 """
555 Records changes between two given revisions
544 Records changes between two given revisions
556 """
545 """
557
546
558 def __init__(self):
547 def __init__(self):
559 self.changes = []
548 self.changes = []
560
549
561 def delete_entry(self, path, revision, parent_baton, pool=None):
550 def delete_entry(self, path, revision, parent_baton, pool=None):
562 self.changes.append((path, None, 'delete'))
551 self.changes.append((path, None, 'delete'))
563
552
564 def add_file(
553 def add_file(
565 self, path, parent_baton, copyfrom_path, copyfrom_revision,
554 self, path, parent_baton, copyfrom_path, copyfrom_revision,
566 file_pool=None):
555 file_pool=None):
567 self.changes.append((path, 'file', 'add'))
556 self.changes.append((path, 'file', 'add'))
568
557
569 def open_file(self, path, parent_baton, base_revision, file_pool=None):
558 def open_file(self, path, parent_baton, base_revision, file_pool=None):
570 self.changes.append((path, 'file', 'change'))
559 self.changes.append((path, 'file', 'change'))
571
560
572
561
573 def authorization_callback_allow_all(root, path, pool):
562 def authorization_callback_allow_all(root, path, pool):
574 return True
563 return True
575
564
576
565
577 class TxnNodeProcessor(object):
566 class TxnNodeProcessor(object):
578 """
567 """
579 Utility to process the change of one node within a transaction root.
568 Utility to process the change of one node within a transaction root.
580
569
581 It encapsulates the knowledge of how to add, update or remove
570 It encapsulates the knowledge of how to add, update or remove
582 a node for a given transaction root. The purpose is to support the method
571 a node for a given transaction root. The purpose is to support the method
583 `SvnRemote.commit`.
572 `SvnRemote.commit`.
584 """
573 """
585
574
586 def __init__(self, node, txn_root):
575 def __init__(self, node, txn_root):
587 assert isinstance(node['path'], str)
576 assert isinstance(node['path'], str)
588
577
589 self.node = node
578 self.node = node
590 self.txn_root = txn_root
579 self.txn_root = txn_root
591
580
592 def update(self):
581 def update(self):
593 self._ensure_parent_dirs()
582 self._ensure_parent_dirs()
594 self._add_file_if_node_does_not_exist()
583 self._add_file_if_node_does_not_exist()
595 self._update_file_content()
584 self._update_file_content()
596 self._update_file_properties()
585 self._update_file_properties()
597
586
598 def remove(self):
587 def remove(self):
599 svn.fs.delete(self.txn_root, self.node['path'])
588 svn.fs.delete(self.txn_root, self.node['path'])
600 # TODO: Clean up directory if empty
589 # TODO: Clean up directory if empty
601
590
602 def _ensure_parent_dirs(self):
591 def _ensure_parent_dirs(self):
603 curdir = vcspath.dirname(self.node['path'])
592 curdir = vcspath.dirname(self.node['path'])
604 dirs_to_create = []
593 dirs_to_create = []
605 while not self._svn_path_exists(curdir):
594 while not self._svn_path_exists(curdir):
606 dirs_to_create.append(curdir)
595 dirs_to_create.append(curdir)
607 curdir = vcspath.dirname(curdir)
596 curdir = vcspath.dirname(curdir)
608
597
609 for curdir in reversed(dirs_to_create):
598 for curdir in reversed(dirs_to_create):
610 log.debug('Creating missing directory "%s"', curdir)
599 log.debug('Creating missing directory "%s"', curdir)
611 svn.fs.make_dir(self.txn_root, curdir)
600 svn.fs.make_dir(self.txn_root, curdir)
612
601
613 def _svn_path_exists(self, path):
602 def _svn_path_exists(self, path):
614 path_status = svn.fs.check_path(self.txn_root, path)
603 path_status = svn.fs.check_path(self.txn_root, path)
615 return path_status != svn.core.svn_node_none
604 return path_status != svn.core.svn_node_none
616
605
617 def _add_file_if_node_does_not_exist(self):
606 def _add_file_if_node_does_not_exist(self):
618 kind = svn.fs.check_path(self.txn_root, self.node['path'])
607 kind = svn.fs.check_path(self.txn_root, self.node['path'])
619 if kind == svn.core.svn_node_none:
608 if kind == svn.core.svn_node_none:
620 svn.fs.make_file(self.txn_root, self.node['path'])
609 svn.fs.make_file(self.txn_root, self.node['path'])
621
610
622 def _update_file_content(self):
611 def _update_file_content(self):
623 assert isinstance(self.node['content'], str)
612 assert isinstance(self.node['content'], str)
624 handler, baton = svn.fs.apply_textdelta(
613 handler, baton = svn.fs.apply_textdelta(
625 self.txn_root, self.node['path'], None, None)
614 self.txn_root, self.node['path'], None, None)
626 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
615 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
627
616
628 def _update_file_properties(self):
617 def _update_file_properties(self):
629 properties = self.node.get('properties', {})
618 properties = self.node.get('properties', {})
630 for key, value in properties.iteritems():
619 for key, value in properties.iteritems():
631 svn.fs.change_node_prop(
620 svn.fs.change_node_prop(
632 self.txn_root, self.node['path'], key, value)
621 self.txn_root, self.node['path'], key, value)
633
622
634
623
635 def apr_time_t(timestamp):
624 def apr_time_t(timestamp):
636 """
625 """
637 Convert a Python timestamp into APR timestamp type apr_time_t
626 Convert a Python timestamp into APR timestamp type apr_time_t
638 """
627 """
639 return timestamp * 1E6
628 return timestamp * 1E6
640
629
641
630
642 def svn_opt_revision_value_t(num):
631 def svn_opt_revision_value_t(num):
643 """
632 """
644 Put `num` into a `svn_opt_revision_value_t` structure.
633 Put `num` into a `svn_opt_revision_value_t` structure.
645 """
634 """
646 value = svn.core.svn_opt_revision_value_t()
635 value = svn.core.svn_opt_revision_value_t()
647 value.number = num
636 value.number = num
648 revision = svn.core.svn_opt_revision_t()
637 revision = svn.core.svn_opt_revision_t()
649 revision.kind = svn.core.svn_opt_revision_number
638 revision.kind = svn.core.svn_opt_revision_number
650 revision.value = value
639 revision.value = value
651 return revision
640 return revision
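# Illustrative sketch, not part of this changeset: the two helpers above in
# use. 1483228800 is an arbitrary example timestamp (2017-01-01 00:00:00 UTC).
def _demo_revision_helpers():
    apr_ts = apr_time_t(1483228800)          # APR time in microseconds
    rev_42 = svn_opt_revision_value_t(42)    # svn_opt_revision_t pinned to r42
    return apr_ts, rev_42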