repositories: implemented faster dedicated checks for empty repositories
marcink
r698:65b1b84c default
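The two files below (apparently the Git remote module followed by the Mercurial remote module) each gain a dedicated is_empty(wire) method, so emptiness can be answered with one cheap backend call. As a rough standalone sketch of the Git-side idea using dulwich directly (the function name and repository path are illustrative, not part of this commit):

    from dulwich.repo import Repo

    def git_repo_is_empty(path):
        # Mirrors the new GitRemote.is_empty: HEAD cannot be resolved on a
        # repository without commits, so dulwich raises and we report "empty".
        repo = Repo(path)
        try:
            return not repo.head()
        except Exception:
            return True

    # e.g. git_repo_is_empty('/tmp/some-repo')  # path is a placeholder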
@@ -1,742 +1,751 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import collections
17 import collections
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 import more_itertools
28 import more_itertools
29 from dulwich import index, objects
29 from dulwich import index, objects
30 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.client import HttpGitClient, LocalGitClient
31 from dulwich.errors import (
31 from dulwich.errors import (
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 MissingCommitError, ObjectMissing, HangupException,
33 MissingCommitError, ObjectMissing, HangupException,
34 UnexpectedCommandError)
34 UnexpectedCommandError)
35 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.repo import Repo as DulwichRepo, Tag
36 from dulwich.server import update_server_info
36 from dulwich.server import update_server_info
37
37
38 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver import exceptions, settings, subprocessio
39 from vcsserver.utils import safe_str
39 from vcsserver.utils import safe_str
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 from vcsserver.hgcompat import (
41 from vcsserver.hgcompat import (
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 from vcsserver.git_lfs.lib import LFSOidStore
43 from vcsserver.git_lfs.lib import LFSOidStore
44
44
45 DIR_STAT = stat.S_IFDIR
45 DIR_STAT = stat.S_IFDIR
46 FILE_MODE = stat.S_IFMT
46 FILE_MODE = stat.S_IFMT
47 GIT_LINK = objects.S_IFGITLINK
47 GIT_LINK = objects.S_IFGITLINK
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Converts Dulwich exceptions to something neutral."""
53 """Converts Dulwich exceptions to something neutral."""
54 @wraps(func)
54 @wraps(func)
55 def wrapper(*args, **kwargs):
55 def wrapper(*args, **kwargs):
56 try:
56 try:
57 return func(*args, **kwargs)
57 return func(*args, **kwargs)
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 ObjectMissing) as e:
59 ObjectMissing) as e:
60 exc = exceptions.LookupException(e)
60 exc = exceptions.LookupException(e)
61 raise exc(e)
61 raise exc(e)
62 except (HangupException, UnexpectedCommandError) as e:
62 except (HangupException, UnexpectedCommandError) as e:
63 exc = exceptions.VcsException(e)
63 exc = exceptions.VcsException(e)
64 raise exc(e)
64 raise exc(e)
65 except Exception as e:
65 except Exception as e:
66 # NOTE(marcink): because of how dulwich handles some exceptions
66 # NOTE(marcink): because of how dulwich handles some exceptions
67 # (KeyError on empty repos), we cannot track this and catch all
67 # (KeyError on empty repos), we cannot track this and catch all
68 # exceptions; these may be exceptions from other handlers
68 # exceptions; these may be exceptions from other handlers
69 #if not hasattr(e, '_vcs_kind'):
69 #if not hasattr(e, '_vcs_kind'):
70 #log.exception("Unhandled exception in git remote call")
70 #log.exception("Unhandled exception in git remote call")
71 #raise_from_original(exceptions.UnhandledException)
71 #raise_from_original(exceptions.UnhandledException)
72 raise
72 raise
73 return wrapper
73 return wrapper
74
74
75
75
76 class Repo(DulwichRepo):
76 class Repo(DulwichRepo):
77 """
77 """
78 A wrapper for dulwich Repo class.
78 A wrapper for dulwich Repo class.
79
79
80 Since dulwich sometimes keeps .idx file descriptors open, it can lead to a
80 Since dulwich sometimes keeps .idx file descriptors open, it can lead to a
81 "Too many open files" error. We need to close all opened file descriptors
81 "Too many open files" error. We need to close all opened file descriptors
82 once the repo object is destroyed.
82 once the repo object is destroyed.
83
83
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 to 0.12.0 +
85 to 0.12.0 +
86 """
86 """
87 def __del__(self):
87 def __del__(self):
88 if hasattr(self, 'object_store'):
88 if hasattr(self, 'object_store'):
89 self.close()
89 self.close()
90
90
91
91
92 class GitFactory(RepoFactory):
92 class GitFactory(RepoFactory):
93 repo_type = 'git'
93 repo_type = 'git'
94
94
95 def _create_repo(self, wire, create):
95 def _create_repo(self, wire, create):
96 repo_path = str_to_dulwich(wire['path'])
96 repo_path = str_to_dulwich(wire['path'])
97 return Repo(repo_path)
97 return Repo(repo_path)
98
98
99
99
100 class GitRemote(object):
100 class GitRemote(object):
101
101
102 def __init__(self, factory):
102 def __init__(self, factory):
103 self._factory = factory
103 self._factory = factory
104 self.peeled_ref_marker = '^{}'
104 self.peeled_ref_marker = '^{}'
105 self._bulk_methods = {
105 self._bulk_methods = {
106 "author": self.commit_attribute,
106 "author": self.commit_attribute,
107 "date": self.get_object_attrs,
107 "date": self.get_object_attrs,
108 "message": self.commit_attribute,
108 "message": self.commit_attribute,
109 "parents": self.commit_attribute,
109 "parents": self.commit_attribute,
110 "_commit": self.revision,
110 "_commit": self.revision,
111 }
111 }
112
112
113 def _wire_to_config(self, wire):
113 def _wire_to_config(self, wire):
114 if 'config' in wire:
114 if 'config' in wire:
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 return {}
116 return {}
117
117
118 def _assign_ref(self, wire, ref, commit_id):
118 def _assign_ref(self, wire, ref, commit_id):
119 repo = self._factory.repo(wire)
119 repo = self._factory.repo(wire)
120 repo[ref] = commit_id
120 repo[ref] = commit_id
121
121
122 def _remote_conf(self, config):
122 def _remote_conf(self, config):
123 params = [
123 params = [
124 '-c', 'core.askpass=""',
124 '-c', 'core.askpass=""',
125 ]
125 ]
126 ssl_cert_dir = config.get('vcs_ssl_dir')
126 ssl_cert_dir = config.get('vcs_ssl_dir')
127 if ssl_cert_dir:
127 if ssl_cert_dir:
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 return params
129 return params
130
130
131 @reraise_safe_exceptions
131 @reraise_safe_exceptions
132 def is_empty(self, wire):
133 repo = self._factory.repo(wire)
134 try:
135 return not repo.head()
136 except Exception:
137 log.exception("failed to read object_store")
138 return True
139
140 @reraise_safe_exceptions
132 def add_object(self, wire, content):
141 def add_object(self, wire, content):
133 repo = self._factory.repo(wire)
142 repo = self._factory.repo(wire)
134 blob = objects.Blob()
143 blob = objects.Blob()
135 blob.set_raw_string(content)
144 blob.set_raw_string(content)
136 repo.object_store.add_object(blob)
145 repo.object_store.add_object(blob)
137 return blob.id
146 return blob.id
138
147
139 @reraise_safe_exceptions
148 @reraise_safe_exceptions
140 def assert_correct_path(self, wire):
149 def assert_correct_path(self, wire):
141 path = wire.get('path')
150 path = wire.get('path')
142 try:
151 try:
143 self._factory.repo(wire)
152 self._factory.repo(wire)
144 except NotGitRepository as e:
153 except NotGitRepository as e:
145 tb = traceback.format_exc()
154 tb = traceback.format_exc()
146 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
155 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
147 return False
156 return False
148
157
149 return True
158 return True
150
159
151 @reraise_safe_exceptions
160 @reraise_safe_exceptions
152 def bare(self, wire):
161 def bare(self, wire):
153 repo = self._factory.repo(wire)
162 repo = self._factory.repo(wire)
154 return repo.bare
163 return repo.bare
155
164
156 @reraise_safe_exceptions
165 @reraise_safe_exceptions
157 def blob_as_pretty_string(self, wire, sha):
166 def blob_as_pretty_string(self, wire, sha):
158 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
159 return repo[sha].as_pretty_string()
168 return repo[sha].as_pretty_string()
160
169
161 @reraise_safe_exceptions
170 @reraise_safe_exceptions
162 def blob_raw_length(self, wire, sha):
171 def blob_raw_length(self, wire, sha):
163 repo = self._factory.repo(wire)
172 repo = self._factory.repo(wire)
164 blob = repo[sha]
173 blob = repo[sha]
165 return blob.raw_length()
174 return blob.raw_length()
166
175
167 def _parse_lfs_pointer(self, raw_content):
176 def _parse_lfs_pointer(self, raw_content):
168
177
169 spec_string = 'version https://git-lfs.github.com/spec'
178 spec_string = 'version https://git-lfs.github.com/spec'
170 if raw_content and raw_content.startswith(spec_string):
179 if raw_content and raw_content.startswith(spec_string):
171 pattern = re.compile(r"""
180 pattern = re.compile(r"""
172 (?:\n)?
181 (?:\n)?
173 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
182 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
174 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
183 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
175 ^size[ ](?P<oid_size>[0-9]+)\n
184 ^size[ ](?P<oid_size>[0-9]+)\n
176 (?:\n)?
185 (?:\n)?
177 """, re.VERBOSE | re.MULTILINE)
186 """, re.VERBOSE | re.MULTILINE)
178 match = pattern.match(raw_content)
187 match = pattern.match(raw_content)
179 if match:
188 if match:
180 return match.groupdict()
189 return match.groupdict()
181
190
182 return {}
191 return {}
183
192
184 @reraise_safe_exceptions
193 @reraise_safe_exceptions
185 def is_large_file(self, wire, sha):
194 def is_large_file(self, wire, sha):
186 repo = self._factory.repo(wire)
195 repo = self._factory.repo(wire)
187 blob = repo[sha]
196 blob = repo[sha]
188 return self._parse_lfs_pointer(blob.as_raw_string())
197 return self._parse_lfs_pointer(blob.as_raw_string())
189
198
190 @reraise_safe_exceptions
199 @reraise_safe_exceptions
191 def in_largefiles_store(self, wire, oid):
200 def in_largefiles_store(self, wire, oid):
192 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
193 conf = self._wire_to_config(wire)
202 conf = self._wire_to_config(wire)
194
203
195 store_location = conf.get('vcs_git_lfs_store_location')
204 store_location = conf.get('vcs_git_lfs_store_location')
196 if store_location:
205 if store_location:
197 repo_name = repo.path
206 repo_name = repo.path
198 store = LFSOidStore(
207 store = LFSOidStore(
199 oid=oid, repo=repo_name, store_location=store_location)
208 oid=oid, repo=repo_name, store_location=store_location)
200 return store.has_oid()
209 return store.has_oid()
201
210
202 return False
211 return False
203
212
204 @reraise_safe_exceptions
213 @reraise_safe_exceptions
205 def store_path(self, wire, oid):
214 def store_path(self, wire, oid):
206 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
207 conf = self._wire_to_config(wire)
216 conf = self._wire_to_config(wire)
208
217
209 store_location = conf.get('vcs_git_lfs_store_location')
218 store_location = conf.get('vcs_git_lfs_store_location')
210 if store_location:
219 if store_location:
211 repo_name = repo.path
220 repo_name = repo.path
212 store = LFSOidStore(
221 store = LFSOidStore(
213 oid=oid, repo=repo_name, store_location=store_location)
222 oid=oid, repo=repo_name, store_location=store_location)
214 return store.oid_path
223 return store.oid_path
215 raise ValueError('Unable to fetch oid with path {}'.format(oid))
224 raise ValueError('Unable to fetch oid with path {}'.format(oid))
216
225
217 @reraise_safe_exceptions
226 @reraise_safe_exceptions
218 def bulk_request(self, wire, rev, pre_load):
227 def bulk_request(self, wire, rev, pre_load):
219 result = {}
228 result = {}
220 for attr in pre_load:
229 for attr in pre_load:
221 try:
230 try:
222 method = self._bulk_methods[attr]
231 method = self._bulk_methods[attr]
223 args = [wire, rev]
232 args = [wire, rev]
224 if attr == "date":
233 if attr == "date":
225 args.extend(["commit_time", "commit_timezone"])
234 args.extend(["commit_time", "commit_timezone"])
226 elif attr in ["author", "message", "parents"]:
235 elif attr in ["author", "message", "parents"]:
227 args.append(attr)
236 args.append(attr)
228 result[attr] = method(*args)
237 result[attr] = method(*args)
229 except KeyError as e:
238 except KeyError as e:
230 raise exceptions.VcsException(e)(
239 raise exceptions.VcsException(e)(
231 "Unknown bulk attribute: %s" % attr)
240 "Unknown bulk attribute: %s" % attr)
232 return result
241 return result
233
242
234 def _build_opener(self, url):
243 def _build_opener(self, url):
235 handlers = []
244 handlers = []
236 url_obj = url_parser(url)
245 url_obj = url_parser(url)
237 _, authinfo = url_obj.authinfo()
246 _, authinfo = url_obj.authinfo()
238
247
239 if authinfo:
248 if authinfo:
240 # create a password manager
249 # create a password manager
241 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
250 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
242 passmgr.add_password(*authinfo)
251 passmgr.add_password(*authinfo)
243
252
244 handlers.extend((httpbasicauthhandler(passmgr),
253 handlers.extend((httpbasicauthhandler(passmgr),
245 httpdigestauthhandler(passmgr)))
254 httpdigestauthhandler(passmgr)))
246
255
247 return urllib2.build_opener(*handlers)
256 return urllib2.build_opener(*handlers)
248
257
249 @reraise_safe_exceptions
258 @reraise_safe_exceptions
250 def check_url(self, url, config):
259 def check_url(self, url, config):
251 url_obj = url_parser(url)
260 url_obj = url_parser(url)
252 test_uri, _ = url_obj.authinfo()
261 test_uri, _ = url_obj.authinfo()
253 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
262 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
254 url_obj.query = obfuscate_qs(url_obj.query)
263 url_obj.query = obfuscate_qs(url_obj.query)
255 cleaned_uri = str(url_obj)
264 cleaned_uri = str(url_obj)
256 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
265 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
257
266
258 if not test_uri.endswith('info/refs'):
267 if not test_uri.endswith('info/refs'):
259 test_uri = test_uri.rstrip('/') + '/info/refs'
268 test_uri = test_uri.rstrip('/') + '/info/refs'
260
269
261 o = self._build_opener(url)
270 o = self._build_opener(url)
262 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
271 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
263
272
264 q = {"service": 'git-upload-pack'}
273 q = {"service": 'git-upload-pack'}
265 qs = '?%s' % urllib.urlencode(q)
274 qs = '?%s' % urllib.urlencode(q)
266 cu = "%s%s" % (test_uri, qs)
275 cu = "%s%s" % (test_uri, qs)
267 req = urllib2.Request(cu, None, {})
276 req = urllib2.Request(cu, None, {})
268
277
269 try:
278 try:
270 log.debug("Trying to open URL %s", cleaned_uri)
279 log.debug("Trying to open URL %s", cleaned_uri)
271 resp = o.open(req)
280 resp = o.open(req)
272 if resp.code != 200:
281 if resp.code != 200:
273 raise exceptions.URLError()('Return Code is not 200')
282 raise exceptions.URLError()('Return Code is not 200')
274 except Exception as e:
283 except Exception as e:
275 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
284 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
276 # means it cannot be cloned
285 # means it cannot be cloned
277 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
286 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
278
287
279 # now detect if it's proper git repo
288 # now detect if it's proper git repo
280 gitdata = resp.read()
289 gitdata = resp.read()
281 if 'service=git-upload-pack' in gitdata:
290 if 'service=git-upload-pack' in gitdata:
282 pass
291 pass
283 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
292 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
284 # old style git can return some other format !
293 # old style git can return some other format !
285 pass
294 pass
286 else:
295 else:
287 raise exceptions.URLError()(
296 raise exceptions.URLError()(
288 "url [%s] does not look like an git" % (cleaned_uri,))
297 "url [%s] does not look like an git" % (cleaned_uri,))
289
298
290 return True
299 return True
291
300
292 @reraise_safe_exceptions
301 @reraise_safe_exceptions
293 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
302 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
294 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
303 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
295 remote_refs = self.pull(wire, url, apply_refs=False)
304 remote_refs = self.pull(wire, url, apply_refs=False)
296 repo = self._factory.repo(wire)
305 repo = self._factory.repo(wire)
297 if isinstance(valid_refs, list):
306 if isinstance(valid_refs, list):
298 valid_refs = tuple(valid_refs)
307 valid_refs = tuple(valid_refs)
299
308
300 for k in remote_refs:
309 for k in remote_refs:
301 # only parse heads/tags and skip so called deferred tags
310 # only parse heads/tags and skip so called deferred tags
302 if k.startswith(valid_refs) and not k.endswith(deferred):
311 if k.startswith(valid_refs) and not k.endswith(deferred):
303 repo[k] = remote_refs[k]
312 repo[k] = remote_refs[k]
304
313
305 if update_after_clone:
314 if update_after_clone:
306 # we want to checkout HEAD
315 # we want to checkout HEAD
307 repo["HEAD"] = remote_refs["HEAD"]
316 repo["HEAD"] = remote_refs["HEAD"]
308 index.build_index_from_tree(repo.path, repo.index_path(),
317 index.build_index_from_tree(repo.path, repo.index_path(),
309 repo.object_store, repo["HEAD"].tree)
318 repo.object_store, repo["HEAD"].tree)
310
319
311 # TODO: this is quite complex, check if that can be simplified
320 # TODO: this is quite complex, check if that can be simplified
312 @reraise_safe_exceptions
321 @reraise_safe_exceptions
313 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
322 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
314 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
315 object_store = repo.object_store
324 object_store = repo.object_store
316
325
317 # Create a tree and populate it with blobs
326 # Create a tree and populate it with blobs
318 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
327 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
319
328
320 for node in updated:
329 for node in updated:
321 # Compute subdirs if needed
330 # Compute subdirs if needed
322 dirpath, nodename = vcspath.split(node['path'])
331 dirpath, nodename = vcspath.split(node['path'])
323 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
332 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
324 parent = commit_tree
333 parent = commit_tree
325 ancestors = [('', parent)]
334 ancestors = [('', parent)]
326
335
327 # Tries to dig for the deepest existing tree
336 # Tries to dig for the deepest existing tree
328 while dirnames:
337 while dirnames:
329 curdir = dirnames.pop(0)
338 curdir = dirnames.pop(0)
330 try:
339 try:
331 dir_id = parent[curdir][1]
340 dir_id = parent[curdir][1]
332 except KeyError:
341 except KeyError:
333 # put curdir back into dirnames and stops
342 # put curdir back into dirnames and stops
334 dirnames.insert(0, curdir)
343 dirnames.insert(0, curdir)
335 break
344 break
336 else:
345 else:
337 # If found, updates parent
346 # If found, updates parent
338 parent = repo[dir_id]
347 parent = repo[dir_id]
339 ancestors.append((curdir, parent))
348 ancestors.append((curdir, parent))
340 # Now parent is deepest existing tree and we need to create
349 # Now parent is deepest existing tree and we need to create
341 # subtrees for dirnames (in reverse order)
350 # subtrees for dirnames (in reverse order)
342 # [this only applies for nodes from added]
351 # [this only applies for nodes from added]
343 new_trees = []
352 new_trees = []
344
353
345 blob = objects.Blob.from_string(node['content'])
354 blob = objects.Blob.from_string(node['content'])
346
355
347 if dirnames:
356 if dirnames:
348 # If there are trees which should be created we need to build
357 # If there are trees which should be created we need to build
349 # them now (in reverse order)
358 # them now (in reverse order)
350 reversed_dirnames = list(reversed(dirnames))
359 reversed_dirnames = list(reversed(dirnames))
351 curtree = objects.Tree()
360 curtree = objects.Tree()
352 curtree[node['node_path']] = node['mode'], blob.id
361 curtree[node['node_path']] = node['mode'], blob.id
353 new_trees.append(curtree)
362 new_trees.append(curtree)
354 for dirname in reversed_dirnames[:-1]:
363 for dirname in reversed_dirnames[:-1]:
355 newtree = objects.Tree()
364 newtree = objects.Tree()
356 newtree[dirname] = (DIR_STAT, curtree.id)
365 newtree[dirname] = (DIR_STAT, curtree.id)
357 new_trees.append(newtree)
366 new_trees.append(newtree)
358 curtree = newtree
367 curtree = newtree
359 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
368 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
360 else:
369 else:
361 parent.add(
370 parent.add(
362 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
371 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
363
372
364 new_trees.append(parent)
373 new_trees.append(parent)
365 # Update ancestors
374 # Update ancestors
366 reversed_ancestors = reversed(
375 reversed_ancestors = reversed(
367 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
376 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
368 for parent, tree, path in reversed_ancestors:
377 for parent, tree, path in reversed_ancestors:
369 parent[path] = (DIR_STAT, tree.id)
378 parent[path] = (DIR_STAT, tree.id)
370 object_store.add_object(tree)
379 object_store.add_object(tree)
371
380
372 object_store.add_object(blob)
381 object_store.add_object(blob)
373 for tree in new_trees:
382 for tree in new_trees:
374 object_store.add_object(tree)
383 object_store.add_object(tree)
375
384
376 for node_path in removed:
385 for node_path in removed:
377 paths = node_path.split('/')
386 paths = node_path.split('/')
378 tree = commit_tree
387 tree = commit_tree
379 trees = [tree]
388 trees = [tree]
380 # Traverse deep into the forest...
389 # Traverse deep into the forest...
381 for path in paths:
390 for path in paths:
382 try:
391 try:
383 obj = repo[tree[path][1]]
392 obj = repo[tree[path][1]]
384 if isinstance(obj, objects.Tree):
393 if isinstance(obj, objects.Tree):
385 trees.append(obj)
394 trees.append(obj)
386 tree = obj
395 tree = obj
387 except KeyError:
396 except KeyError:
388 break
397 break
389 # Cut down the blob and all rotten trees on the way back...
398 # Cut down the blob and all rotten trees on the way back...
390 for path, tree in reversed(zip(paths, trees)):
399 for path, tree in reversed(zip(paths, trees)):
391 del tree[path]
400 del tree[path]
392 if tree:
401 if tree:
393 # This tree still has elements - don't remove it or any
402 # This tree still has elements - don't remove it or any
394 # of its parents
403 # of its parents
395 break
404 break
396
405
397 object_store.add_object(commit_tree)
406 object_store.add_object(commit_tree)
398
407
399 # Create commit
408 # Create commit
400 commit = objects.Commit()
409 commit = objects.Commit()
401 commit.tree = commit_tree.id
410 commit.tree = commit_tree.id
402 for k, v in commit_data.iteritems():
411 for k, v in commit_data.iteritems():
403 setattr(commit, k, v)
412 setattr(commit, k, v)
404 object_store.add_object(commit)
413 object_store.add_object(commit)
405
414
406 ref = 'refs/heads/%s' % branch
415 ref = 'refs/heads/%s' % branch
407 repo.refs[ref] = commit.id
416 repo.refs[ref] = commit.id
408
417
409 return commit.id
418 return commit.id
410
419
411 @reraise_safe_exceptions
420 @reraise_safe_exceptions
412 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
421 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
413 if url != 'default' and '://' not in url:
422 if url != 'default' and '://' not in url:
414 client = LocalGitClient(url)
423 client = LocalGitClient(url)
415 else:
424 else:
416 url_obj = url_parser(url)
425 url_obj = url_parser(url)
417 o = self._build_opener(url)
426 o = self._build_opener(url)
418 url, _ = url_obj.authinfo()
427 url, _ = url_obj.authinfo()
419 client = HttpGitClient(base_url=url, opener=o)
428 client = HttpGitClient(base_url=url, opener=o)
420 repo = self._factory.repo(wire)
429 repo = self._factory.repo(wire)
421
430
422 determine_wants = repo.object_store.determine_wants_all
431 determine_wants = repo.object_store.determine_wants_all
423 if refs:
432 if refs:
424 def determine_wants_requested(references):
433 def determine_wants_requested(references):
425 return [references[r] for r in references if r in refs]
434 return [references[r] for r in references if r in refs]
426 determine_wants = determine_wants_requested
435 determine_wants = determine_wants_requested
427
436
428 try:
437 try:
429 remote_refs = client.fetch(
438 remote_refs = client.fetch(
430 path=url, target=repo, determine_wants=determine_wants)
439 path=url, target=repo, determine_wants=determine_wants)
431 except NotGitRepository as e:
440 except NotGitRepository as e:
432 log.warning(
441 log.warning(
433 'Trying to fetch from "%s" failed, not a Git repository.', url)
442 'Trying to fetch from "%s" failed, not a Git repository.', url)
434 # Exception can contain unicode which we convert
443 # Exception can contain unicode which we convert
435 raise exceptions.AbortException(e)(repr(e))
444 raise exceptions.AbortException(e)(repr(e))
436
445
437 # mikhail: client.fetch() returns all the remote refs, but fetches only
446 # mikhail: client.fetch() returns all the remote refs, but fetches only
438 # refs filtered by `determine_wants` function. We need to filter result
447 # refs filtered by `determine_wants` function. We need to filter result
439 # as well
448 # as well
440 if refs:
449 if refs:
441 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
450 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
442
451
443 if apply_refs:
452 if apply_refs:
444 # TODO: johbo: Needs proper test coverage with a git repository
453 # TODO: johbo: Needs proper test coverage with a git repository
445 # that contains a tag object, so that we would end up with
454 # that contains a tag object, so that we would end up with
446 # a peeled ref at this point.
455 # a peeled ref at this point.
447 for k in remote_refs:
456 for k in remote_refs:
448 if k.endswith(self.peeled_ref_marker):
457 if k.endswith(self.peeled_ref_marker):
449 log.debug("Skipping peeled reference %s", k)
458 log.debug("Skipping peeled reference %s", k)
450 continue
459 continue
451 repo[k] = remote_refs[k]
460 repo[k] = remote_refs[k]
452
461
453 if refs and not update_after:
462 if refs and not update_after:
454 # mikhail: explicitly set the head to the last ref.
463 # mikhail: explicitly set the head to the last ref.
455 repo['HEAD'] = remote_refs[refs[-1]]
464 repo['HEAD'] = remote_refs[refs[-1]]
456
465
457 if update_after:
466 if update_after:
458 # we want to checkout HEAD
467 # we want to checkout HEAD
459 repo["HEAD"] = remote_refs["HEAD"]
468 repo["HEAD"] = remote_refs["HEAD"]
460 index.build_index_from_tree(repo.path, repo.index_path(),
469 index.build_index_from_tree(repo.path, repo.index_path(),
461 repo.object_store, repo["HEAD"].tree)
470 repo.object_store, repo["HEAD"].tree)
462 return remote_refs
471 return remote_refs
463
472
464 @reraise_safe_exceptions
473 @reraise_safe_exceptions
465 def sync_fetch(self, wire, url, refs=None):
474 def sync_fetch(self, wire, url, refs=None):
466 repo = self._factory.repo(wire)
475 repo = self._factory.repo(wire)
467 if refs and not isinstance(refs, (list, tuple)):
476 if refs and not isinstance(refs, (list, tuple)):
468 refs = [refs]
477 refs = [refs]
469 config = self._wire_to_config(wire)
478 config = self._wire_to_config(wire)
470 # get all remote refs we'll use to fetch later
479 # get all remote refs we'll use to fetch later
471 output, __ = self.run_git_command(
480 output, __ = self.run_git_command(
472 wire, ['ls-remote', url], fail_on_stderr=False,
481 wire, ['ls-remote', url], fail_on_stderr=False,
473 _copts=self._remote_conf(config),
482 _copts=self._remote_conf(config),
474 extra_env={'GIT_TERMINAL_PROMPT': '0'})
483 extra_env={'GIT_TERMINAL_PROMPT': '0'})
475
484
476 remote_refs = collections.OrderedDict()
485 remote_refs = collections.OrderedDict()
477 fetch_refs = []
486 fetch_refs = []
478
487
479 for ref_line in output.splitlines():
488 for ref_line in output.splitlines():
480 sha, ref = ref_line.split('\t')
489 sha, ref = ref_line.split('\t')
481 sha = sha.strip()
490 sha = sha.strip()
482 if ref in remote_refs:
491 if ref in remote_refs:
483 # duplicate, skip
492 # duplicate, skip
484 continue
493 continue
485 if ref.endswith(self.peeled_ref_marker):
494 if ref.endswith(self.peeled_ref_marker):
486 log.debug("Skipping peeled reference %s", ref)
495 log.debug("Skipping peeled reference %s", ref)
487 continue
496 continue
488 # don't sync HEAD
497 # don't sync HEAD
489 if ref in ['HEAD']:
498 if ref in ['HEAD']:
490 continue
499 continue
491
500
492 remote_refs[ref] = sha
501 remote_refs[ref] = sha
493
502
494 if refs and sha in refs:
503 if refs and sha in refs:
495 # we filter fetch using our specified refs
504 # we filter fetch using our specified refs
496 fetch_refs.append('{}:{}'.format(ref, ref))
505 fetch_refs.append('{}:{}'.format(ref, ref))
497 elif not refs:
506 elif not refs:
498 fetch_refs.append('{}:{}'.format(ref, ref))
507 fetch_refs.append('{}:{}'.format(ref, ref))
499 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
508 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
500 if fetch_refs:
509 if fetch_refs:
501 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
510 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
502 fetch_refs_chunks = list(chunk)
511 fetch_refs_chunks = list(chunk)
503 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
512 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
504 _out, _err = self.run_git_command(
513 _out, _err = self.run_git_command(
505 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
514 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
506 fail_on_stderr=False,
515 fail_on_stderr=False,
507 _copts=self._remote_conf(config),
516 _copts=self._remote_conf(config),
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
517 extra_env={'GIT_TERMINAL_PROMPT': '0'})
509
518
510 return remote_refs
519 return remote_refs
511
520
512 @reraise_safe_exceptions
521 @reraise_safe_exceptions
513 def sync_push(self, wire, url, refs=None):
522 def sync_push(self, wire, url, refs=None):
514 if not self.check_url(url, wire):
523 if not self.check_url(url, wire):
515 return
524 return
516 config = self._wire_to_config(wire)
525 config = self._wire_to_config(wire)
517 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
518 self.run_git_command(
527 self.run_git_command(
519 wire, ['push', url, '--mirror'], fail_on_stderr=False,
528 wire, ['push', url, '--mirror'], fail_on_stderr=False,
520 _copts=self._remote_conf(config),
529 _copts=self._remote_conf(config),
521 extra_env={'GIT_TERMINAL_PROMPT': '0'})
530 extra_env={'GIT_TERMINAL_PROMPT': '0'})
522
531
523 @reraise_safe_exceptions
532 @reraise_safe_exceptions
524 def get_remote_refs(self, wire, url):
533 def get_remote_refs(self, wire, url):
525 repo = Repo(url)
534 repo = Repo(url)
526 return repo.get_refs()
535 return repo.get_refs()
527
536
528 @reraise_safe_exceptions
537 @reraise_safe_exceptions
529 def get_description(self, wire):
538 def get_description(self, wire):
530 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
531 return repo.get_description()
540 return repo.get_description()
532
541
533 @reraise_safe_exceptions
542 @reraise_safe_exceptions
534 def get_missing_revs(self, wire, rev1, rev2, path2):
543 def get_missing_revs(self, wire, rev1, rev2, path2):
535 repo = self._factory.repo(wire)
544 repo = self._factory.repo(wire)
536 LocalGitClient(thin_packs=False).fetch(path2, repo)
545 LocalGitClient(thin_packs=False).fetch(path2, repo)
537
546
538 wire_remote = wire.copy()
547 wire_remote = wire.copy()
539 wire_remote['path'] = path2
548 wire_remote['path'] = path2
540 repo_remote = self._factory.repo(wire_remote)
549 repo_remote = self._factory.repo(wire_remote)
541 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
550 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
542
551
543 revs = [
552 revs = [
544 x.commit.id
553 x.commit.id
545 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
554 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
546 return revs
555 return revs
547
556
548 @reraise_safe_exceptions
557 @reraise_safe_exceptions
549 def get_object(self, wire, sha):
558 def get_object(self, wire, sha):
550 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
551 obj = repo.get_object(sha)
560 obj = repo.get_object(sha)
552 commit_id = obj.id
561 commit_id = obj.id
553
562
554 if isinstance(obj, Tag):
563 if isinstance(obj, Tag):
555 commit_id = obj.object[1]
564 commit_id = obj.object[1]
556
565
557 return {
566 return {
558 'id': obj.id,
567 'id': obj.id,
559 'type': obj.type_name,
568 'type': obj.type_name,
560 'commit_id': commit_id
569 'commit_id': commit_id
561 }
570 }
562
571
563 @reraise_safe_exceptions
572 @reraise_safe_exceptions
564 def get_object_attrs(self, wire, sha, *attrs):
573 def get_object_attrs(self, wire, sha, *attrs):
565 repo = self._factory.repo(wire)
574 repo = self._factory.repo(wire)
566 obj = repo.get_object(sha)
575 obj = repo.get_object(sha)
567 return list(getattr(obj, a) for a in attrs)
576 return list(getattr(obj, a) for a in attrs)
568
577
569 @reraise_safe_exceptions
578 @reraise_safe_exceptions
570 def get_refs(self, wire):
579 def get_refs(self, wire):
571 repo = self._factory.repo(wire)
580 repo = self._factory.repo(wire)
572 result = {}
581 result = {}
573 for ref, sha in repo.refs.as_dict().items():
582 for ref, sha in repo.refs.as_dict().items():
574 peeled_sha = repo.get_peeled(ref)
583 peeled_sha = repo.get_peeled(ref)
575 result[ref] = peeled_sha
584 result[ref] = peeled_sha
576 return result
585 return result
577
586
578 @reraise_safe_exceptions
587 @reraise_safe_exceptions
579 def get_refs_path(self, wire):
588 def get_refs_path(self, wire):
580 repo = self._factory.repo(wire)
589 repo = self._factory.repo(wire)
581 return repo.refs.path
590 return repo.refs.path
582
591
583 @reraise_safe_exceptions
592 @reraise_safe_exceptions
584 def head(self, wire, show_exc=True):
593 def head(self, wire, show_exc=True):
585 repo = self._factory.repo(wire)
594 repo = self._factory.repo(wire)
586 try:
595 try:
587 return repo.head()
596 return repo.head()
588 except Exception:
597 except Exception:
589 if show_exc:
598 if show_exc:
590 raise
599 raise
591
600
592 @reraise_safe_exceptions
601 @reraise_safe_exceptions
593 def init(self, wire):
602 def init(self, wire):
594 repo_path = str_to_dulwich(wire['path'])
603 repo_path = str_to_dulwich(wire['path'])
595 self.repo = Repo.init(repo_path)
604 self.repo = Repo.init(repo_path)
596
605
597 @reraise_safe_exceptions
606 @reraise_safe_exceptions
598 def init_bare(self, wire):
607 def init_bare(self, wire):
599 repo_path = str_to_dulwich(wire['path'])
608 repo_path = str_to_dulwich(wire['path'])
600 self.repo = Repo.init_bare(repo_path)
609 self.repo = Repo.init_bare(repo_path)
601
610
602 @reraise_safe_exceptions
611 @reraise_safe_exceptions
603 def revision(self, wire, rev):
612 def revision(self, wire, rev):
604 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
605 obj = repo[rev]
614 obj = repo[rev]
606 obj_data = {
615 obj_data = {
607 'id': obj.id,
616 'id': obj.id,
608 }
617 }
609 try:
618 try:
610 obj_data['tree'] = obj.tree
619 obj_data['tree'] = obj.tree
611 except AttributeError:
620 except AttributeError:
612 pass
621 pass
613 return obj_data
622 return obj_data
614
623
615 @reraise_safe_exceptions
624 @reraise_safe_exceptions
616 def commit_attribute(self, wire, rev, attr):
625 def commit_attribute(self, wire, rev, attr):
617 repo = self._factory.repo(wire)
626 repo = self._factory.repo(wire)
618 obj = repo[rev]
627 obj = repo[rev]
619 return getattr(obj, attr)
628 return getattr(obj, attr)
620
629
621 @reraise_safe_exceptions
630 @reraise_safe_exceptions
622 def set_refs(self, wire, key, value):
631 def set_refs(self, wire, key, value):
623 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
624 repo.refs[key] = value
633 repo.refs[key] = value
625
634
626 @reraise_safe_exceptions
635 @reraise_safe_exceptions
627 def remove_ref(self, wire, key):
636 def remove_ref(self, wire, key):
628 repo = self._factory.repo(wire)
637 repo = self._factory.repo(wire)
629 del repo.refs[key]
638 del repo.refs[key]
630
639
631 @reraise_safe_exceptions
640 @reraise_safe_exceptions
632 def tree_changes(self, wire, source_id, target_id):
641 def tree_changes(self, wire, source_id, target_id):
633 repo = self._factory.repo(wire)
642 repo = self._factory.repo(wire)
634 source = repo[source_id].tree if source_id else None
643 source = repo[source_id].tree if source_id else None
635 target = repo[target_id].tree
644 target = repo[target_id].tree
636 result = repo.object_store.tree_changes(source, target)
645 result = repo.object_store.tree_changes(source, target)
637 return list(result)
646 return list(result)
638
647
639 @reraise_safe_exceptions
648 @reraise_safe_exceptions
640 def tree_items(self, wire, tree_id):
649 def tree_items(self, wire, tree_id):
641 repo = self._factory.repo(wire)
650 repo = self._factory.repo(wire)
642 tree = repo[tree_id]
651 tree = repo[tree_id]
643
652
644 result = []
653 result = []
645 for item in tree.iteritems():
654 for item in tree.iteritems():
646 item_sha = item.sha
655 item_sha = item.sha
647 item_mode = item.mode
656 item_mode = item.mode
648
657
649 if FILE_MODE(item_mode) == GIT_LINK:
658 if FILE_MODE(item_mode) == GIT_LINK:
650 item_type = "link"
659 item_type = "link"
651 else:
660 else:
652 item_type = repo[item_sha].type_name
661 item_type = repo[item_sha].type_name
653
662
654 result.append((item.path, item_mode, item_sha, item_type))
663 result.append((item.path, item_mode, item_sha, item_type))
655 return result
664 return result
656
665
657 @reraise_safe_exceptions
666 @reraise_safe_exceptions
658 def update_server_info(self, wire):
667 def update_server_info(self, wire):
659 repo = self._factory.repo(wire)
668 repo = self._factory.repo(wire)
660 update_server_info(repo)
669 update_server_info(repo)
661
670
662 @reraise_safe_exceptions
671 @reraise_safe_exceptions
663 def discover_git_version(self):
672 def discover_git_version(self):
664 stdout, _ = self.run_git_command(
673 stdout, _ = self.run_git_command(
665 {}, ['--version'], _bare=True, _safe=True)
674 {}, ['--version'], _bare=True, _safe=True)
666 prefix = 'git version'
675 prefix = 'git version'
667 if stdout.startswith(prefix):
676 if stdout.startswith(prefix):
668 stdout = stdout[len(prefix):]
677 stdout = stdout[len(prefix):]
669 return stdout.strip()
678 return stdout.strip()
670
679
671 @reraise_safe_exceptions
680 @reraise_safe_exceptions
672 def run_git_command(self, wire, cmd, **opts):
681 def run_git_command(self, wire, cmd, **opts):
673 path = wire.get('path', None)
682 path = wire.get('path', None)
674
683
675 if path and os.path.isdir(path):
684 if path and os.path.isdir(path):
676 opts['cwd'] = path
685 opts['cwd'] = path
677
686
678 if '_bare' in opts:
687 if '_bare' in opts:
679 _copts = []
688 _copts = []
680 del opts['_bare']
689 del opts['_bare']
681 else:
690 else:
682 _copts = ['-c', 'core.quotepath=false', ]
691 _copts = ['-c', 'core.quotepath=false', ]
683 safe_call = False
692 safe_call = False
684 if '_safe' in opts:
693 if '_safe' in opts:
685 # no exc on failure
694 # no exc on failure
686 del opts['_safe']
695 del opts['_safe']
687 safe_call = True
696 safe_call = True
688
697
689 if '_copts' in opts:
698 if '_copts' in opts:
690 _copts.extend(opts['_copts'] or [])
699 _copts.extend(opts['_copts'] or [])
691 del opts['_copts']
700 del opts['_copts']
692
701
693 gitenv = os.environ.copy()
702 gitenv = os.environ.copy()
694 gitenv.update(opts.pop('extra_env', {}))
703 gitenv.update(opts.pop('extra_env', {}))
695 # need to clean fix GIT_DIR !
704 # need to clean fix GIT_DIR !
696 if 'GIT_DIR' in gitenv:
705 if 'GIT_DIR' in gitenv:
697 del gitenv['GIT_DIR']
706 del gitenv['GIT_DIR']
698 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
707 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
699 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
708 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
700
709
701 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
710 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
702 _opts = {'env': gitenv, 'shell': False}
711 _opts = {'env': gitenv, 'shell': False}
703
712
704 try:
713 try:
705 _opts.update(opts)
714 _opts.update(opts)
706 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
715 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
707
716
708 return ''.join(p), ''.join(p.error)
717 return ''.join(p), ''.join(p.error)
709 except (EnvironmentError, OSError) as err:
718 except (EnvironmentError, OSError) as err:
710 cmd = ' '.join(cmd) # human friendly CMD
719 cmd = ' '.join(cmd) # human friendly CMD
711 tb_err = ("Couldn't run git command (%s).\n"
720 tb_err = ("Couldn't run git command (%s).\n"
712 "Original error was:%s\n"
721 "Original error was:%s\n"
713 "Call options:%s\n"
722 "Call options:%s\n"
714 % (cmd, err, _opts))
723 % (cmd, err, _opts))
715 log.exception(tb_err)
724 log.exception(tb_err)
716 if safe_call:
725 if safe_call:
717 return '', err
726 return '', err
718 else:
727 else:
719 raise exceptions.VcsException()(tb_err)
728 raise exceptions.VcsException()(tb_err)
720
729
721 @reraise_safe_exceptions
730 @reraise_safe_exceptions
722 def install_hooks(self, wire, force=False):
731 def install_hooks(self, wire, force=False):
723 from vcsserver.hook_utils import install_git_hooks
732 from vcsserver.hook_utils import install_git_hooks
724 repo = self._factory.repo(wire)
733 repo = self._factory.repo(wire)
725 return install_git_hooks(repo.path, repo.bare, force_create=force)
734 return install_git_hooks(repo.path, repo.bare, force_create=force)
726
735
727 @reraise_safe_exceptions
736 @reraise_safe_exceptions
728 def get_hooks_info(self, wire):
737 def get_hooks_info(self, wire):
729 from vcsserver.hook_utils import (
738 from vcsserver.hook_utils import (
730 get_git_pre_hook_version, get_git_post_hook_version)
739 get_git_pre_hook_version, get_git_post_hook_version)
731 repo = self._factory.repo(wire)
740 repo = self._factory.repo(wire)
732 return {
741 return {
733 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
742 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
734 'post_version': get_git_post_hook_version(repo.path, repo.bare),
743 'post_version': get_git_post_hook_version(repo.path, repo.bare),
735 }
744 }
736
745
737
746
738 def str_to_dulwich(value):
747 def str_to_dulwich(value):
739 """
748 """
740 Dulwich 0.10.1a requires `unicode` objects to be passed in.
749 Dulwich 0.10.1a requires `unicode` objects to be passed in.
741 """
750 """
742 return value.decode(settings.WIRE_ENCODING)
751 return value.decode(settings.WIRE_ENCODING)
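The second file in the diff (apparently the Mercurial remote module) adds the equivalent check, answering emptiness directly from the changeset count. A hedged standalone sketch using the Mercurial API directly, bypassing MercurialFactory and the wire config that the real code goes through (ui construction and path handling are simplified assumptions):

    from mercurial import hg, ui as hgui

    def hg_repo_is_empty(path):
        # len(repo) is the number of changesets in the changelog, so a
        # freshly initialised repository reports 0 and counts as empty.
        repo = hg.repository(hgui.ui(), path)
        return len(repo) == 0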
@@ -1,846 +1,856 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30
30
31 import vcsserver
31 import vcsserver
32 from vcsserver import exceptions
32 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 def make_ui_from_config(repo_config):
44 def make_ui_from_config(repo_config):
45
45
46 class LoggingUI(ui.ui):
46 class LoggingUI(ui.ui):
47 def status(self, *msg, **opts):
47 def status(self, *msg, **opts):
48 log.info(' '.join(msg).rstrip('\n'))
48 log.info(' '.join(msg).rstrip('\n'))
49 super(LoggingUI, self).status(*msg, **opts)
49 super(LoggingUI, self).status(*msg, **opts)
50
50
51 def warn(self, *msg, **opts):
51 def warn(self, *msg, **opts):
52 log.warn(' '.join(msg).rstrip('\n'))
52 log.warn(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).warn(*msg, **opts)
53 super(LoggingUI, self).warn(*msg, **opts)
54
54
55 def error(self, *msg, **opts):
55 def error(self, *msg, **opts):
56 log.error(' '.join(msg).rstrip('\n'))
56 log.error(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).error(*msg, **opts)
57 super(LoggingUI, self).error(*msg, **opts)
58
58
59 def note(self, *msg, **opts):
59 def note(self, *msg, **opts):
60 log.info(' '.join(msg).rstrip('\n'))
60 log.info(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).note(*msg, **opts)
61 super(LoggingUI, self).note(*msg, **opts)
62
62
63 def debug(self, *msg, **opts):
63 def debug(self, *msg, **opts):
64 log.debug(' '.join(msg).rstrip('\n'))
64 log.debug(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).debug(*msg, **opts)
65 super(LoggingUI, self).debug(*msg, **opts)
66
66
67 baseui = LoggingUI()
67 baseui = LoggingUI()
68
68
69 # clean the baseui object
69 # clean the baseui object
70 baseui._ocfg = hgconfig.config()
70 baseui._ocfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
73
73
74 for section, option, value in repo_config:
74 for section, option, value in repo_config:
75 baseui.setconfig(section, option, value)
75 baseui.setconfig(section, option, value)
76
76
77 # make our hgweb quiet so it doesn't print output
77 # make our hgweb quiet so it doesn't print output
78 baseui.setconfig('ui', 'quiet', 'true')
78 baseui.setconfig('ui', 'quiet', 'true')
79
79
80 baseui.setconfig('ui', 'paginate', 'never')
80 baseui.setconfig('ui', 'paginate', 'never')
81 # for better Error reporting of Mercurial
81 # for better Error reporting of Mercurial
82 baseui.setconfig('ui', 'message-output', 'stderr')
82 baseui.setconfig('ui', 'message-output', 'stderr')
83
83
84 # force mercurial to only use 1 thread, otherwise it may try to set a
84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # signal in a non-main thread, thus generating a ValueError.
85 # signal in a non-main thread, thus generating a ValueError.
86 baseui.setconfig('worker', 'numcpus', 1)
86 baseui.setconfig('worker', 'numcpus', 1)
87
87
88 # If there is no config for the largefiles extension, we explicitly disable
88 # If there is no config for the largefiles extension, we explicitly disable
89 # it here. This overrides settings from the repository's hgrc file. Recent
89 # it here. This overrides settings from the repository's hgrc file. Recent
90 # mercurial versions enable largefiles in hgrc on clone from largefile
90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # repo.
91 # repo.
92 if not baseui.hasconfig('extensions', 'largefiles'):
92 if not baseui.hasconfig('extensions', 'largefiles'):
93 log.debug('Explicitly disable largefiles extension for repo.')
93 log.debug('Explicitly disable largefiles extension for repo.')
94 baseui.setconfig('extensions', 'largefiles', '!')
94 baseui.setconfig('extensions', 'largefiles', '!')
95
95
96 return baseui
96 return baseui
97
97
98
98
99 def reraise_safe_exceptions(func):
99 def reraise_safe_exceptions(func):
100 """Decorator for converting mercurial exceptions to something neutral."""
100 """Decorator for converting mercurial exceptions to something neutral."""
101 def wrapper(*args, **kwargs):
101 def wrapper(*args, **kwargs):
102 try:
102 try:
103 return func(*args, **kwargs)
103 return func(*args, **kwargs)
104 except (Abort, InterventionRequired) as e:
104 except (Abort, InterventionRequired) as e:
105 raise_from_original(exceptions.AbortException(e))
105 raise_from_original(exceptions.AbortException(e))
106 except RepoLookupError as e:
106 except RepoLookupError as e:
107 raise_from_original(exceptions.LookupException(e))
107 raise_from_original(exceptions.LookupException(e))
108 except RequirementError as e:
108 except RequirementError as e:
109 raise_from_original(exceptions.RequirementException(e))
109 raise_from_original(exceptions.RequirementException(e))
110 except RepoError as e:
110 except RepoError as e:
111 raise_from_original(exceptions.VcsException(e))
111 raise_from_original(exceptions.VcsException(e))
112 except LookupError as e:
112 except LookupError as e:
113 raise_from_original(exceptions.LookupException(e))
113 raise_from_original(exceptions.LookupException(e))
114 except Exception as e:
114 except Exception as e:
115 if not hasattr(e, '_vcs_kind'):
115 if not hasattr(e, '_vcs_kind'):
116 log.exception("Unhandled exception in hg remote call")
116 log.exception("Unhandled exception in hg remote call")
117 raise_from_original(exceptions.UnhandledException(e))
117 raise_from_original(exceptions.UnhandledException(e))
118
118
119 raise
119 raise
120 return wrapper
120 return wrapper
121
121
122
122
123 class MercurialFactory(RepoFactory):
123 class MercurialFactory(RepoFactory):
124 repo_type = 'hg'
124 repo_type = 'hg'
125
125
126 def _create_config(self, config, hooks=True):
126 def _create_config(self, config, hooks=True):
127 if not hooks:
127 if not hooks:
128 hooks_to_clean = frozenset((
128 hooks_to_clean = frozenset((
129 'changegroup.repo_size', 'preoutgoing.pre_pull',
129 'changegroup.repo_size', 'preoutgoing.pre_pull',
130 'outgoing.pull_logger', 'prechangegroup.pre_push'))
130 'outgoing.pull_logger', 'prechangegroup.pre_push'))
131 new_config = []
131 new_config = []
132 for section, option, value in config:
132 for section, option, value in config:
133 if section == 'hooks' and option in hooks_to_clean:
133 if section == 'hooks' and option in hooks_to_clean:
134 continue
134 continue
135 new_config.append((section, option, value))
135 new_config.append((section, option, value))
136 config = new_config
136 config = new_config
137
137
138 baseui = make_ui_from_config(config)
138 baseui = make_ui_from_config(config)
139 return baseui
139 return baseui
140
140
141 def _create_repo(self, wire, create):
141 def _create_repo(self, wire, create):
142 baseui = self._create_config(wire["config"])
142 baseui = self._create_config(wire["config"])
143 return instance(baseui, wire["path"], create)
143 return instance(baseui, wire["path"], create)
144
144
145
145
146 class HgRemote(object):
146 class HgRemote(object):
147
147
148 def __init__(self, factory):
148 def __init__(self, factory):
149 self._factory = factory
149 self._factory = factory
150
150
151 self._bulk_methods = {
151 self._bulk_methods = {
152 "affected_files": self.ctx_files,
152 "affected_files": self.ctx_files,
153 "author": self.ctx_user,
153 "author": self.ctx_user,
154 "branch": self.ctx_branch,
154 "branch": self.ctx_branch,
155 "children": self.ctx_children,
155 "children": self.ctx_children,
156 "date": self.ctx_date,
156 "date": self.ctx_date,
157 "message": self.ctx_description,
157 "message": self.ctx_description,
158 "parents": self.ctx_parents,
158 "parents": self.ctx_parents,
159 "status": self.ctx_status,
159 "status": self.ctx_status,
160 "obsolete": self.ctx_obsolete,
160 "obsolete": self.ctx_obsolete,
161 "phase": self.ctx_phase,
161 "phase": self.ctx_phase,
162 "hidden": self.ctx_hidden,
162 "hidden": self.ctx_hidden,
163 "_file_paths": self.ctx_list,
163 "_file_paths": self.ctx_list,
164 }
164 }
165
165
166 def _get_ctx(self, repo, ref):
166 def _get_ctx(self, repo, ref):
167 return get_ctx(repo, ref)
167 return get_ctx(repo, ref)
168
168
169 @reraise_safe_exceptions
169 @reraise_safe_exceptions
170 def discover_hg_version(self):
170 def discover_hg_version(self):
171 from mercurial import util
171 from mercurial import util
172 return util.version()
172 return util.version()
173
173
174 @reraise_safe_exceptions
174 @reraise_safe_exceptions
175 def is_empty(self, wire):
176 repo = self._factory.repo(wire)
177
178 try:
179 return len(repo) == 0
180 except Exception:
181 log.exception("failed to read object_store")
182 return False
183
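Review note: the new Mercurial `is_empty` check relies on `len(repo)` being the changelog length, so no changesets are materialized. A minimal standalone sketch of the same idea against a plain Mercurial repository (the helper name and the direct `mercurial.hg` usage below are illustrative, not part of this changeset):

    from mercurial import hg, ui as uimod

    def hg_repo_is_empty(path):
        """len() of a localrepository is its changelog length; 0 means no commits."""
        repo = hg.repository(uimod.ui.load(), path)
        return len(repo) == 0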
184 @reraise_safe_exceptions
175 def archive_repo(self, archive_path, mtime, file_info, kind):
185 def archive_repo(self, archive_path, mtime, file_info, kind):
176 if kind == "tgz":
186 if kind == "tgz":
177 archiver = archival.tarit(archive_path, mtime, "gz")
187 archiver = archival.tarit(archive_path, mtime, "gz")
178 elif kind == "tbz2":
188 elif kind == "tbz2":
179 archiver = archival.tarit(archive_path, mtime, "bz2")
189 archiver = archival.tarit(archive_path, mtime, "bz2")
180 elif kind == 'zip':
190 elif kind == 'zip':
181 archiver = archival.zipit(archive_path, mtime)
191 archiver = archival.zipit(archive_path, mtime)
182 else:
192 else:
183 raise exceptions.ArchiveException()(
193 raise exceptions.ArchiveException()(
184 'Remote does not support: "%s".' % kind)
194 'Remote does not support: "%s".' % kind)
185
195
186 for f_path, f_mode, f_is_link, f_content in file_info:
196 for f_path, f_mode, f_is_link, f_content in file_info:
187 archiver.addfile(f_path, f_mode, f_is_link, f_content)
197 archiver.addfile(f_path, f_mode, f_is_link, f_content)
188 archiver.done()
198 archiver.done()
189
199
190 @reraise_safe_exceptions
200 @reraise_safe_exceptions
191 def bookmarks(self, wire):
201 def bookmarks(self, wire):
192 repo = self._factory.repo(wire)
202 repo = self._factory.repo(wire)
193 return dict(repo._bookmarks)
203 return dict(repo._bookmarks)
194
204
195 @reraise_safe_exceptions
205 @reraise_safe_exceptions
196 def branches(self, wire, normal, closed):
206 def branches(self, wire, normal, closed):
197 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
198 iter_branches = repo.branchmap().iterbranches()
208 iter_branches = repo.branchmap().iterbranches()
199 bt = {}
209 bt = {}
200 for branch_name, _heads, tip, is_closed in iter_branches:
210 for branch_name, _heads, tip, is_closed in iter_branches:
201 if normal and not is_closed:
211 if normal and not is_closed:
202 bt[branch_name] = tip
212 bt[branch_name] = tip
203 if closed and is_closed:
213 if closed and is_closed:
204 bt[branch_name] = tip
214 bt[branch_name] = tip
205
215
206 return bt
216 return bt
207
217
208 @reraise_safe_exceptions
218 @reraise_safe_exceptions
209 def bulk_request(self, wire, rev, pre_load):
219 def bulk_request(self, wire, rev, pre_load):
210 result = {}
220 result = {}
211 for attr in pre_load:
221 for attr in pre_load:
212 try:
222 try:
213 method = self._bulk_methods[attr]
223 method = self._bulk_methods[attr]
214 result[attr] = method(wire, rev)
224 result[attr] = method(wire, rev)
215 except KeyError as e:
225 except KeyError as e:
216 raise exceptions.VcsException(e)(
226 raise exceptions.VcsException(e)(
217 'Unknown bulk attribute: "%s"' % attr)
227 'Unknown bulk attribute: "%s"' % attr)
218 return result
228 return result
219
229
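Review note: `bulk_request` is plain dictionary dispatch over the `pre_load` keys registered in `self._bulk_methods`. A standalone sketch of the pattern (names are illustrative only, not part of this changeset):

    def bulk_fetch(methods, keys, *call_args):
        """Call methods[key](*call_args) for each requested key and collect results."""
        result = {}
        for key in keys:
            if key not in methods:
                raise KeyError('Unknown bulk attribute: "%s"' % key)
            result[key] = methods[key](*call_args)
        return result

    # e.g. bulk_fetch({'branch': remote.ctx_branch}, ['branch'], wire, rev)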
220 @reraise_safe_exceptions
230 @reraise_safe_exceptions
221 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
231 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
222 baseui = self._factory._create_config(wire["config"], hooks=hooks)
232 baseui = self._factory._create_config(wire["config"], hooks=hooks)
223 clone(baseui, source, dest, noupdate=not update_after_clone)
233 clone(baseui, source, dest, noupdate=not update_after_clone)
224
234
225 @reraise_safe_exceptions
235 @reraise_safe_exceptions
226 def commitctx(
236 def commitctx(
227 self, wire, message, parents, commit_time, commit_timezone,
237 self, wire, message, parents, commit_time, commit_timezone,
228 user, files, extra, removed, updated):
238 user, files, extra, removed, updated):
229
239
230 repo = self._factory.repo(wire)
240 repo = self._factory.repo(wire)
231 baseui = self._factory._create_config(wire['config'])
241 baseui = self._factory._create_config(wire['config'])
232 publishing = baseui.configbool('phases', 'publish')
242 publishing = baseui.configbool('phases', 'publish')
233 if publishing:
243 if publishing:
234 new_commit = 'public'
244 new_commit = 'public'
235 else:
245 else:
236 new_commit = 'draft'
246 new_commit = 'draft'
237
247
238 def _filectxfn(_repo, ctx, path):
248 def _filectxfn(_repo, ctx, path):
239 """
249 """
240 Marks the given path as added/changed/removed in the given _repo. This is
250 Marks the given path as added/changed/removed in the given _repo. This is
241 for the internal mercurial commit function.
251 for the internal mercurial commit function.
242 """
252 """
243
253
244 # check if this path is removed
254 # check if this path is removed
245 if path in removed:
255 if path in removed:
246 # returning None is a way to mark node for removal
256 # returning None is a way to mark node for removal
247 return None
257 return None
248
258
249 # check if this path is added
259 # check if this path is added
250 for node in updated:
260 for node in updated:
251 if node['path'] == path:
261 if node['path'] == path:
252 return memfilectx(
262 return memfilectx(
253 _repo,
263 _repo,
254 changectx=ctx,
264 changectx=ctx,
255 path=node['path'],
265 path=node['path'],
256 data=node['content'],
266 data=node['content'],
257 islink=False,
267 islink=False,
258 isexec=bool(node['mode'] & stat.S_IXUSR),
268 isexec=bool(node['mode'] & stat.S_IXUSR),
259 copied=False)
269 copied=False)
260
270
261 raise exceptions.AbortException()(
271 raise exceptions.AbortException()(
262 "Given path haven't been marked as added, "
272 "Given path haven't been marked as added, "
263 "changed or removed (%s)" % path)
273 "changed or removed (%s)" % path)
264
274
265 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
275 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
266
276
267 commit_ctx = memctx(
277 commit_ctx = memctx(
268 repo=repo,
278 repo=repo,
269 parents=parents,
279 parents=parents,
270 text=message,
280 text=message,
271 files=files,
281 files=files,
272 filectxfn=_filectxfn,
282 filectxfn=_filectxfn,
273 user=user,
283 user=user,
274 date=(commit_time, commit_timezone),
284 date=(commit_time, commit_timezone),
275 extra=extra)
285 extra=extra)
276
286
277 n = repo.commitctx(commit_ctx)
287 n = repo.commitctx(commit_ctx)
278 new_id = hex(n)
288 new_id = hex(n)
279
289
280 return new_id
290 return new_id
281
291
282 @reraise_safe_exceptions
292 @reraise_safe_exceptions
283 def ctx_branch(self, wire, revision):
293 def ctx_branch(self, wire, revision):
284 repo = self._factory.repo(wire)
294 repo = self._factory.repo(wire)
285 ctx = self._get_ctx(repo, revision)
295 ctx = self._get_ctx(repo, revision)
286 return ctx.branch()
296 return ctx.branch()
287
297
288 @reraise_safe_exceptions
298 @reraise_safe_exceptions
289 def ctx_children(self, wire, revision):
299 def ctx_children(self, wire, revision):
290 repo = self._factory.repo(wire)
300 repo = self._factory.repo(wire)
291 ctx = self._get_ctx(repo, revision)
301 ctx = self._get_ctx(repo, revision)
292 return [child.rev() for child in ctx.children()]
302 return [child.rev() for child in ctx.children()]
293
303
294 @reraise_safe_exceptions
304 @reraise_safe_exceptions
295 def ctx_date(self, wire, revision):
305 def ctx_date(self, wire, revision):
296 repo = self._factory.repo(wire)
306 repo = self._factory.repo(wire)
297 ctx = self._get_ctx(repo, revision)
307 ctx = self._get_ctx(repo, revision)
298 return ctx.date()
308 return ctx.date()
299
309
300 @reraise_safe_exceptions
310 @reraise_safe_exceptions
301 def ctx_description(self, wire, revision):
311 def ctx_description(self, wire, revision):
302 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
303 ctx = self._get_ctx(repo, revision)
313 ctx = self._get_ctx(repo, revision)
304 return ctx.description()
314 return ctx.description()
305
315
306 @reraise_safe_exceptions
316 @reraise_safe_exceptions
307 def ctx_files(self, wire, revision):
317 def ctx_files(self, wire, revision):
308 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
309 ctx = self._get_ctx(repo, revision)
319 ctx = self._get_ctx(repo, revision)
310 return ctx.files()
320 return ctx.files()
311
321
312 @reraise_safe_exceptions
322 @reraise_safe_exceptions
313 def ctx_list(self, path, revision):
323 def ctx_list(self, path, revision):
314 repo = self._factory.repo(path)
324 repo = self._factory.repo(path)
315 ctx = self._get_ctx(repo, revision)
325 ctx = self._get_ctx(repo, revision)
316 return list(ctx)
326 return list(ctx)
317
327
318 @reraise_safe_exceptions
328 @reraise_safe_exceptions
319 def ctx_parents(self, wire, revision):
329 def ctx_parents(self, wire, revision):
320 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
321 ctx = self._get_ctx(repo, revision)
331 ctx = self._get_ctx(repo, revision)
322 return [parent.rev() for parent in ctx.parents()]
332 return [parent.rev() for parent in ctx.parents()]
323
333
324 @reraise_safe_exceptions
334 @reraise_safe_exceptions
325 def ctx_phase(self, wire, revision):
335 def ctx_phase(self, wire, revision):
326 repo = self._factory.repo(wire)
336 repo = self._factory.repo(wire)
327 ctx = self._get_ctx(repo, revision)
337 ctx = self._get_ctx(repo, revision)
328 # public=0, draft=1, secret=2
338 # public=0, draft=1, secret=2
329 return ctx.phase()
339 return ctx.phase()
330
340
331 @reraise_safe_exceptions
341 @reraise_safe_exceptions
332 def ctx_obsolete(self, wire, revision):
342 def ctx_obsolete(self, wire, revision):
333 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
334 ctx = self._get_ctx(repo, revision)
344 ctx = self._get_ctx(repo, revision)
335 return ctx.obsolete()
345 return ctx.obsolete()
336
346
337 @reraise_safe_exceptions
347 @reraise_safe_exceptions
338 def ctx_hidden(self, wire, revision):
348 def ctx_hidden(self, wire, revision):
339 repo = self._factory.repo(wire)
349 repo = self._factory.repo(wire)
340 ctx = self._get_ctx(repo, revision)
350 ctx = self._get_ctx(repo, revision)
341 return ctx.hidden()
351 return ctx.hidden()
342
352
343 @reraise_safe_exceptions
353 @reraise_safe_exceptions
344 def ctx_substate(self, wire, revision):
354 def ctx_substate(self, wire, revision):
345 repo = self._factory.repo(wire)
355 repo = self._factory.repo(wire)
346 ctx = self._get_ctx(repo, revision)
356 ctx = self._get_ctx(repo, revision)
347 return ctx.substate
357 return ctx.substate
348
358
349 @reraise_safe_exceptions
359 @reraise_safe_exceptions
350 def ctx_status(self, wire, revision):
360 def ctx_status(self, wire, revision):
351 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
352 ctx = self._get_ctx(repo, revision)
362 ctx = self._get_ctx(repo, revision)
353 status = repo[ctx.p1().node()].status(other=ctx.node())
363 status = repo[ctx.p1().node()].status(other=ctx.node())
354 # object of status (odd, custom named tuple in mercurial) is not
364 # object of status (odd, custom named tuple in mercurial) is not
355 # correctly serializable, so we make it a list, as the underlying
365 # correctly serializable, so we make it a list, as the underlying
356 # API expects this to be a list
366 # API expects this to be a list
357 return list(status)
367 return list(status)
358
368
359 @reraise_safe_exceptions
369 @reraise_safe_exceptions
360 def ctx_user(self, wire, revision):
370 def ctx_user(self, wire, revision):
361 repo = self._factory.repo(wire)
371 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, revision)
372 ctx = self._get_ctx(repo, revision)
363 return ctx.user()
373 return ctx.user()
364
374
365 @reraise_safe_exceptions
375 @reraise_safe_exceptions
366 def check_url(self, url, config):
376 def check_url(self, url, config):
367 _proto = None
377 _proto = None
368 if '+' in url[:url.find('://')]:
378 if '+' in url[:url.find('://')]:
369 _proto = url[0:url.find('+')]
379 _proto = url[0:url.find('+')]
370 url = url[url.find('+') + 1:]
380 url = url[url.find('+') + 1:]
371 handlers = []
381 handlers = []
372 url_obj = url_parser(url)
382 url_obj = url_parser(url)
373 test_uri, authinfo = url_obj.authinfo()
383 test_uri, authinfo = url_obj.authinfo()
374 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
384 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
375 url_obj.query = obfuscate_qs(url_obj.query)
385 url_obj.query = obfuscate_qs(url_obj.query)
376
386
377 cleaned_uri = str(url_obj)
387 cleaned_uri = str(url_obj)
378 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
388 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
379
389
380 if authinfo:
390 if authinfo:
381 # create a password manager
391 # create a password manager
382 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
392 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
383 passmgr.add_password(*authinfo)
393 passmgr.add_password(*authinfo)
384
394
385 handlers.extend((httpbasicauthhandler(passmgr),
395 handlers.extend((httpbasicauthhandler(passmgr),
386 httpdigestauthhandler(passmgr)))
396 httpdigestauthhandler(passmgr)))
387
397
388 o = urllib2.build_opener(*handlers)
398 o = urllib2.build_opener(*handlers)
389 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
399 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
390 ('Accept', 'application/mercurial-0.1')]
400 ('Accept', 'application/mercurial-0.1')]
391
401
392 q = {"cmd": 'between'}
402 q = {"cmd": 'between'}
393 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
403 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
394 qs = '?%s' % urllib.urlencode(q)
404 qs = '?%s' % urllib.urlencode(q)
395 cu = "%s%s" % (test_uri, qs)
405 cu = "%s%s" % (test_uri, qs)
396 req = urllib2.Request(cu, None, {})
406 req = urllib2.Request(cu, None, {})
397
407
398 try:
408 try:
399 log.debug("Trying to open URL %s", cleaned_uri)
409 log.debug("Trying to open URL %s", cleaned_uri)
400 resp = o.open(req)
410 resp = o.open(req)
401 if resp.code != 200:
411 if resp.code != 200:
402 raise exceptions.URLError()('Return Code is not 200')
412 raise exceptions.URLError()('Return Code is not 200')
403 except Exception as e:
413 except Exception as e:
404 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
414 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
405 # means it cannot be cloned
415 # means it cannot be cloned
406 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
416 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
407
417
408 # now check if it's a proper hg repo, but don't do it for svn
418 # now check if it's a proper hg repo, but don't do it for svn
409 try:
419 try:
410 if _proto == 'svn':
420 if _proto == 'svn':
411 pass
421 pass
412 else:
422 else:
413 # check for pure hg repos
423 # check for pure hg repos
414 log.debug(
424 log.debug(
415 "Verifying if URL is a Mercurial repository: %s",
425 "Verifying if URL is a Mercurial repository: %s",
416 cleaned_uri)
426 cleaned_uri)
417 ui = make_ui_from_config(config)
427 ui = make_ui_from_config(config)
418 peer_checker = makepeer(ui, url)
428 peer_checker = makepeer(ui, url)
419 peer_checker.lookup('tip')
429 peer_checker.lookup('tip')
420 except Exception as e:
430 except Exception as e:
421 log.warning("URL is not a valid Mercurial repository: %s",
431 log.warning("URL is not a valid Mercurial repository: %s",
422 cleaned_uri)
432 cleaned_uri)
423 raise exceptions.URLError(e)(
433 raise exceptions.URLError(e)(
424 "url [%s] does not look like an hg repo org_exc: %s"
434 "url [%s] does not look like an hg repo org_exc: %s"
425 % (cleaned_uri, e))
435 % (cleaned_uri, e))
426
436
427 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
437 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
428 return True
438 return True
429
439
430 @reraise_safe_exceptions
440 @reraise_safe_exceptions
431 def diff(
441 def diff(
432 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
442 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
433 context):
443 context):
434 repo = self._factory.repo(wire)
444 repo = self._factory.repo(wire)
435
445
436 if file_filter:
446 if file_filter:
437 match_filter = match(file_filter[0], '', [file_filter[1]])
447 match_filter = match(file_filter[0], '', [file_filter[1]])
438 else:
448 else:
439 match_filter = file_filter
449 match_filter = file_filter
440 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
450 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
441
451
442 try:
452 try:
443 return "".join(patch.diff(
453 return "".join(patch.diff(
444 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
454 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
445 except RepoLookupError as e:
455 except RepoLookupError as e:
446 raise exceptions.LookupException(e)()
456 raise exceptions.LookupException(e)()
447
457
448 @reraise_safe_exceptions
458 @reraise_safe_exceptions
449 def node_history(self, wire, revision, path, limit):
459 def node_history(self, wire, revision, path, limit):
450 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
451
461
452 ctx = self._get_ctx(repo, revision)
462 ctx = self._get_ctx(repo, revision)
453 fctx = ctx.filectx(path)
463 fctx = ctx.filectx(path)
454
464
455 def history_iter():
465 def history_iter():
456 limit_rev = fctx.rev()
466 limit_rev = fctx.rev()
457 for obj in reversed(list(fctx.filelog())):
467 for obj in reversed(list(fctx.filelog())):
458 obj = fctx.filectx(obj)
468 obj = fctx.filectx(obj)
459 ctx = obj.changectx()
469 ctx = obj.changectx()
460 if ctx.hidden() or ctx.obsolete():
470 if ctx.hidden() or ctx.obsolete():
461 continue
471 continue
462
472
463 if limit_rev >= obj.rev():
473 if limit_rev >= obj.rev():
464 yield obj
474 yield obj
465
475
466 history = []
476 history = []
467 for cnt, obj in enumerate(history_iter()):
477 for cnt, obj in enumerate(history_iter()):
468 if limit and cnt >= limit:
478 if limit and cnt >= limit:
469 break
479 break
470 history.append(hex(obj.node()))
480 history.append(hex(obj.node()))
471
481
472 return [x for x in history]
482 return [x for x in history]
473
483
474 @reraise_safe_exceptions
484 @reraise_safe_exceptions
475 def node_history_untill(self, wire, revision, path, limit):
485 def node_history_untill(self, wire, revision, path, limit):
476 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
477 ctx = self._get_ctx(repo, revision)
487 ctx = self._get_ctx(repo, revision)
478 fctx = ctx.filectx(path)
488 fctx = ctx.filectx(path)
479
489
480 file_log = list(fctx.filelog())
490 file_log = list(fctx.filelog())
481 if limit:
491 if limit:
482 # Limit to the last n items
492 # Limit to the last n items
483 file_log = file_log[-limit:]
493 file_log = file_log[-limit:]
484
494
485 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
495 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
486
496
487 @reraise_safe_exceptions
497 @reraise_safe_exceptions
488 def fctx_annotate(self, wire, revision, path):
498 def fctx_annotate(self, wire, revision, path):
489 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
490 ctx = self._get_ctx(repo, revision)
500 ctx = self._get_ctx(repo, revision)
491 fctx = ctx.filectx(path)
501 fctx = ctx.filectx(path)
492
502
493 result = []
503 result = []
494 for i, annotate_obj in enumerate(fctx.annotate(), 1):
504 for i, annotate_obj in enumerate(fctx.annotate(), 1):
495 ln_no = i
505 ln_no = i
496 sha = hex(annotate_obj.fctx.node())
506 sha = hex(annotate_obj.fctx.node())
497 content = annotate_obj.text
507 content = annotate_obj.text
498 result.append((ln_no, sha, content))
508 result.append((ln_no, sha, content))
499 return result
509 return result
500
510
501 @reraise_safe_exceptions
511 @reraise_safe_exceptions
502 def fctx_data(self, wire, revision, path):
512 def fctx_data(self, wire, revision, path):
503 repo = self._factory.repo(wire)
513 repo = self._factory.repo(wire)
504 ctx = self._get_ctx(repo, revision)
514 ctx = self._get_ctx(repo, revision)
505 fctx = ctx.filectx(path)
515 fctx = ctx.filectx(path)
506 return fctx.data()
516 return fctx.data()
507
517
508 @reraise_safe_exceptions
518 @reraise_safe_exceptions
509 def fctx_flags(self, wire, revision, path):
519 def fctx_flags(self, wire, revision, path):
510 repo = self._factory.repo(wire)
520 repo = self._factory.repo(wire)
511 ctx = self._get_ctx(repo, revision)
521 ctx = self._get_ctx(repo, revision)
512 fctx = ctx.filectx(path)
522 fctx = ctx.filectx(path)
513 return fctx.flags()
523 return fctx.flags()
514
524
515 @reraise_safe_exceptions
525 @reraise_safe_exceptions
516 def fctx_size(self, wire, revision, path):
526 def fctx_size(self, wire, revision, path):
517 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
518 ctx = self._get_ctx(repo, revision)
528 ctx = self._get_ctx(repo, revision)
519 fctx = ctx.filectx(path)
529 fctx = ctx.filectx(path)
520 return fctx.size()
530 return fctx.size()
521
531
522 @reraise_safe_exceptions
532 @reraise_safe_exceptions
523 def get_all_commit_ids(self, wire, name):
533 def get_all_commit_ids(self, wire, name):
524 repo = self._factory.repo(wire)
534 repo = self._factory.repo(wire)
525 repo = repo.filtered(name)
535 repo = repo.filtered(name)
526 revs = map(lambda x: hex(x[7]), repo.changelog.index)
536 revs = map(lambda x: hex(x[7]), repo.changelog.index)
527 return revs
537 return revs
528
538
529 @reraise_safe_exceptions
539 @reraise_safe_exceptions
530 def get_config_value(self, wire, section, name, untrusted=False):
540 def get_config_value(self, wire, section, name, untrusted=False):
531 repo = self._factory.repo(wire)
541 repo = self._factory.repo(wire)
532 return repo.ui.config(section, name, untrusted=untrusted)
542 return repo.ui.config(section, name, untrusted=untrusted)
533
543
534 @reraise_safe_exceptions
544 @reraise_safe_exceptions
535 def get_config_bool(self, wire, section, name, untrusted=False):
545 def get_config_bool(self, wire, section, name, untrusted=False):
536 repo = self._factory.repo(wire)
546 repo = self._factory.repo(wire)
537 return repo.ui.configbool(section, name, untrusted=untrusted)
547 return repo.ui.configbool(section, name, untrusted=untrusted)
538
548
539 @reraise_safe_exceptions
549 @reraise_safe_exceptions
540 def get_config_list(self, wire, section, name, untrusted=False):
550 def get_config_list(self, wire, section, name, untrusted=False):
541 repo = self._factory.repo(wire)
551 repo = self._factory.repo(wire)
542 return repo.ui.configlist(section, name, untrusted=untrusted)
552 return repo.ui.configlist(section, name, untrusted=untrusted)
543
553
544 @reraise_safe_exceptions
554 @reraise_safe_exceptions
545 def is_large_file(self, wire, path):
555 def is_large_file(self, wire, path):
546 return largefiles.lfutil.isstandin(path)
556 return largefiles.lfutil.isstandin(path)
547
557
548 @reraise_safe_exceptions
558 @reraise_safe_exceptions
549 def in_largefiles_store(self, wire, sha):
559 def in_largefiles_store(self, wire, sha):
550 repo = self._factory.repo(wire)
560 repo = self._factory.repo(wire)
551 return largefiles.lfutil.instore(repo, sha)
561 return largefiles.lfutil.instore(repo, sha)
552
562
553 @reraise_safe_exceptions
563 @reraise_safe_exceptions
554 def in_user_cache(self, wire, sha):
564 def in_user_cache(self, wire, sha):
555 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
556 return largefiles.lfutil.inusercache(repo.ui, sha)
566 return largefiles.lfutil.inusercache(repo.ui, sha)
557
567
558 @reraise_safe_exceptions
568 @reraise_safe_exceptions
559 def store_path(self, wire, sha):
569 def store_path(self, wire, sha):
560 repo = self._factory.repo(wire)
570 repo = self._factory.repo(wire)
561 return largefiles.lfutil.storepath(repo, sha)
571 return largefiles.lfutil.storepath(repo, sha)
562
572
563 @reraise_safe_exceptions
573 @reraise_safe_exceptions
564 def link(self, wire, sha, path):
574 def link(self, wire, sha, path):
565 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
566 largefiles.lfutil.link(
576 largefiles.lfutil.link(
567 largefiles.lfutil.usercachepath(repo.ui, sha), path)
577 largefiles.lfutil.usercachepath(repo.ui, sha), path)
568
578
569 @reraise_safe_exceptions
579 @reraise_safe_exceptions
570 def localrepository(self, wire, create=False):
580 def localrepository(self, wire, create=False):
571 self._factory.repo(wire, create=create)
581 self._factory.repo(wire, create=create)
572
582
573 @reraise_safe_exceptions
583 @reraise_safe_exceptions
574 def lookup(self, wire, revision, both):
584 def lookup(self, wire, revision, both):
575
585
576 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
577
587
578 if isinstance(revision, int):
588 if isinstance(revision, int):
579 # NOTE(marcink):
589 # NOTE(marcink):
580 # since Mercurial doesn't support negative indexes properly
590 # since Mercurial doesn't support negative indexes properly
581 # we need to shift accordingly by one to get the proper index, e.g.
591 # we need to shift accordingly by one to get the proper index, e.g.
582 # repo[-1] => repo[-2]
592 # repo[-1] => repo[-2]
583 # repo[0] => repo[-1]
593 # repo[0] => repo[-1]
584 if revision <= 0:
594 if revision <= 0:
585 revision = revision + -1
595 revision = revision + -1
586 try:
596 try:
587 ctx = self._get_ctx(repo, revision)
597 ctx = self._get_ctx(repo, revision)
588 except (TypeError, RepoLookupError) as e:
598 except (TypeError, RepoLookupError) as e:
589 e._org_exc_tb = traceback.format_exc()
599 e._org_exc_tb = traceback.format_exc()
590 raise exceptions.LookupException(e)(revision)
600 raise exceptions.LookupException(e)(revision)
591 except LookupError as e:
601 except LookupError as e:
592 e._org_exc_tb = traceback.format_exc()
602 e._org_exc_tb = traceback.format_exc()
593 raise exceptions.LookupException(e)(e.name)
603 raise exceptions.LookupException(e)(e.name)
594
604
595 if not both:
605 if not both:
596 return ctx.hex()
606 return ctx.hex()
597
607
598 ctx = repo[ctx.hex()]
608 ctx = repo[ctx.hex()]
599 return ctx.hex(), ctx.rev()
609 return ctx.hex(), ctx.rev()
600
610
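Review note: the NOTE(marcink) shift in `lookup` above maps RhodeCode-style indexes onto Mercurial revnums: 0 becomes -1, -1 becomes -2, and positive revnums pass through unchanged. A standalone restatement of the rule (helper name is illustrative only):

    def shift_hg_index(revision):
        """Apply the same shift lookup() uses for non-positive integer revisions."""
        return revision - 1 if revision <= 0 else revision

    assert shift_hg_index(0) == -1
    assert shift_hg_index(-1) == -2
    assert shift_hg_index(5) == 5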
601 @reraise_safe_exceptions
611 @reraise_safe_exceptions
602 def pull(self, wire, url, commit_ids=None):
612 def pull(self, wire, url, commit_ids=None):
603 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
604 # Disable any prompts for this repo
614 # Disable any prompts for this repo
605 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
615 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
606
616
607 remote = peer(repo, {}, url)
617 remote = peer(repo, {}, url)
608 # Disable any prompts for this remote
618 # Disable any prompts for this remote
609 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
619 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
610
620
611 if commit_ids:
621 if commit_ids:
612 commit_ids = [bin(commit_id) for commit_id in commit_ids]
622 commit_ids = [bin(commit_id) for commit_id in commit_ids]
613
623
614 return exchange.pull(
624 return exchange.pull(
615 repo, remote, heads=commit_ids, force=None).cgresult
625 repo, remote, heads=commit_ids, force=None).cgresult
616
626
617 @reraise_safe_exceptions
627 @reraise_safe_exceptions
618 def sync_push(self, wire, url):
628 def sync_push(self, wire, url):
619 if not self.check_url(url, wire['config']):
629 if not self.check_url(url, wire['config']):
620 return
630 return
621
631
622 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
623
633
624 # Disable any prompts for this repo
634 # Disable any prompts for this repo
625 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
626
636
627 bookmarks = dict(repo._bookmarks).keys()
637 bookmarks = dict(repo._bookmarks).keys()
628 remote = peer(repo, {}, url)
638 remote = peer(repo, {}, url)
629 # Disable any prompts for this remote
639 # Disable any prompts for this remote
630 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
631
641
632 return exchange.push(
642 return exchange.push(
633 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
634
644
635 @reraise_safe_exceptions
645 @reraise_safe_exceptions
636 def revision(self, wire, rev):
646 def revision(self, wire, rev):
637 repo = self._factory.repo(wire)
647 repo = self._factory.repo(wire)
638 ctx = self._get_ctx(repo, rev)
648 ctx = self._get_ctx(repo, rev)
639 return ctx.rev()
649 return ctx.rev()
640
650
641 @reraise_safe_exceptions
651 @reraise_safe_exceptions
642 def rev_range(self, wire, filter):
652 def rev_range(self, wire, filter):
643 repo = self._factory.repo(wire)
653 repo = self._factory.repo(wire)
644 revisions = [rev for rev in revrange(repo, filter)]
654 revisions = [rev for rev in revrange(repo, filter)]
645 return revisions
655 return revisions
646
656
647 @reraise_safe_exceptions
657 @reraise_safe_exceptions
648 def rev_range_hash(self, wire, node):
658 def rev_range_hash(self, wire, node):
649 repo = self._factory.repo(wire)
659 repo = self._factory.repo(wire)
650
660
651 def get_revs(repo, rev_opt):
661 def get_revs(repo, rev_opt):
652 if rev_opt:
662 if rev_opt:
653 revs = revrange(repo, rev_opt)
663 revs = revrange(repo, rev_opt)
654 if len(revs) == 0:
664 if len(revs) == 0:
655 return (nullrev, nullrev)
665 return (nullrev, nullrev)
656 return max(revs), min(revs)
666 return max(revs), min(revs)
657 else:
667 else:
658 return len(repo) - 1, 0
668 return len(repo) - 1, 0
659
669
660 stop, start = get_revs(repo, [node + ':'])
670 stop, start = get_revs(repo, [node + ':'])
661 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
671 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
662 return revs
672 return revs
663
673
664 @reraise_safe_exceptions
674 @reraise_safe_exceptions
665 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
675 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
666 other_path = kwargs.pop('other_path', None)
676 other_path = kwargs.pop('other_path', None)
667
677
668 # case when we want to compare two independent repositories
678 # case when we want to compare two independent repositories
669 if other_path and other_path != wire["path"]:
679 if other_path and other_path != wire["path"]:
670 baseui = self._factory._create_config(wire["config"])
680 baseui = self._factory._create_config(wire["config"])
671 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
681 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
672 else:
682 else:
673 repo = self._factory.repo(wire)
683 repo = self._factory.repo(wire)
674 return list(repo.revs(rev_spec, *args))
684 return list(repo.revs(rev_spec, *args))
675
685
676 @reraise_safe_exceptions
686 @reraise_safe_exceptions
677 def strip(self, wire, revision, update, backup):
687 def strip(self, wire, revision, update, backup):
678 repo = self._factory.repo(wire)
688 repo = self._factory.repo(wire)
679 ctx = self._get_ctx(repo, revision)
689 ctx = self._get_ctx(repo, revision)
680 hgext_strip(
690 hgext_strip(
681 repo.baseui, repo, ctx.node(), update=update, backup=backup)
691 repo.baseui, repo, ctx.node(), update=update, backup=backup)
682
692
683 @reraise_safe_exceptions
693 @reraise_safe_exceptions
684 def verify(self, wire,):
694 def verify(self, wire,):
685 repo = self._factory.repo(wire)
695 repo = self._factory.repo(wire)
686 baseui = self._factory._create_config(wire['config'])
696 baseui = self._factory._create_config(wire['config'])
687 baseui.setconfig('ui', 'quiet', 'false')
697 baseui.setconfig('ui', 'quiet', 'false')
688 output = io.BytesIO()
698 output = io.BytesIO()
689
699
690 def write(data, **unused_kwargs):
700 def write(data, **unused_kwargs):
691 output.write(data)
701 output.write(data)
692 baseui.write = write
702 baseui.write = write
693
703
694 repo.ui = baseui
704 repo.ui = baseui
695 verify.verify(repo)
705 verify.verify(repo)
696 return output.getvalue()
706 return output.getvalue()
697
707
698 @reraise_safe_exceptions
708 @reraise_safe_exceptions
699 def tag(self, wire, name, revision, message, local, user,
709 def tag(self, wire, name, revision, message, local, user,
700 tag_time, tag_timezone):
710 tag_time, tag_timezone):
701 repo = self._factory.repo(wire)
711 repo = self._factory.repo(wire)
702 ctx = self._get_ctx(repo, revision)
712 ctx = self._get_ctx(repo, revision)
703 node = ctx.node()
713 node = ctx.node()
704
714
705 date = (tag_time, tag_timezone)
715 date = (tag_time, tag_timezone)
706 try:
716 try:
707 hg_tag.tag(repo, name, node, message, local, user, date)
717 hg_tag.tag(repo, name, node, message, local, user, date)
708 except Abort as e:
718 except Abort as e:
709 log.exception("Tag operation aborted")
719 log.exception("Tag operation aborted")
710 # Exception can contain unicode which we convert
720 # Exception can contain unicode which we convert
711 raise exceptions.AbortException(e)(repr(e))
721 raise exceptions.AbortException(e)(repr(e))
712
722
713 @reraise_safe_exceptions
723 @reraise_safe_exceptions
714 def tags(self, wire):
724 def tags(self, wire):
715 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
716 return repo.tags()
726 return repo.tags()
717
727
718 @reraise_safe_exceptions
728 @reraise_safe_exceptions
719 def update(self, wire, node=None, clean=False):
729 def update(self, wire, node=None, clean=False):
720 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
721 baseui = self._factory._create_config(wire['config'])
731 baseui = self._factory._create_config(wire['config'])
722 commands.update(baseui, repo, node=node, clean=clean)
732 commands.update(baseui, repo, node=node, clean=clean)
723
733
724 @reraise_safe_exceptions
734 @reraise_safe_exceptions
725 def identify(self, wire):
735 def identify(self, wire):
726 repo = self._factory.repo(wire)
736 repo = self._factory.repo(wire)
727 baseui = self._factory._create_config(wire['config'])
737 baseui = self._factory._create_config(wire['config'])
728 output = io.BytesIO()
738 output = io.BytesIO()
729 baseui.write = output.write
739 baseui.write = output.write
730 # This is required to get a full node id
740 # This is required to get a full node id
731 baseui.debugflag = True
741 baseui.debugflag = True
732 commands.identify(baseui, repo, id=True)
742 commands.identify(baseui, repo, id=True)
733
743
734 return output.getvalue()
744 return output.getvalue()
735
745
736 @reraise_safe_exceptions
746 @reraise_safe_exceptions
737 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
747 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
738 hooks=True):
748 hooks=True):
739 repo = self._factory.repo(wire)
749 repo = self._factory.repo(wire)
740 baseui = self._factory._create_config(wire['config'], hooks=hooks)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
741
751
742 # Mercurial internally has a lot of logic that checks ONLY if an
752 # Mercurial internally has a lot of logic that checks ONLY if an
743 # option is defined, so we only pass the options that are actually set
753 # option is defined, so we only pass the options that are actually set
744 opts = {}
754 opts = {}
745 if bookmark:
755 if bookmark:
746 opts['bookmark'] = bookmark
756 opts['bookmark'] = bookmark
747 if branch:
757 if branch:
748 opts['branch'] = branch
758 opts['branch'] = branch
749 if revision:
759 if revision:
750 opts['rev'] = revision
760 opts['rev'] = revision
751
761
752 commands.pull(baseui, repo, source, **opts)
762 commands.pull(baseui, repo, source, **opts)
753
763
754 @reraise_safe_exceptions
764 @reraise_safe_exceptions
755 def heads(self, wire, branch=None):
765 def heads(self, wire, branch=None):
756 repo = self._factory.repo(wire)
766 repo = self._factory.repo(wire)
757 baseui = self._factory._create_config(wire['config'])
767 baseui = self._factory._create_config(wire['config'])
758 output = io.BytesIO()
768 output = io.BytesIO()
759
769
760 def write(data, **unused_kwargs):
770 def write(data, **unused_kwargs):
761 output.write(data)
771 output.write(data)
762
772
763 baseui.write = write
773 baseui.write = write
764 if branch:
774 if branch:
765 args = [branch]
775 args = [branch]
766 else:
776 else:
767 args = []
777 args = []
768 commands.heads(baseui, repo, template='{node} ', *args)
778 commands.heads(baseui, repo, template='{node} ', *args)
769
779
770 return output.getvalue()
780 return output.getvalue()
771
781
772 @reraise_safe_exceptions
782 @reraise_safe_exceptions
773 def ancestor(self, wire, revision1, revision2):
783 def ancestor(self, wire, revision1, revision2):
774 repo = self._factory.repo(wire)
784 repo = self._factory.repo(wire)
775 changelog = repo.changelog
785 changelog = repo.changelog
776 lookup = repo.lookup
786 lookup = repo.lookup
777 a = changelog.ancestor(lookup(revision1), lookup(revision2))
787 a = changelog.ancestor(lookup(revision1), lookup(revision2))
778 return hex(a)
788 return hex(a)
779
789
780 @reraise_safe_exceptions
790 @reraise_safe_exceptions
781 def push(self, wire, revisions, dest_path, hooks=True,
791 def push(self, wire, revisions, dest_path, hooks=True,
782 push_branches=False):
792 push_branches=False):
783 repo = self._factory.repo(wire)
793 repo = self._factory.repo(wire)
784 baseui = self._factory._create_config(wire['config'], hooks=hooks)
794 baseui = self._factory._create_config(wire['config'], hooks=hooks)
785 commands.push(baseui, repo, dest=dest_path, rev=revisions,
795 commands.push(baseui, repo, dest=dest_path, rev=revisions,
786 new_branch=push_branches)
796 new_branch=push_branches)
787
797
788 @reraise_safe_exceptions
798 @reraise_safe_exceptions
789 def merge(self, wire, revision):
799 def merge(self, wire, revision):
790 repo = self._factory.repo(wire)
800 repo = self._factory.repo(wire)
791 baseui = self._factory._create_config(wire['config'])
801 baseui = self._factory._create_config(wire['config'])
792 repo.ui.setconfig('ui', 'merge', 'internal:dump')
802 repo.ui.setconfig('ui', 'merge', 'internal:dump')
793
803
794 # In case sub repositories are used, mercurial prompts the user in
804 # In case sub repositories are used, mercurial prompts the user in
795 # case of merge conflicts or different sub repository sources. By
805 # case of merge conflicts or different sub repository sources. By
796 # setting the interactive flag to `False` mercurial doesn't prompt the
806 # setting the interactive flag to `False` mercurial doesn't prompt the
797 # user but instead uses a default value.
807 # user but instead uses a default value.
798 repo.ui.setconfig('ui', 'interactive', False)
808 repo.ui.setconfig('ui', 'interactive', False)
799 commands.merge(baseui, repo, rev=revision)
809 commands.merge(baseui, repo, rev=revision)
800
810
801 @reraise_safe_exceptions
811 @reraise_safe_exceptions
802 def merge_state(self, wire):
812 def merge_state(self, wire):
803 repo = self._factory.repo(wire)
813 repo = self._factory.repo(wire)
804 repo.ui.setconfig('ui', 'merge', 'internal:dump')
814 repo.ui.setconfig('ui', 'merge', 'internal:dump')
805
815
806 # In case sub repositories are used, mercurial prompts the user in
816 # In case sub repositories are used, mercurial prompts the user in
807 # case of merge conflicts or different sub repository sources. By
817 # case of merge conflicts or different sub repository sources. By
808 # setting the interactive flag to `False` mercurial doesn't prompt the
818 # setting the interactive flag to `False` mercurial doesn't prompt the
809 # user but instead uses a default value.
819 # user but instead uses a default value.
810 repo.ui.setconfig('ui', 'interactive', False)
820 repo.ui.setconfig('ui', 'interactive', False)
811 ms = hg_merge.mergestate(repo)
821 ms = hg_merge.mergestate(repo)
812 return [x for x in ms.unresolved()]
822 return [x for x in ms.unresolved()]
813
823
814 @reraise_safe_exceptions
824 @reraise_safe_exceptions
815 def commit(self, wire, message, username, close_branch=False):
825 def commit(self, wire, message, username, close_branch=False):
816 repo = self._factory.repo(wire)
826 repo = self._factory.repo(wire)
817 baseui = self._factory._create_config(wire['config'])
827 baseui = self._factory._create_config(wire['config'])
818 repo.ui.setconfig('ui', 'username', username)
828 repo.ui.setconfig('ui', 'username', username)
819 commands.commit(baseui, repo, message=message, close_branch=close_branch)
829 commands.commit(baseui, repo, message=message, close_branch=close_branch)
820
830
821
831
822 @reraise_safe_exceptions
832 @reraise_safe_exceptions
823 def rebase(self, wire, source=None, dest=None, abort=False):
833 def rebase(self, wire, source=None, dest=None, abort=False):
824 repo = self._factory.repo(wire)
834 repo = self._factory.repo(wire)
825 baseui = self._factory._create_config(wire['config'])
835 baseui = self._factory._create_config(wire['config'])
826 repo.ui.setconfig('ui', 'merge', 'internal:dump')
836 repo.ui.setconfig('ui', 'merge', 'internal:dump')
827 rebase.rebase(
837 rebase.rebase(
828 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
838 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
829
839
830 @reraise_safe_exceptions
840 @reraise_safe_exceptions
831 def bookmark(self, wire, bookmark, revision=None):
841 def bookmark(self, wire, bookmark, revision=None):
832 repo = self._factory.repo(wire)
842 repo = self._factory.repo(wire)
833 baseui = self._factory._create_config(wire['config'])
843 baseui = self._factory._create_config(wire['config'])
834 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
844 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
835
845
836 @reraise_safe_exceptions
846 @reraise_safe_exceptions
837 def install_hooks(self, wire, force=False):
847 def install_hooks(self, wire, force=False):
838 # we don't need any special hooks for Mercurial
848 # we don't need any special hooks for Mercurial
839 pass
849 pass
840
850
841 @reraise_safe_exceptions
851 @reraise_safe_exceptions
842 def get_hooks_info(self, wire):
852 def get_hooks_info(self, wire):
843 return {
853 return {
844 'pre_version': vcsserver.__version__,
854 'pre_version': vcsserver.__version__,
845 'post_version': vcsserver.__version__,
855 'post_version': vcsserver.__version__,
846 }
856 }
@@ -1,765 +1,775 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 # Set of svn compatible version flags.
43 # Set of svn compatible version flags.
44 # Compare with subversion/svnadmin/svnadmin.c
44 # Compare with subversion/svnadmin/svnadmin.c
45 svn_compatible_versions = {
45 svn_compatible_versions = {
46 'pre-1.4-compatible',
46 'pre-1.4-compatible',
47 'pre-1.5-compatible',
47 'pre-1.5-compatible',
48 'pre-1.6-compatible',
48 'pre-1.6-compatible',
49 'pre-1.8-compatible',
49 'pre-1.8-compatible',
50 'pre-1.9-compatible'
50 'pre-1.9-compatible'
51 }
51 }
52
52
53 svn_compatible_versions_map = {
53 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
54 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
58 'pre-1.9-compatible': '1.8',
59 }
59 }
60
60
61
61
62 def reraise_safe_exceptions(func):
62 def reraise_safe_exceptions(func):
63 """Decorator for converting svn exceptions to something neutral."""
63 """Decorator for converting svn exceptions to something neutral."""
64 def wrapper(*args, **kwargs):
64 def wrapper(*args, **kwargs):
65 try:
65 try:
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 except Exception as e:
67 except Exception as e:
68 if not hasattr(e, '_vcs_kind'):
68 if not hasattr(e, '_vcs_kind'):
69 log.exception("Unhandled exception in svn remote call")
69 log.exception("Unhandled exception in svn remote call")
70 raise_from_original(exceptions.UnhandledException(e))
70 raise_from_original(exceptions.UnhandledException(e))
71 raise
71 raise
72 return wrapper
72 return wrapper
73
73
74
74
75 class SubversionFactory(RepoFactory):
75 class SubversionFactory(RepoFactory):
76 repo_type = 'svn'
76 repo_type = 'svn'
77
77
78 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
80 if create:
81 fs_config = {'compatible-version': '1.9'}
81 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
82 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
85 .format(compatible_version))
86 fs_config['compatible-version'] = \
86 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
87 svn_compatible_versions_map[compatible_version]
88
88
89 log.debug('Create SVN repo with config "%s"', fs_config)
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
91 else:
92 repo = svn.repos.open(path)
92 repo = svn.repos.open(path)
93
93
94 log.debug('Got SVN object: %s', repo)
94 log.debug('Got SVN object: %s', repo)
95 return repo
95 return repo
96
96
97 def repo(self, wire, create=False, compatible_version=None):
97 def repo(self, wire, create=False, compatible_version=None):
98 """
98 """
99 Get a repository instance for the given path.
99 Get a repository instance for the given path.
100
100
101 Uses internally the low level beaker API since the decorators introduce
101 Uses internally the low level beaker API since the decorators introduce
102 significant overhead.
102 significant overhead.
103 """
103 """
104 region = self._cache_region
104 region = self._cache_region
105 context = wire.get('context', None)
105 context = wire.get('context', None)
106 repo_path = wire.get('path', '')
106 repo_path = wire.get('path', '')
107 context_uid = '{}'.format(context)
107 context_uid = '{}'.format(context)
108 cache = wire.get('cache', True)
108 cache = wire.get('cache', True)
109 cache_on = context and cache
109 cache_on = context and cache
110
110
111 @region.conditional_cache_on_arguments(condition=cache_on)
111 @region.conditional_cache_on_arguments(condition=cache_on)
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 return self._create_repo(wire, create, compatible_version)
113 return self._create_repo(wire, create, compatible_version)
114
114
115 return create_new_repo(self.repo_type, repo_path, context_uid,
115 return create_new_repo(self.repo_type, repo_path, context_uid,
116 compatible_version)
116 compatible_version)
117
117
118
118
119 NODE_TYPE_MAPPING = {
119 NODE_TYPE_MAPPING = {
120 svn.core.svn_node_file: 'file',
120 svn.core.svn_node_file: 'file',
121 svn.core.svn_node_dir: 'dir',
121 svn.core.svn_node_dir: 'dir',
122 }
122 }
123
123
124
124
125 class SvnRemote(object):
125 class SvnRemote(object):
126
126
127 def __init__(self, factory, hg_factory=None):
127 def __init__(self, factory, hg_factory=None):
128 self._factory = factory
128 self._factory = factory
129 # TODO: Remove once we do not use internal Mercurial objects anymore
129 # TODO: Remove once we do not use internal Mercurial objects anymore
130 # for subversion
130 # for subversion
131 self._hg_factory = hg_factory
131 self._hg_factory = hg_factory
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def discover_svn_version(self):
134 def discover_svn_version(self):
135 try:
135 try:
136 import svn.core
136 import svn.core
137 svn_ver = svn.core.SVN_VERSION
137 svn_ver = svn.core.SVN_VERSION
138 except ImportError:
138 except ImportError:
139 svn_ver = None
139 svn_ver = None
140 return svn_ver
140 return svn_ver
141
141
142 @reraise_safe_exceptions
143 def is_empty(self, wire):
144 repo = self._factory.repo(wire)
145
146 try:
147 return self.lookup(wire, -1) == 0
148 except Exception:
149 log.exception("failed to read object_store")
150 return False
151
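Review note: the Subversion variant of `is_empty` piggybacks on `lookup(wire, -1)`, which returns the youngest revision; a freshly created repository reports 0. A standalone sketch using the same bindings this module already imports (helper name is illustrative only):

    import svn.fs
    import svn.repos

    def svn_repo_is_empty(path):
        """A Subversion repository whose youngest revision is 0 has no commits."""
        repo = svn.repos.open(path)
        return svn.fs.youngest_rev(svn.repos.fs(repo)) == 0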
142 def check_url(self, url, config_items):
152 def check_url(self, url, config_items):
143 # this can throw exception if not installed, but we detect this
153 # this can throw exception if not installed, but we detect this
144 from hgsubversion import svnrepo
154 from hgsubversion import svnrepo
145
155
146 baseui = self._hg_factory._create_config(config_items)
156 baseui = self._hg_factory._create_config(config_items)
147 # the uuid function gets a valid UUID only from a proper repo, else
157 # the uuid function gets a valid UUID only from a proper repo, else
148 # throws exception
158 # throws exception
149 try:
159 try:
150 svnrepo.svnremoterepo(baseui, url).svn.uuid
160 svnrepo.svnremoterepo(baseui, url).svn.uuid
151 except Exception:
161 except Exception:
152 tb = traceback.format_exc()
162 tb = traceback.format_exc()
153 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
163 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
154 raise URLError(
164 raise URLError(
155 '"%s" is not a valid Subversion source url.' % (url, ))
165 '"%s" is not a valid Subversion source url.' % (url, ))
156 return True
166 return True
157
167
158 def is_path_valid_repository(self, wire, path):
168 def is_path_valid_repository(self, wire, path):
159
169
160 # NOTE(marcink): short circuit the check for SVN repo
170 # NOTE(marcink): short circuit the check for SVN repo
161 # the repos.open might be expensive to check, but we have one cheap
171 # the repos.open might be expensive to check, but we have one cheap
162 # pre condition that we can use, to check for 'format' file
172 # pre condition that we can use, to check for 'format' file
163
173
164 if not os.path.isfile(os.path.join(path, 'format')):
174 if not os.path.isfile(os.path.join(path, 'format')):
165 return False
175 return False
166
176
167 try:
177 try:
168 svn.repos.open(path)
178 svn.repos.open(path)
169 except svn.core.SubversionException:
179 except svn.core.SubversionException:
170 tb = traceback.format_exc()
180 tb = traceback.format_exc()
171 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
181 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
172 return False
182 return False
173 return True
183 return True
174
184
175 @reraise_safe_exceptions
185 @reraise_safe_exceptions
176 def verify(self, wire,):
186 def verify(self, wire,):
177 repo_path = wire['path']
187 repo_path = wire['path']
178 if not self.is_path_valid_repository(wire, repo_path):
188 if not self.is_path_valid_repository(wire, repo_path):
179 raise Exception(
189 raise Exception(
180 "Path %s is not a valid Subversion repository." % repo_path)
190 "Path %s is not a valid Subversion repository." % repo_path)
181
191
182 cmd = ['svnadmin', 'info', repo_path]
192 cmd = ['svnadmin', 'info', repo_path]
183 stdout, stderr = subprocessio.run_command(cmd)
193 stdout, stderr = subprocessio.run_command(cmd)
184 return stdout
194 return stdout
185
195
186 def lookup(self, wire, revision):
196 def lookup(self, wire, revision):
187 if revision not in [-1, None, 'HEAD']:
197 if revision not in [-1, None, 'HEAD']:
188 raise NotImplementedError
198 raise NotImplementedError
189 repo = self._factory.repo(wire)
199 repo = self._factory.repo(wire)
190 fs_ptr = svn.repos.fs(repo)
200 fs_ptr = svn.repos.fs(repo)
191 head = svn.fs.youngest_rev(fs_ptr)
201 head = svn.fs.youngest_rev(fs_ptr)
192 return head
202 return head
193
203
    def lookup_interval(self, wire, start_ts, end_ts):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        start_rev = None
        end_rev = None
        if start_ts:
            start_ts_svn = apr_time_t(start_ts)
            start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
        else:
            start_rev = 1
        if end_ts:
            end_ts_svn = apr_time_t(end_ts)
            end_rev = svn.repos.dated_revision(repo, end_ts_svn)
        else:
            end_rev = svn.fs.youngest_rev(fsobj)
        return start_rev, end_rev

    def revision_properties(self, wire, revision):
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        return svn.fs.revision_proplist(fs_ptr, revision)

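    # `revision_changes` replays a single revision through a ChangeCollector
    # editor and buckets the touched file paths. The returned structure looks
    # like this (paths are illustrative only):
    #
    #   {'added': ['docs/index.rst'],
    #    'changed': ['setup.py'],
    #    'removed': []}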
    def revision_changes(self, wire, revision):

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.iteritems():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.

            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action %s not supported on path %s" % (
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes

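    # `node_history` walks the history chain of a single node backwards from
    # the given revision via `svn.fs.history_prev`, collecting at most `limit`
    # revision numbers; copy boundaries are not followed since `cross_copies`
    # is False.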
    def node_history(self, wire, path, revision, limit):
        cross_copies = False
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        history_revisions = []
        history = svn.fs.node_history(rev_root, path)
        history = svn.fs.history_prev(history, cross_copies)
        while history:
            __, node_revision = svn.fs.history_location(history)
            history_revisions.append(node_revision)
            if limit and len(history_revisions) >= limit:
                break
            history = svn.fs.history_prev(history, cross_copies)
        return history_revisions

    def node_properties(self, wire, path, revision):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)
        return svn.fs.node_proplist(rev_root, path)

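    # `file_annotate` performs blame through `svn.client.blame2` against a
    # canonicalized file:// URL pointing into the on-disk repository and
    # returns the (line_no, revision, line) tuples gathered by the nested
    # receiver callback.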
    def file_annotate(self, wire, path, revision):
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations

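    # The node lookups below rely on NODE_TYPE_MAPPING (defined earlier in
    # this module) to translate raw svn node kinds into the node-type strings
    # used by the rest of the backend; unknown or missing paths map to None.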
    def get_node_type(self, wire, path, rev=None):
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_rev(fs_ptr)
        root = svn.fs.revision_root(fs_ptr, rev)
        node = svn.fs.check_path(root, path)
        return NODE_TYPE_MAPPING.get(node, None)

    def get_nodes(self, wire, path, revision=None):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_rev(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        entries = svn.fs.dir_entries(root, path)
        result = []
        for entry_path, entry_info in entries.iteritems():
            result.append(
                (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
        return result

    def get_file_content(self, wire, path, rev=None):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()

    def get_file_size(self, wire, path, revision=None):
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        size = svn.fs.file_length(root, path)
        return size

    def create_repository(self, wire, compatible_version=None):
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)

    def get_url_and_credentials(self, src_url):
        obj = urlparse.urlparse(src_url)
        username = obj.username or None
        password = obj.password or None
        return username, password, src_url

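    # `import_remote_repository` streams a dump of the remote repository into
    # the local one, roughly equivalent to this shell pipeline (URL and path
    # are placeholders):
    #
    #   svnrdump dump --non-interactive https://example.com/svn/repo \
    #       | svnadmin load /srv/svn/my-repo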
    def import_remote_repository(self, wire, src_url):
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        log.debug('Return process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = 'UNKNOWN:{}'.format(errors)
            raise Exception(
                'Failed to dump the remote repository from %s. Reason:%s' % (
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

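    # `commit` opens a transaction against the current head revision via
    # `fs_begin_txn_for_commit`, applies every entry of `updated` and
    # `removed` through TxnNodeProcessor, commits the transaction and, when a
    # timestamp is given, backdates the new revision by rewriting its
    # svn:date revision property.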
    def commit(self, wire, message, author, timestamp, updated, removed):
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):

        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        try:
            return diff_creator.generate_diff()
        except svn.core.SubversionException as e:
            log.exception(
442 "Error during diff operation operation. "
452 "Error during diff operation operation. "
443 "Path might not exist %s, %s" % (path1, path2))
453 "Path might not exist %s, %s" % (path1, path2))
444 return ""
454 return ""
445
455
446 @reraise_safe_exceptions
456 @reraise_safe_exceptions
447 def is_large_file(self, wire, path):
457 def is_large_file(self, wire, path):
448 return False
458 return False
449
459
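    # `run_svn_command` executes an arbitrary svn binary invocation through
    # SubprocessIOChunker and returns (stdout, stderr). When the opts carry
    # the `_safe` marker, spawn failures are returned instead of raised.
    # A hedged sketch (names are placeholders):
    #
    #   stdout, _ = svn_remote.run_svn_command(wire, ['svn', 'info', path])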
    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = False
        if '_safe' in opts:
            safe_call = True

        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run svn command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        from vcsserver.hook_utils import (
            get_svn_pre_hook_version, get_svn_post_hook_version)
        repo_path = wire['path']
        return {
            'pre_version': get_svn_pre_hook_version(repo_path),
            'post_version': get_svn_post_hook_version(repo_path),
        }


class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)


class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))


def authorization_callback_allow_all(root, path, pool):
    return True


class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)


def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    return timestamp * 1E6


def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision