hooks: expose logic to fetch hook file information.
marcink
r623:0f0a8ed3 default
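The new method in this change, `get_hooks_info`, returns the versions of a repository's installed pre/post hook files. A minimal usage sketch, assuming `remote` is a `GitRemote` instance already wired up by the vcsserver application and `wire` is the usual per-repo payload (both illustrative, not part of this commit):

    # Illustrative only; `remote` (a GitRemote) and the wire layout are assumed.
    wire = {'path': '/srv/repos/example.git', 'config': []}
    hooks = remote.get_hooks_info(wire)
    # Expected shape, per the method added below:
    # {'pre_version': '...', 'post_version': '...'}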
@@ -1,732 +1,742 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import collections
17 import collections
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 import more_itertools
28 import more_itertools
29 from dulwich import index, objects
29 from dulwich import index, objects
30 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.client import HttpGitClient, LocalGitClient
31 from dulwich.errors import (
31 from dulwich.errors import (
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 MissingCommitError, ObjectMissing, HangupException,
33 MissingCommitError, ObjectMissing, HangupException,
34 UnexpectedCommandError)
34 UnexpectedCommandError)
35 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.repo import Repo as DulwichRepo, Tag
36 from dulwich.server import update_server_info
36 from dulwich.server import update_server_info
37
37
38 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver import exceptions, settings, subprocessio
39 from vcsserver.utils import safe_str
39 from vcsserver.utils import safe_str
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 from vcsserver.hgcompat import (
41 from vcsserver.hgcompat import (
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 from vcsserver.git_lfs.lib import LFSOidStore
43 from vcsserver.git_lfs.lib import LFSOidStore
44
44
45 DIR_STAT = stat.S_IFDIR
45 DIR_STAT = stat.S_IFDIR
46 FILE_MODE = stat.S_IFMT
46 FILE_MODE = stat.S_IFMT
47 GIT_LINK = objects.S_IFGITLINK
47 GIT_LINK = objects.S_IFGITLINK
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Converts Dulwich exceptions to something neutral."""
53 """Converts Dulwich exceptions to something neutral."""
54 @wraps(func)
54 @wraps(func)
55 def wrapper(*args, **kwargs):
55 def wrapper(*args, **kwargs):
56 try:
56 try:
57 return func(*args, **kwargs)
57 return func(*args, **kwargs)
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 ObjectMissing) as e:
59 ObjectMissing) as e:
60 exc = exceptions.LookupException(e)
60 exc = exceptions.LookupException(e)
61 raise exc(e)
61 raise exc(e)
62 except (HangupException, UnexpectedCommandError) as e:
62 except (HangupException, UnexpectedCommandError) as e:
63 exc = exceptions.VcsException(e)
63 exc = exceptions.VcsException(e)
64 raise exc(e)
64 raise exc(e)
65 except Exception as e:
65 except Exception as e:
66             # NOTE(marcink): because of how dulwich handles some exceptions
66             # NOTE(marcink): because of how dulwich handles some exceptions
67 # (KeyError on empty repos), we cannot track this and catch all
67 # (KeyError on empty repos), we cannot track this and catch all
68             # exceptions; these are exceptions from other handlers
68             # exceptions; these are exceptions from other handlers
69 #if not hasattr(e, '_vcs_kind'):
69 #if not hasattr(e, '_vcs_kind'):
70 #log.exception("Unhandled exception in git remote call")
70 #log.exception("Unhandled exception in git remote call")
71 #raise_from_original(exceptions.UnhandledException)
71 #raise_from_original(exceptions.UnhandledException)
72 raise
72 raise
73 return wrapper
73 return wrapper
74
74
75
75
76 class Repo(DulwichRepo):
76 class Repo(DulwichRepo):
77 """
77 """
78 A wrapper for dulwich Repo class.
78 A wrapper for dulwich Repo class.
79
79
80     Since dulwich sometimes keeps .idx file descriptors open, this leads to a
80     Since dulwich sometimes keeps .idx file descriptors open, this leads to a
81 "Too many open files" error. We need to close all opened file descriptors
81 "Too many open files" error. We need to close all opened file descriptors
82 once the repo object is destroyed.
82 once the repo object is destroyed.
83
83
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 to 0.12.0 +
85 to 0.12.0 +
86 """
86 """
87 def __del__(self):
87 def __del__(self):
88 if hasattr(self, 'object_store'):
88 if hasattr(self, 'object_store'):
89 self.close()
89 self.close()
90
90
91
91
92 class GitFactory(RepoFactory):
92 class GitFactory(RepoFactory):
93 repo_type = 'git'
93 repo_type = 'git'
94
94
95 def _create_repo(self, wire, create):
95 def _create_repo(self, wire, create):
96 repo_path = str_to_dulwich(wire['path'])
96 repo_path = str_to_dulwich(wire['path'])
97 return Repo(repo_path)
97 return Repo(repo_path)
98
98
99
99
100 class GitRemote(object):
100 class GitRemote(object):
101
101
102 def __init__(self, factory):
102 def __init__(self, factory):
103 self._factory = factory
103 self._factory = factory
104 self.peeled_ref_marker = '^{}'
104 self.peeled_ref_marker = '^{}'
105 self._bulk_methods = {
105 self._bulk_methods = {
106 "author": self.commit_attribute,
106 "author": self.commit_attribute,
107 "date": self.get_object_attrs,
107 "date": self.get_object_attrs,
108 "message": self.commit_attribute,
108 "message": self.commit_attribute,
109 "parents": self.commit_attribute,
109 "parents": self.commit_attribute,
110 "_commit": self.revision,
110 "_commit": self.revision,
111 }
111 }
112
112
113 def _wire_to_config(self, wire):
113 def _wire_to_config(self, wire):
114 if 'config' in wire:
114 if 'config' in wire:
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 return {}
116 return {}
117
117
118 def _assign_ref(self, wire, ref, commit_id):
118 def _assign_ref(self, wire, ref, commit_id):
119 repo = self._factory.repo(wire)
119 repo = self._factory.repo(wire)
120 repo[ref] = commit_id
120 repo[ref] = commit_id
121
121
122 def _remote_conf(self, config):
122 def _remote_conf(self, config):
123 params = [
123 params = [
124 '-c', 'core.askpass=""',
124 '-c', 'core.askpass=""',
125 ]
125 ]
126 ssl_cert_dir = config.get('vcs_ssl_dir')
126 ssl_cert_dir = config.get('vcs_ssl_dir')
127 if ssl_cert_dir:
127 if ssl_cert_dir:
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 return params
129 return params
130
130
131 @reraise_safe_exceptions
131 @reraise_safe_exceptions
132 def add_object(self, wire, content):
132 def add_object(self, wire, content):
133 repo = self._factory.repo(wire)
133 repo = self._factory.repo(wire)
134 blob = objects.Blob()
134 blob = objects.Blob()
135 blob.set_raw_string(content)
135 blob.set_raw_string(content)
136 repo.object_store.add_object(blob)
136 repo.object_store.add_object(blob)
137 return blob.id
137 return blob.id
138
138
139 @reraise_safe_exceptions
139 @reraise_safe_exceptions
140 def assert_correct_path(self, wire):
140 def assert_correct_path(self, wire):
141 path = wire.get('path')
141 path = wire.get('path')
142 try:
142 try:
143 self._factory.repo(wire)
143 self._factory.repo(wire)
144 except NotGitRepository as e:
144 except NotGitRepository as e:
145 tb = traceback.format_exc()
145 tb = traceback.format_exc()
146 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
146 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
147 return False
147 return False
148
148
149 return True
149 return True
150
150
151 @reraise_safe_exceptions
151 @reraise_safe_exceptions
152 def bare(self, wire):
152 def bare(self, wire):
153 repo = self._factory.repo(wire)
153 repo = self._factory.repo(wire)
154 return repo.bare
154 return repo.bare
155
155
156 @reraise_safe_exceptions
156 @reraise_safe_exceptions
157 def blob_as_pretty_string(self, wire, sha):
157 def blob_as_pretty_string(self, wire, sha):
158 repo = self._factory.repo(wire)
158 repo = self._factory.repo(wire)
159 return repo[sha].as_pretty_string()
159 return repo[sha].as_pretty_string()
160
160
161 @reraise_safe_exceptions
161 @reraise_safe_exceptions
162 def blob_raw_length(self, wire, sha):
162 def blob_raw_length(self, wire, sha):
163 repo = self._factory.repo(wire)
163 repo = self._factory.repo(wire)
164 blob = repo[sha]
164 blob = repo[sha]
165 return blob.raw_length()
165 return blob.raw_length()
166
166
167 def _parse_lfs_pointer(self, raw_content):
167 def _parse_lfs_pointer(self, raw_content):
168
168
169 spec_string = 'version https://git-lfs.github.com/spec'
169 spec_string = 'version https://git-lfs.github.com/spec'
170 if raw_content and raw_content.startswith(spec_string):
170 if raw_content and raw_content.startswith(spec_string):
171 pattern = re.compile(r"""
171 pattern = re.compile(r"""
172 (?:\n)?
172 (?:\n)?
173 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
173 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
174 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
174 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
175 ^size[ ](?P<oid_size>[0-9]+)\n
175 ^size[ ](?P<oid_size>[0-9]+)\n
176 (?:\n)?
176 (?:\n)?
177 """, re.VERBOSE | re.MULTILINE)
177 """, re.VERBOSE | re.MULTILINE)
178 match = pattern.match(raw_content)
178 match = pattern.match(raw_content)
179 if match:
179 if match:
180 return match.groupdict()
180 return match.groupdict()
181
181
182 return {}
182 return {}
183
183
184 @reraise_safe_exceptions
184 @reraise_safe_exceptions
185 def is_large_file(self, wire, sha):
185 def is_large_file(self, wire, sha):
186 repo = self._factory.repo(wire)
186 repo = self._factory.repo(wire)
187 blob = repo[sha]
187 blob = repo[sha]
188 return self._parse_lfs_pointer(blob.as_raw_string())
188 return self._parse_lfs_pointer(blob.as_raw_string())
189
189
190 @reraise_safe_exceptions
190 @reraise_safe_exceptions
191 def in_largefiles_store(self, wire, oid):
191 def in_largefiles_store(self, wire, oid):
192 repo = self._factory.repo(wire)
192 repo = self._factory.repo(wire)
193 conf = self._wire_to_config(wire)
193 conf = self._wire_to_config(wire)
194
194
195 store_location = conf.get('vcs_git_lfs_store_location')
195 store_location = conf.get('vcs_git_lfs_store_location')
196 if store_location:
196 if store_location:
197 repo_name = repo.path
197 repo_name = repo.path
198 store = LFSOidStore(
198 store = LFSOidStore(
199 oid=oid, repo=repo_name, store_location=store_location)
199 oid=oid, repo=repo_name, store_location=store_location)
200 return store.has_oid()
200 return store.has_oid()
201
201
202 return False
202 return False
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def store_path(self, wire, oid):
205 def store_path(self, wire, oid):
206 repo = self._factory.repo(wire)
206 repo = self._factory.repo(wire)
207 conf = self._wire_to_config(wire)
207 conf = self._wire_to_config(wire)
208
208
209 store_location = conf.get('vcs_git_lfs_store_location')
209 store_location = conf.get('vcs_git_lfs_store_location')
210 if store_location:
210 if store_location:
211 repo_name = repo.path
211 repo_name = repo.path
212 store = LFSOidStore(
212 store = LFSOidStore(
213 oid=oid, repo=repo_name, store_location=store_location)
213 oid=oid, repo=repo_name, store_location=store_location)
214 return store.oid_path
214 return store.oid_path
215 raise ValueError('Unable to fetch oid with path {}'.format(oid))
215 raise ValueError('Unable to fetch oid with path {}'.format(oid))
216
216
217 @reraise_safe_exceptions
217 @reraise_safe_exceptions
218 def bulk_request(self, wire, rev, pre_load):
218 def bulk_request(self, wire, rev, pre_load):
219 result = {}
219 result = {}
220 for attr in pre_load:
220 for attr in pre_load:
221 try:
221 try:
222 method = self._bulk_methods[attr]
222 method = self._bulk_methods[attr]
223 args = [wire, rev]
223 args = [wire, rev]
224 if attr == "date":
224 if attr == "date":
225 args.extend(["commit_time", "commit_timezone"])
225 args.extend(["commit_time", "commit_timezone"])
226 elif attr in ["author", "message", "parents"]:
226 elif attr in ["author", "message", "parents"]:
227 args.append(attr)
227 args.append(attr)
228 result[attr] = method(*args)
228 result[attr] = method(*args)
229 except KeyError as e:
229 except KeyError as e:
230 raise exceptions.VcsException(e)(
230 raise exceptions.VcsException(e)(
231 "Unknown bulk attribute: %s" % attr)
231 "Unknown bulk attribute: %s" % attr)
232 return result
232 return result
233
233
234 def _build_opener(self, url):
234 def _build_opener(self, url):
235 handlers = []
235 handlers = []
236 url_obj = url_parser(url)
236 url_obj = url_parser(url)
237 _, authinfo = url_obj.authinfo()
237 _, authinfo = url_obj.authinfo()
238
238
239 if authinfo:
239 if authinfo:
240 # create a password manager
240 # create a password manager
241 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
241 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
242 passmgr.add_password(*authinfo)
242 passmgr.add_password(*authinfo)
243
243
244 handlers.extend((httpbasicauthhandler(passmgr),
244 handlers.extend((httpbasicauthhandler(passmgr),
245 httpdigestauthhandler(passmgr)))
245 httpdigestauthhandler(passmgr)))
246
246
247 return urllib2.build_opener(*handlers)
247 return urllib2.build_opener(*handlers)
248
248
249 @reraise_safe_exceptions
249 @reraise_safe_exceptions
250 def check_url(self, url, config):
250 def check_url(self, url, config):
251 url_obj = url_parser(url)
251 url_obj = url_parser(url)
252 test_uri, _ = url_obj.authinfo()
252 test_uri, _ = url_obj.authinfo()
253 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
253 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
254 url_obj.query = obfuscate_qs(url_obj.query)
254 url_obj.query = obfuscate_qs(url_obj.query)
255 cleaned_uri = str(url_obj)
255 cleaned_uri = str(url_obj)
256 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
256 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
257
257
258 if not test_uri.endswith('info/refs'):
258 if not test_uri.endswith('info/refs'):
259 test_uri = test_uri.rstrip('/') + '/info/refs'
259 test_uri = test_uri.rstrip('/') + '/info/refs'
260
260
261 o = self._build_opener(url)
261 o = self._build_opener(url)
262 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
262 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
263
263
264 q = {"service": 'git-upload-pack'}
264 q = {"service": 'git-upload-pack'}
265 qs = '?%s' % urllib.urlencode(q)
265 qs = '?%s' % urllib.urlencode(q)
266 cu = "%s%s" % (test_uri, qs)
266 cu = "%s%s" % (test_uri, qs)
267 req = urllib2.Request(cu, None, {})
267 req = urllib2.Request(cu, None, {})
268
268
269 try:
269 try:
270 log.debug("Trying to open URL %s", cleaned_uri)
270 log.debug("Trying to open URL %s", cleaned_uri)
271 resp = o.open(req)
271 resp = o.open(req)
272 if resp.code != 200:
272 if resp.code != 200:
273 raise exceptions.URLError()('Return Code is not 200')
273 raise exceptions.URLError()('Return Code is not 200')
274 except Exception as e:
274 except Exception as e:
275 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
275 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
276 # means it cannot be cloned
276 # means it cannot be cloned
277 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
277 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
278
278
279         # now detect if it's a proper git repo
279         # now detect if it's a proper git repo
280 gitdata = resp.read()
280 gitdata = resp.read()
281 if 'service=git-upload-pack' in gitdata:
281 if 'service=git-upload-pack' in gitdata:
282 pass
282 pass
283 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
283 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
284             # old-style git can return some other format!
284             # old-style git can return some other format!
285 pass
285 pass
286 else:
286 else:
287 raise exceptions.URLError()(
287 raise exceptions.URLError()(
288                 "url [%s] does not look like a git repository" % (cleaned_uri,))
288                 "url [%s] does not look like a git repository" % (cleaned_uri,))
289
289
290 return True
290 return True
291
291
292 @reraise_safe_exceptions
292 @reraise_safe_exceptions
293 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
293 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
294         # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
294         # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
295 remote_refs = self.pull(wire, url, apply_refs=False)
295 remote_refs = self.pull(wire, url, apply_refs=False)
296 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
297 if isinstance(valid_refs, list):
297 if isinstance(valid_refs, list):
298 valid_refs = tuple(valid_refs)
298 valid_refs = tuple(valid_refs)
299
299
300 for k in remote_refs:
300 for k in remote_refs:
301             # only parse heads/tags and skip so-called deferred tags
301             # only parse heads/tags and skip so-called deferred tags
302 if k.startswith(valid_refs) and not k.endswith(deferred):
302 if k.startswith(valid_refs) and not k.endswith(deferred):
303 repo[k] = remote_refs[k]
303 repo[k] = remote_refs[k]
304
304
305 if update_after_clone:
305 if update_after_clone:
306 # we want to checkout HEAD
306 # we want to checkout HEAD
307 repo["HEAD"] = remote_refs["HEAD"]
307 repo["HEAD"] = remote_refs["HEAD"]
308 index.build_index_from_tree(repo.path, repo.index_path(),
308 index.build_index_from_tree(repo.path, repo.index_path(),
309 repo.object_store, repo["HEAD"].tree)
309 repo.object_store, repo["HEAD"].tree)
310
310
311 # TODO: this is quite complex, check if that can be simplified
311 # TODO: this is quite complex, check if that can be simplified
312 @reraise_safe_exceptions
312 @reraise_safe_exceptions
313 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
313 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
314 repo = self._factory.repo(wire)
314 repo = self._factory.repo(wire)
315 object_store = repo.object_store
315 object_store = repo.object_store
316
316
317         # Create a tree and populate it with blobs
317         # Create a tree and populate it with blobs
318 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
318 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
319
319
320 for node in updated:
320 for node in updated:
321 # Compute subdirs if needed
321 # Compute subdirs if needed
322 dirpath, nodename = vcspath.split(node['path'])
322 dirpath, nodename = vcspath.split(node['path'])
323 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
323 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
324 parent = commit_tree
324 parent = commit_tree
325 ancestors = [('', parent)]
325 ancestors = [('', parent)]
326
326
327 # Tries to dig for the deepest existing tree
327 # Tries to dig for the deepest existing tree
328 while dirnames:
328 while dirnames:
329 curdir = dirnames.pop(0)
329 curdir = dirnames.pop(0)
330 try:
330 try:
331 dir_id = parent[curdir][1]
331 dir_id = parent[curdir][1]
332 except KeyError:
332 except KeyError:
333                     # put curdir back into dirnames and stop
333                     # put curdir back into dirnames and stop
334 dirnames.insert(0, curdir)
334 dirnames.insert(0, curdir)
335 break
335 break
336 else:
336 else:
337 # If found, updates parent
337 # If found, updates parent
338 parent = repo[dir_id]
338 parent = repo[dir_id]
339 ancestors.append((curdir, parent))
339 ancestors.append((curdir, parent))
340 # Now parent is deepest existing tree and we need to create
340 # Now parent is deepest existing tree and we need to create
341 # subtrees for dirnames (in reverse order)
341 # subtrees for dirnames (in reverse order)
342 # [this only applies for nodes from added]
342 # [this only applies for nodes from added]
343 new_trees = []
343 new_trees = []
344
344
345 blob = objects.Blob.from_string(node['content'])
345 blob = objects.Blob.from_string(node['content'])
346
346
347 if dirnames:
347 if dirnames:
348 # If there are trees which should be created we need to build
348 # If there are trees which should be created we need to build
349 # them now (in reverse order)
349 # them now (in reverse order)
350 reversed_dirnames = list(reversed(dirnames))
350 reversed_dirnames = list(reversed(dirnames))
351 curtree = objects.Tree()
351 curtree = objects.Tree()
352 curtree[node['node_path']] = node['mode'], blob.id
352 curtree[node['node_path']] = node['mode'], blob.id
353 new_trees.append(curtree)
353 new_trees.append(curtree)
354 for dirname in reversed_dirnames[:-1]:
354 for dirname in reversed_dirnames[:-1]:
355 newtree = objects.Tree()
355 newtree = objects.Tree()
356 newtree[dirname] = (DIR_STAT, curtree.id)
356 newtree[dirname] = (DIR_STAT, curtree.id)
357 new_trees.append(newtree)
357 new_trees.append(newtree)
358 curtree = newtree
358 curtree = newtree
359 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
359 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
360 else:
360 else:
361 parent.add(
361 parent.add(
362 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
362 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
363
363
364 new_trees.append(parent)
364 new_trees.append(parent)
365 # Update ancestors
365 # Update ancestors
366 reversed_ancestors = reversed(
366 reversed_ancestors = reversed(
367 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
367 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
368 for parent, tree, path in reversed_ancestors:
368 for parent, tree, path in reversed_ancestors:
369 parent[path] = (DIR_STAT, tree.id)
369 parent[path] = (DIR_STAT, tree.id)
370 object_store.add_object(tree)
370 object_store.add_object(tree)
371
371
372 object_store.add_object(blob)
372 object_store.add_object(blob)
373 for tree in new_trees:
373 for tree in new_trees:
374 object_store.add_object(tree)
374 object_store.add_object(tree)
375
375
376 for node_path in removed:
376 for node_path in removed:
377 paths = node_path.split('/')
377 paths = node_path.split('/')
378 tree = commit_tree
378 tree = commit_tree
379 trees = [tree]
379 trees = [tree]
380 # Traverse deep into the forest...
380 # Traverse deep into the forest...
381 for path in paths:
381 for path in paths:
382 try:
382 try:
383 obj = repo[tree[path][1]]
383 obj = repo[tree[path][1]]
384 if isinstance(obj, objects.Tree):
384 if isinstance(obj, objects.Tree):
385 trees.append(obj)
385 trees.append(obj)
386 tree = obj
386 tree = obj
387 except KeyError:
387 except KeyError:
388 break
388 break
389 # Cut down the blob and all rotten trees on the way back...
389 # Cut down the blob and all rotten trees on the way back...
390 for path, tree in reversed(zip(paths, trees)):
390 for path, tree in reversed(zip(paths, trees)):
391 del tree[path]
391 del tree[path]
392 if tree:
392 if tree:
393 # This tree still has elements - don't remove it or any
393 # This tree still has elements - don't remove it or any
394                     # of its parents
394                     # of its parents
395 break
395 break
396
396
397 object_store.add_object(commit_tree)
397 object_store.add_object(commit_tree)
398
398
399 # Create commit
399 # Create commit
400 commit = objects.Commit()
400 commit = objects.Commit()
401 commit.tree = commit_tree.id
401 commit.tree = commit_tree.id
402 for k, v in commit_data.iteritems():
402 for k, v in commit_data.iteritems():
403 setattr(commit, k, v)
403 setattr(commit, k, v)
404 object_store.add_object(commit)
404 object_store.add_object(commit)
405
405
406 ref = 'refs/heads/%s' % branch
406 ref = 'refs/heads/%s' % branch
407 repo.refs[ref] = commit.id
407 repo.refs[ref] = commit.id
408
408
409 return commit.id
409 return commit.id
410
410
411 @reraise_safe_exceptions
411 @reraise_safe_exceptions
412 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
412 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
413 if url != 'default' and '://' not in url:
413 if url != 'default' and '://' not in url:
414 client = LocalGitClient(url)
414 client = LocalGitClient(url)
415 else:
415 else:
416 url_obj = url_parser(url)
416 url_obj = url_parser(url)
417 o = self._build_opener(url)
417 o = self._build_opener(url)
418 url, _ = url_obj.authinfo()
418 url, _ = url_obj.authinfo()
419 client = HttpGitClient(base_url=url, opener=o)
419 client = HttpGitClient(base_url=url, opener=o)
420 repo = self._factory.repo(wire)
420 repo = self._factory.repo(wire)
421
421
422 determine_wants = repo.object_store.determine_wants_all
422 determine_wants = repo.object_store.determine_wants_all
423 if refs:
423 if refs:
424 def determine_wants_requested(references):
424 def determine_wants_requested(references):
425 return [references[r] for r in references if r in refs]
425 return [references[r] for r in references if r in refs]
426 determine_wants = determine_wants_requested
426 determine_wants = determine_wants_requested
427
427
428 try:
428 try:
429 remote_refs = client.fetch(
429 remote_refs = client.fetch(
430 path=url, target=repo, determine_wants=determine_wants)
430 path=url, target=repo, determine_wants=determine_wants)
431 except NotGitRepository as e:
431 except NotGitRepository as e:
432 log.warning(
432 log.warning(
433 'Trying to fetch from "%s" failed, not a Git repository.', url)
433 'Trying to fetch from "%s" failed, not a Git repository.', url)
434 # Exception can contain unicode which we convert
434 # Exception can contain unicode which we convert
435 raise exceptions.AbortException(e)(repr(e))
435 raise exceptions.AbortException(e)(repr(e))
436
436
437 # mikhail: client.fetch() returns all the remote refs, but fetches only
437 # mikhail: client.fetch() returns all the remote refs, but fetches only
438         # refs filtered by the `determine_wants` function. We need to filter the result
438         # refs filtered by the `determine_wants` function. We need to filter the result
439 # as well
439 # as well
440 if refs:
440 if refs:
441 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
441 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
442
442
443 if apply_refs:
443 if apply_refs:
444 # TODO: johbo: Needs proper test coverage with a git repository
444 # TODO: johbo: Needs proper test coverage with a git repository
445 # that contains a tag object, so that we would end up with
445 # that contains a tag object, so that we would end up with
446 # a peeled ref at this point.
446 # a peeled ref at this point.
447 for k in remote_refs:
447 for k in remote_refs:
448 if k.endswith(self.peeled_ref_marker):
448 if k.endswith(self.peeled_ref_marker):
449 log.debug("Skipping peeled reference %s", k)
449 log.debug("Skipping peeled reference %s", k)
450 continue
450 continue
451 repo[k] = remote_refs[k]
451 repo[k] = remote_refs[k]
452
452
453 if refs and not update_after:
453 if refs and not update_after:
454 # mikhail: explicitly set the head to the last ref.
454 # mikhail: explicitly set the head to the last ref.
455 repo['HEAD'] = remote_refs[refs[-1]]
455 repo['HEAD'] = remote_refs[refs[-1]]
456
456
457 if update_after:
457 if update_after:
458 # we want to checkout HEAD
458 # we want to checkout HEAD
459 repo["HEAD"] = remote_refs["HEAD"]
459 repo["HEAD"] = remote_refs["HEAD"]
460 index.build_index_from_tree(repo.path, repo.index_path(),
460 index.build_index_from_tree(repo.path, repo.index_path(),
461 repo.object_store, repo["HEAD"].tree)
461 repo.object_store, repo["HEAD"].tree)
462 return remote_refs
462 return remote_refs
463
463
464 @reraise_safe_exceptions
464 @reraise_safe_exceptions
465 def sync_fetch(self, wire, url, refs=None):
465 def sync_fetch(self, wire, url, refs=None):
466 repo = self._factory.repo(wire)
466 repo = self._factory.repo(wire)
467 if refs and not isinstance(refs, (list, tuple)):
467 if refs and not isinstance(refs, (list, tuple)):
468 refs = [refs]
468 refs = [refs]
469 config = self._wire_to_config(wire)
469 config = self._wire_to_config(wire)
470 # get all remote refs we'll use to fetch later
470 # get all remote refs we'll use to fetch later
471 output, __ = self.run_git_command(
471 output, __ = self.run_git_command(
472 wire, ['ls-remote', url], fail_on_stderr=False,
472 wire, ['ls-remote', url], fail_on_stderr=False,
473 _copts=self._remote_conf(config),
473 _copts=self._remote_conf(config),
474 extra_env={'GIT_TERMINAL_PROMPT': '0'})
474 extra_env={'GIT_TERMINAL_PROMPT': '0'})
475
475
476 remote_refs = collections.OrderedDict()
476 remote_refs = collections.OrderedDict()
477 fetch_refs = []
477 fetch_refs = []
478
478
479 for ref_line in output.splitlines():
479 for ref_line in output.splitlines():
480 sha, ref = ref_line.split('\t')
480 sha, ref = ref_line.split('\t')
481 sha = sha.strip()
481 sha = sha.strip()
482 if ref in remote_refs:
482 if ref in remote_refs:
483 # duplicate, skip
483 # duplicate, skip
484 continue
484 continue
485 if ref.endswith(self.peeled_ref_marker):
485 if ref.endswith(self.peeled_ref_marker):
486 log.debug("Skipping peeled reference %s", ref)
486 log.debug("Skipping peeled reference %s", ref)
487 continue
487 continue
488 # don't sync HEAD
488 # don't sync HEAD
489 if ref in ['HEAD']:
489 if ref in ['HEAD']:
490 continue
490 continue
491
491
492 remote_refs[ref] = sha
492 remote_refs[ref] = sha
493
493
494 if refs and sha in refs:
494 if refs and sha in refs:
495 # we filter fetch using our specified refs
495 # we filter fetch using our specified refs
496 fetch_refs.append('{}:{}'.format(ref, ref))
496 fetch_refs.append('{}:{}'.format(ref, ref))
497 elif not refs:
497 elif not refs:
498 fetch_refs.append('{}:{}'.format(ref, ref))
498 fetch_refs.append('{}:{}'.format(ref, ref))
499 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
499 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
500 if fetch_refs:
500 if fetch_refs:
501 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
501 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
502 fetch_refs_chunks = list(chunk)
502 fetch_refs_chunks = list(chunk)
503 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
503 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
504 _out, _err = self.run_git_command(
504 _out, _err = self.run_git_command(
505 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
505 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
506 fail_on_stderr=False,
506 fail_on_stderr=False,
507 _copts=self._remote_conf(config),
507 _copts=self._remote_conf(config),
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
509
509
510 return remote_refs
510 return remote_refs
511
511
512 @reraise_safe_exceptions
512 @reraise_safe_exceptions
513 def sync_push(self, wire, url, refs=None):
513 def sync_push(self, wire, url, refs=None):
514 if not self.check_url(url, wire):
514 if not self.check_url(url, wire):
515 return
515 return
516 config = self._wire_to_config(wire)
516 config = self._wire_to_config(wire)
517 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
518 self.run_git_command(
518 self.run_git_command(
519 wire, ['push', url, '--mirror'], fail_on_stderr=False,
519 wire, ['push', url, '--mirror'], fail_on_stderr=False,
520 _copts=self._remote_conf(config),
520 _copts=self._remote_conf(config),
521 extra_env={'GIT_TERMINAL_PROMPT': '0'})
521 extra_env={'GIT_TERMINAL_PROMPT': '0'})
522
522
523 @reraise_safe_exceptions
523 @reraise_safe_exceptions
524 def get_remote_refs(self, wire, url):
524 def get_remote_refs(self, wire, url):
525 repo = Repo(url)
525 repo = Repo(url)
526 return repo.get_refs()
526 return repo.get_refs()
527
527
528 @reraise_safe_exceptions
528 @reraise_safe_exceptions
529 def get_description(self, wire):
529 def get_description(self, wire):
530 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
531 return repo.get_description()
531 return repo.get_description()
532
532
533 @reraise_safe_exceptions
533 @reraise_safe_exceptions
534 def get_missing_revs(self, wire, rev1, rev2, path2):
534 def get_missing_revs(self, wire, rev1, rev2, path2):
535 repo = self._factory.repo(wire)
535 repo = self._factory.repo(wire)
536 LocalGitClient(thin_packs=False).fetch(path2, repo)
536 LocalGitClient(thin_packs=False).fetch(path2, repo)
537
537
538 wire_remote = wire.copy()
538 wire_remote = wire.copy()
539 wire_remote['path'] = path2
539 wire_remote['path'] = path2
540 repo_remote = self._factory.repo(wire_remote)
540 repo_remote = self._factory.repo(wire_remote)
541 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
541 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
542
542
543 revs = [
543 revs = [
544 x.commit.id
544 x.commit.id
545 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
545 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
546 return revs
546 return revs
547
547
548 @reraise_safe_exceptions
548 @reraise_safe_exceptions
549 def get_object(self, wire, sha):
549 def get_object(self, wire, sha):
550 repo = self._factory.repo(wire)
550 repo = self._factory.repo(wire)
551 obj = repo.get_object(sha)
551 obj = repo.get_object(sha)
552 commit_id = obj.id
552 commit_id = obj.id
553
553
554 if isinstance(obj, Tag):
554 if isinstance(obj, Tag):
555 commit_id = obj.object[1]
555 commit_id = obj.object[1]
556
556
557 return {
557 return {
558 'id': obj.id,
558 'id': obj.id,
559 'type': obj.type_name,
559 'type': obj.type_name,
560 'commit_id': commit_id
560 'commit_id': commit_id
561 }
561 }
562
562
563 @reraise_safe_exceptions
563 @reraise_safe_exceptions
564 def get_object_attrs(self, wire, sha, *attrs):
564 def get_object_attrs(self, wire, sha, *attrs):
565 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
566 obj = repo.get_object(sha)
566 obj = repo.get_object(sha)
567 return list(getattr(obj, a) for a in attrs)
567 return list(getattr(obj, a) for a in attrs)
568
568
569 @reraise_safe_exceptions
569 @reraise_safe_exceptions
570 def get_refs(self, wire):
570 def get_refs(self, wire):
571 repo = self._factory.repo(wire)
571 repo = self._factory.repo(wire)
572 result = {}
572 result = {}
573 for ref, sha in repo.refs.as_dict().items():
573 for ref, sha in repo.refs.as_dict().items():
574 peeled_sha = repo.get_peeled(ref)
574 peeled_sha = repo.get_peeled(ref)
575 result[ref] = peeled_sha
575 result[ref] = peeled_sha
576 return result
576 return result
577
577
578 @reraise_safe_exceptions
578 @reraise_safe_exceptions
579 def get_refs_path(self, wire):
579 def get_refs_path(self, wire):
580 repo = self._factory.repo(wire)
580 repo = self._factory.repo(wire)
581 return repo.refs.path
581 return repo.refs.path
582
582
583 @reraise_safe_exceptions
583 @reraise_safe_exceptions
584 def head(self, wire, show_exc=True):
584 def head(self, wire, show_exc=True):
585 repo = self._factory.repo(wire)
585 repo = self._factory.repo(wire)
586 try:
586 try:
587 return repo.head()
587 return repo.head()
588 except Exception:
588 except Exception:
589 if show_exc:
589 if show_exc:
590 raise
590 raise
591
591
592 @reraise_safe_exceptions
592 @reraise_safe_exceptions
593 def init(self, wire):
593 def init(self, wire):
594 repo_path = str_to_dulwich(wire['path'])
594 repo_path = str_to_dulwich(wire['path'])
595 self.repo = Repo.init(repo_path)
595 self.repo = Repo.init(repo_path)
596
596
597 @reraise_safe_exceptions
597 @reraise_safe_exceptions
598 def init_bare(self, wire):
598 def init_bare(self, wire):
599 repo_path = str_to_dulwich(wire['path'])
599 repo_path = str_to_dulwich(wire['path'])
600 self.repo = Repo.init_bare(repo_path)
600 self.repo = Repo.init_bare(repo_path)
601
601
602 @reraise_safe_exceptions
602 @reraise_safe_exceptions
603 def revision(self, wire, rev):
603 def revision(self, wire, rev):
604 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
605 obj = repo[rev]
605 obj = repo[rev]
606 obj_data = {
606 obj_data = {
607 'id': obj.id,
607 'id': obj.id,
608 }
608 }
609 try:
609 try:
610 obj_data['tree'] = obj.tree
610 obj_data['tree'] = obj.tree
611 except AttributeError:
611 except AttributeError:
612 pass
612 pass
613 return obj_data
613 return obj_data
614
614
615 @reraise_safe_exceptions
615 @reraise_safe_exceptions
616 def commit_attribute(self, wire, rev, attr):
616 def commit_attribute(self, wire, rev, attr):
617 repo = self._factory.repo(wire)
617 repo = self._factory.repo(wire)
618 obj = repo[rev]
618 obj = repo[rev]
619 return getattr(obj, attr)
619 return getattr(obj, attr)
620
620
621 @reraise_safe_exceptions
621 @reraise_safe_exceptions
622 def set_refs(self, wire, key, value):
622 def set_refs(self, wire, key, value):
623 repo = self._factory.repo(wire)
623 repo = self._factory.repo(wire)
624 repo.refs[key] = value
624 repo.refs[key] = value
625
625
626 @reraise_safe_exceptions
626 @reraise_safe_exceptions
627 def remove_ref(self, wire, key):
627 def remove_ref(self, wire, key):
628 repo = self._factory.repo(wire)
628 repo = self._factory.repo(wire)
629 del repo.refs[key]
629 del repo.refs[key]
630
630
631 @reraise_safe_exceptions
631 @reraise_safe_exceptions
632 def tree_changes(self, wire, source_id, target_id):
632 def tree_changes(self, wire, source_id, target_id):
633 repo = self._factory.repo(wire)
633 repo = self._factory.repo(wire)
634 source = repo[source_id].tree if source_id else None
634 source = repo[source_id].tree if source_id else None
635 target = repo[target_id].tree
635 target = repo[target_id].tree
636 result = repo.object_store.tree_changes(source, target)
636 result = repo.object_store.tree_changes(source, target)
637 return list(result)
637 return list(result)
638
638
639 @reraise_safe_exceptions
639 @reraise_safe_exceptions
640 def tree_items(self, wire, tree_id):
640 def tree_items(self, wire, tree_id):
641 repo = self._factory.repo(wire)
641 repo = self._factory.repo(wire)
642 tree = repo[tree_id]
642 tree = repo[tree_id]
643
643
644 result = []
644 result = []
645 for item in tree.iteritems():
645 for item in tree.iteritems():
646 item_sha = item.sha
646 item_sha = item.sha
647 item_mode = item.mode
647 item_mode = item.mode
648
648
649 if FILE_MODE(item_mode) == GIT_LINK:
649 if FILE_MODE(item_mode) == GIT_LINK:
650 item_type = "link"
650 item_type = "link"
651 else:
651 else:
652 item_type = repo[item_sha].type_name
652 item_type = repo[item_sha].type_name
653
653
654 result.append((item.path, item_mode, item_sha, item_type))
654 result.append((item.path, item_mode, item_sha, item_type))
655 return result
655 return result
656
656
657 @reraise_safe_exceptions
657 @reraise_safe_exceptions
658 def update_server_info(self, wire):
658 def update_server_info(self, wire):
659 repo = self._factory.repo(wire)
659 repo = self._factory.repo(wire)
660 update_server_info(repo)
660 update_server_info(repo)
661
661
662 @reraise_safe_exceptions
662 @reraise_safe_exceptions
663 def discover_git_version(self):
663 def discover_git_version(self):
664 stdout, _ = self.run_git_command(
664 stdout, _ = self.run_git_command(
665 {}, ['--version'], _bare=True, _safe=True)
665 {}, ['--version'], _bare=True, _safe=True)
666 prefix = 'git version'
666 prefix = 'git version'
667 if stdout.startswith(prefix):
667 if stdout.startswith(prefix):
668 stdout = stdout[len(prefix):]
668 stdout = stdout[len(prefix):]
669 return stdout.strip()
669 return stdout.strip()
670
670
671 @reraise_safe_exceptions
671 @reraise_safe_exceptions
672 def run_git_command(self, wire, cmd, **opts):
672 def run_git_command(self, wire, cmd, **opts):
673 path = wire.get('path', None)
673 path = wire.get('path', None)
674
674
675 if path and os.path.isdir(path):
675 if path and os.path.isdir(path):
676 opts['cwd'] = path
676 opts['cwd'] = path
677
677
678 if '_bare' in opts:
678 if '_bare' in opts:
679 _copts = []
679 _copts = []
680 del opts['_bare']
680 del opts['_bare']
681 else:
681 else:
682 _copts = ['-c', 'core.quotepath=false', ]
682 _copts = ['-c', 'core.quotepath=false', ]
683 safe_call = False
683 safe_call = False
684 if '_safe' in opts:
684 if '_safe' in opts:
685 # no exc on failure
685 # no exc on failure
686 del opts['_safe']
686 del opts['_safe']
687 safe_call = True
687 safe_call = True
688
688
689 if '_copts' in opts:
689 if '_copts' in opts:
690 _copts.extend(opts['_copts'] or [])
690 _copts.extend(opts['_copts'] or [])
691 del opts['_copts']
691 del opts['_copts']
692
692
693 gitenv = os.environ.copy()
693 gitenv = os.environ.copy()
694 gitenv.update(opts.pop('extra_env', {}))
694 gitenv.update(opts.pop('extra_env', {}))
695         # need to clean/fix GIT_DIR!
695         # need to clean/fix GIT_DIR!
696 if 'GIT_DIR' in gitenv:
696 if 'GIT_DIR' in gitenv:
697 del gitenv['GIT_DIR']
697 del gitenv['GIT_DIR']
698 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
698 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
699 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
699 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
700
700
701 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
701 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
702 _opts = {'env': gitenv, 'shell': False}
702 _opts = {'env': gitenv, 'shell': False}
703
703
704 try:
704 try:
705 _opts.update(opts)
705 _opts.update(opts)
706 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
706 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
707
707
708 return ''.join(p), ''.join(p.error)
708 return ''.join(p), ''.join(p.error)
709 except (EnvironmentError, OSError) as err:
709 except (EnvironmentError, OSError) as err:
710 cmd = ' '.join(cmd) # human friendly CMD
710 cmd = ' '.join(cmd) # human friendly CMD
711 tb_err = ("Couldn't run git command (%s).\n"
711 tb_err = ("Couldn't run git command (%s).\n"
712 "Original error was:%s\n"
712 "Original error was:%s\n"
713 "Call options:%s\n"
713 "Call options:%s\n"
714 % (cmd, err, _opts))
714 % (cmd, err, _opts))
715 log.exception(tb_err)
715 log.exception(tb_err)
716 if safe_call:
716 if safe_call:
717 return '', err
717 return '', err
718 else:
718 else:
719 raise exceptions.VcsException()(tb_err)
719 raise exceptions.VcsException()(tb_err)
720
720
721 @reraise_safe_exceptions
721 @reraise_safe_exceptions
722 def install_hooks(self, wire, force=False):
722 def install_hooks(self, wire, force=False):
723 from vcsserver.hook_utils import install_git_hooks
723 from vcsserver.hook_utils import install_git_hooks
724 repo = self._factory.repo(wire)
724 repo = self._factory.repo(wire)
725 return install_git_hooks(repo.path, repo.bare, force_create=force)
725 return install_git_hooks(repo.path, repo.bare, force_create=force)
726
726
727 @reraise_safe_exceptions
728 def get_hooks_info(self, wire):
729 from vcsserver.hook_utils import (
730 get_git_pre_hook_version, get_git_post_hook_version)
731 repo = self._factory.repo(wire)
732 return {
733 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
734 'post_version': get_git_post_hook_version(repo.path, repo.bare),
735 }
736
727
737
728 def str_to_dulwich(value):
738 def str_to_dulwich(value):
729 """
739 """
730 Dulwich 0.10.1a requires `unicode` objects to be passed in.
740 Dulwich 0.10.1a requires `unicode` objects to be passed in.
731 """
741 """
732 return value.decode(settings.WIRE_ENCODING)
742 return value.decode(settings.WIRE_ENCODING)
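Combined with the existing `install_hooks` call above, `get_hooks_info` lets a caller check whether a repository's hook files are current before forcing a reinstall. A rough sketch under the same assumptions as above (`remote`, `wire`, and the expected version string are placeholders):

    # Sketch only: reinstall hooks when the recorded versions differ from
    # what this server expects; `expected` is a placeholder value.
    expected = '1.0.0'
    info = remote.get_hooks_info(wire)
    if info['pre_version'] != expected or info['post_version'] != expected:
        remote.install_hooks(wire, force=True)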
@@ -1,795 +1,803 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 import vcsserver
30 from vcsserver import exceptions
31 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
33 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
38 RepoLookupError, InterventionRequired, RequirementError)
38
39
39 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
40
41
41
42
42 def make_ui_from_config(repo_config):
43 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
44 baseui = ui.ui()
44
45
45 # clean the baseui object
46 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
47 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49 baseui._tcfg = hgconfig.config()
49
50
50 for section, option, value in repo_config:
51 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
52 baseui.setconfig(section, option, value)
52
53
53 # make our hgweb quiet so it doesn't print output
54 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
55 baseui.setconfig('ui', 'quiet', 'true')
55
56
56 baseui.setconfig('ui', 'paginate', 'never')
57 baseui.setconfig('ui', 'paginate', 'never')
57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # signal in a non-main thread, thus generating a ValueError.
59 # signal in a non-main thread, thus generating a ValueError.
59 baseui.setconfig('worker', 'numcpus', 1)
60 baseui.setconfig('worker', 'numcpus', 1)
60
61
61 # If there is no config for the largefiles extension, we explicitly disable
62 # If there is no config for the largefiles extension, we explicitly disable
62     # it here. This overrides settings from the repository's hgrc file. Recent
63     # it here. This overrides settings from the repository's hgrc file. Recent
63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 # mercurial versions enable largefiles in hgrc on clone from largefile
64 # repo.
65 # repo.
65 if not baseui.hasconfig('extensions', 'largefiles'):
66 if not baseui.hasconfig('extensions', 'largefiles'):
66 log.debug('Explicitly disable largefiles extension for repo.')
67 log.debug('Explicitly disable largefiles extension for repo.')
67 baseui.setconfig('extensions', 'largefiles', '!')
68 baseui.setconfig('extensions', 'largefiles', '!')
68
69
69 return baseui
70 return baseui
70
71
71
72
72 def reraise_safe_exceptions(func):
73 def reraise_safe_exceptions(func):
73 """Decorator for converting mercurial exceptions to something neutral."""
74 """Decorator for converting mercurial exceptions to something neutral."""
74 def wrapper(*args, **kwargs):
75 def wrapper(*args, **kwargs):
75 try:
76 try:
76 return func(*args, **kwargs)
77 return func(*args, **kwargs)
77 except (Abort, InterventionRequired) as e:
78 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException(e))
79 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError as e:
80 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException(e))
81 raise_from_original(exceptions.LookupException(e))
81 except RequirementError as e:
82 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException(e))
83 raise_from_original(exceptions.RequirementException(e))
83 except RepoError as e:
84 except RepoError as e:
84 raise_from_original(exceptions.VcsException(e))
85 raise_from_original(exceptions.VcsException(e))
85 except LookupError as e:
86 except LookupError as e:
86 raise_from_original(exceptions.LookupException(e))
87 raise_from_original(exceptions.LookupException(e))
87 except Exception as e:
88 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
89 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
90 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException(e))
91 raise_from_original(exceptions.UnhandledException(e))
91
92
92 raise
93 raise
93 return wrapper
94 return wrapper
94
95
95
96
96 class MercurialFactory(RepoFactory):
97 class MercurialFactory(RepoFactory):
97 repo_type = 'hg'
98 repo_type = 'hg'
98
99
99 def _create_config(self, config, hooks=True):
100 def _create_config(self, config, hooks=True):
100 if not hooks:
101 if not hooks:
101 hooks_to_clean = frozenset((
102 hooks_to_clean = frozenset((
102 'changegroup.repo_size', 'preoutgoing.pre_pull',
103 'changegroup.repo_size', 'preoutgoing.pre_pull',
103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
104 'outgoing.pull_logger', 'prechangegroup.pre_push'))
104 new_config = []
105 new_config = []
105 for section, option, value in config:
106 for section, option, value in config:
106 if section == 'hooks' and option in hooks_to_clean:
107 if section == 'hooks' and option in hooks_to_clean:
107 continue
108 continue
108 new_config.append((section, option, value))
109 new_config.append((section, option, value))
109 config = new_config
110 config = new_config
110
111
111 baseui = make_ui_from_config(config)
112 baseui = make_ui_from_config(config)
112 return baseui
113 return baseui
113
114
114 def _create_repo(self, wire, create):
115 def _create_repo(self, wire, create):
115 baseui = self._create_config(wire["config"])
116 baseui = self._create_config(wire["config"])
116 return localrepository(baseui, wire["path"], create)
117 return localrepository(baseui, wire["path"], create)
117
118
118
119
119 class HgRemote(object):
120 class HgRemote(object):
120
121
121 def __init__(self, factory):
122 def __init__(self, factory):
122 self._factory = factory
123 self._factory = factory
123
124
124 self._bulk_methods = {
125 self._bulk_methods = {
125 "affected_files": self.ctx_files,
126 "affected_files": self.ctx_files,
126 "author": self.ctx_user,
127 "author": self.ctx_user,
127 "branch": self.ctx_branch,
128 "branch": self.ctx_branch,
128 "children": self.ctx_children,
129 "children": self.ctx_children,
129 "date": self.ctx_date,
130 "date": self.ctx_date,
130 "message": self.ctx_description,
131 "message": self.ctx_description,
131 "parents": self.ctx_parents,
132 "parents": self.ctx_parents,
132 "status": self.ctx_status,
133 "status": self.ctx_status,
133 "obsolete": self.ctx_obsolete,
134 "obsolete": self.ctx_obsolete,
134 "phase": self.ctx_phase,
135 "phase": self.ctx_phase,
135 "hidden": self.ctx_hidden,
136 "hidden": self.ctx_hidden,
136 "_file_paths": self.ctx_list,
137 "_file_paths": self.ctx_list,
137 }
138 }
138
139
139 @reraise_safe_exceptions
140 @reraise_safe_exceptions
140 def discover_hg_version(self):
141 def discover_hg_version(self):
141 from mercurial import util
142 from mercurial import util
142 return util.version()
143 return util.version()
143
144
144 @reraise_safe_exceptions
145 @reraise_safe_exceptions
145 def archive_repo(self, archive_path, mtime, file_info, kind):
146 def archive_repo(self, archive_path, mtime, file_info, kind):
146 if kind == "tgz":
147 if kind == "tgz":
147 archiver = archival.tarit(archive_path, mtime, "gz")
148 archiver = archival.tarit(archive_path, mtime, "gz")
148 elif kind == "tbz2":
149 elif kind == "tbz2":
149 archiver = archival.tarit(archive_path, mtime, "bz2")
150 archiver = archival.tarit(archive_path, mtime, "bz2")
150 elif kind == 'zip':
151 elif kind == 'zip':
151 archiver = archival.zipit(archive_path, mtime)
152 archiver = archival.zipit(archive_path, mtime)
152 else:
153 else:
153 raise exceptions.ArchiveException()(
154 raise exceptions.ArchiveException()(
154 'Remote does not support: "%s".' % kind)
155 'Remote does not support: "%s".' % kind)
155
156
156 for f_path, f_mode, f_is_link, f_content in file_info:
157 for f_path, f_mode, f_is_link, f_content in file_info:
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 archiver.done()
159 archiver.done()
159
160
160 @reraise_safe_exceptions
161 @reraise_safe_exceptions
161 def bookmarks(self, wire):
162 def bookmarks(self, wire):
162 repo = self._factory.repo(wire)
163 repo = self._factory.repo(wire)
163 return dict(repo._bookmarks)
164 return dict(repo._bookmarks)
164
165
165 @reraise_safe_exceptions
166 @reraise_safe_exceptions
166 def branches(self, wire, normal, closed):
167 def branches(self, wire, normal, closed):
167 repo = self._factory.repo(wire)
168 repo = self._factory.repo(wire)
168 iter_branches = repo.branchmap().iterbranches()
169 iter_branches = repo.branchmap().iterbranches()
169 bt = {}
170 bt = {}
170 for branch_name, _heads, tip, is_closed in iter_branches:
171 for branch_name, _heads, tip, is_closed in iter_branches:
171 if normal and not is_closed:
172 if normal and not is_closed:
172 bt[branch_name] = tip
173 bt[branch_name] = tip
173 if closed and is_closed:
174 if closed and is_closed:
174 bt[branch_name] = tip
175 bt[branch_name] = tip
175
176
176 return bt
177 return bt
177
178
178 @reraise_safe_exceptions
179 @reraise_safe_exceptions
179 def bulk_request(self, wire, rev, pre_load):
180 def bulk_request(self, wire, rev, pre_load):
180 result = {}
181 result = {}
181 for attr in pre_load:
182 for attr in pre_load:
182 try:
183 try:
183 method = self._bulk_methods[attr]
184 method = self._bulk_methods[attr]
184 result[attr] = method(wire, rev)
185 result[attr] = method(wire, rev)
185 except KeyError as e:
186 except KeyError as e:
186 raise exceptions.VcsException(e)(
187 raise exceptions.VcsException(e)(
187 'Unknown bulk attribute: "%s"' % attr)
188 'Unknown bulk attribute: "%s"' % attr)
188 return result
189 return result
189
190
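A hypothetical call sketch for bulk_request; the attribute names come from the _bulk_methods mapping above, while the factory and wire wiring are assumptions about the calling side:

    remote = HgRemote(factory)   # factory construction assumed
    data = remote.bulk_request(wire, rev='tip',
                               pre_load=['author', 'branch', 'message'])
    # data -> {'author': ..., 'branch': ..., 'message': ...}
    # an attribute missing from _bulk_methods raises exceptions.VcsException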
190 @reraise_safe_exceptions
191 @reraise_safe_exceptions
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
194 clone(baseui, source, dest, noupdate=not update_after_clone)
194
195
195 @reraise_safe_exceptions
196 @reraise_safe_exceptions
196 def commitctx(
197 def commitctx(
197 self, wire, message, parents, commit_time, commit_timezone,
198 self, wire, message, parents, commit_time, commit_timezone,
198 user, files, extra, removed, updated):
199 user, files, extra, removed, updated):
199
200
200 def _filectxfn(_repo, memctx, path):
201 def _filectxfn(_repo, memctx, path):
201 """
202 """
202 Marks given path as added/changed/removed in a given _repo. This is
203 Marks given path as added/changed/removed in a given _repo. This is
203 for the internal mercurial commit function.
204 for the internal mercurial commit function.
204 """
205 """
205
206
206 # check if this path is removed
207 # check if this path is removed
207 if path in removed:
208 if path in removed:
208 # returning None is a way to mark node for removal
209 # returning None is a way to mark node for removal
209 return None
210 return None
210
211
211 # check if this path is added
212 # check if this path is added
212 for node in updated:
213 for node in updated:
213 if node['path'] == path:
214 if node['path'] == path:
214 return memfilectx(
215 return memfilectx(
215 _repo,
216 _repo,
216 changectx=memctx,
217 changectx=memctx,
217 path=node['path'],
218 path=node['path'],
218 data=node['content'],
219 data=node['content'],
219 islink=False,
220 islink=False,
220 isexec=bool(node['mode'] & stat.S_IXUSR),
221 isexec=bool(node['mode'] & stat.S_IXUSR),
221 copied=False)
222 copied=False)
222
223
223 raise exceptions.AbortException()(
224 raise exceptions.AbortException()(
224 "Given path haven't been marked as added, "
225 "Given path haven't been marked as added, "
225 "changed or removed (%s)" % path)
226 "changed or removed (%s)" % path)
226
227
227 repo = self._factory.repo(wire)
228 repo = self._factory.repo(wire)
228
229
229 commit_ctx = memctx(
230 commit_ctx = memctx(
230 repo=repo,
231 repo=repo,
231 parents=parents,
232 parents=parents,
232 text=message,
233 text=message,
233 files=files,
234 files=files,
234 filectxfn=_filectxfn,
235 filectxfn=_filectxfn,
235 user=user,
236 user=user,
236 date=(commit_time, commit_timezone),
237 date=(commit_time, commit_timezone),
237 extra=extra)
238 extra=extra)
238
239
239 n = repo.commitctx(commit_ctx)
240 n = repo.commitctx(commit_ctx)
240 new_id = hex(n)
241 new_id = hex(n)
241
242
242 return new_id
243 return new_id
243
244
244 @reraise_safe_exceptions
245 @reraise_safe_exceptions
245 def ctx_branch(self, wire, revision):
246 def ctx_branch(self, wire, revision):
246 repo = self._factory.repo(wire)
247 repo = self._factory.repo(wire)
247 ctx = repo[revision]
248 ctx = repo[revision]
248 return ctx.branch()
249 return ctx.branch()
249
250
250 @reraise_safe_exceptions
251 @reraise_safe_exceptions
251 def ctx_children(self, wire, revision):
252 def ctx_children(self, wire, revision):
252 repo = self._factory.repo(wire)
253 repo = self._factory.repo(wire)
253 ctx = repo[revision]
254 ctx = repo[revision]
254 return [child.rev() for child in ctx.children()]
255 return [child.rev() for child in ctx.children()]
255
256
256 @reraise_safe_exceptions
257 @reraise_safe_exceptions
257 def ctx_date(self, wire, revision):
258 def ctx_date(self, wire, revision):
258 repo = self._factory.repo(wire)
259 repo = self._factory.repo(wire)
259 ctx = repo[revision]
260 ctx = repo[revision]
260 return ctx.date()
261 return ctx.date()
261
262
262 @reraise_safe_exceptions
263 @reraise_safe_exceptions
263 def ctx_description(self, wire, revision):
264 def ctx_description(self, wire, revision):
264 repo = self._factory.repo(wire)
265 repo = self._factory.repo(wire)
265 ctx = repo[revision]
266 ctx = repo[revision]
266 return ctx.description()
267 return ctx.description()
267
268
268 @reraise_safe_exceptions
269 @reraise_safe_exceptions
269 def ctx_diff(
270 def ctx_diff(
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 repo = self._factory.repo(wire)
272 repo = self._factory.repo(wire)
272 ctx = repo[revision]
273 ctx = repo[revision]
273 result = ctx.diff(
274 result = ctx.diff(
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 return list(result)
276 return list(result)
276
277
277 @reraise_safe_exceptions
278 @reraise_safe_exceptions
278 def ctx_files(self, wire, revision):
279 def ctx_files(self, wire, revision):
279 repo = self._factory.repo(wire)
280 repo = self._factory.repo(wire)
280 ctx = repo[revision]
281 ctx = repo[revision]
281 return ctx.files()
282 return ctx.files()
282
283
283 @reraise_safe_exceptions
284 @reraise_safe_exceptions
284 def ctx_list(self, path, revision):
285 def ctx_list(self, path, revision):
285 repo = self._factory.repo(path)
286 repo = self._factory.repo(path)
286 ctx = repo[revision]
287 ctx = repo[revision]
287 return list(ctx)
288 return list(ctx)
288
289
289 @reraise_safe_exceptions
290 @reraise_safe_exceptions
290 def ctx_parents(self, wire, revision):
291 def ctx_parents(self, wire, revision):
291 repo = self._factory.repo(wire)
292 repo = self._factory.repo(wire)
292 ctx = repo[revision]
293 ctx = repo[revision]
293 return [parent.rev() for parent in ctx.parents()]
294 return [parent.rev() for parent in ctx.parents()]
294
295
295 @reraise_safe_exceptions
296 @reraise_safe_exceptions
296 def ctx_phase(self, wire, revision):
297 def ctx_phase(self, wire, revision):
297 repo = self._factory.repo(wire)
298 repo = self._factory.repo(wire)
298 ctx = repo[revision]
299 ctx = repo[revision]
299 # public=0, draft=1, secret=3
300 # public=0, draft=1, secret=3
300 return ctx.phase()
301 return ctx.phase()
301
302
302 @reraise_safe_exceptions
303 @reraise_safe_exceptions
303 def ctx_obsolete(self, wire, revision):
304 def ctx_obsolete(self, wire, revision):
304 repo = self._factory.repo(wire)
305 repo = self._factory.repo(wire)
305 ctx = repo[revision]
306 ctx = repo[revision]
306 return ctx.obsolete()
307 return ctx.obsolete()
307
308
308 @reraise_safe_exceptions
309 @reraise_safe_exceptions
309 def ctx_hidden(self, wire, revision):
310 def ctx_hidden(self, wire, revision):
310 repo = self._factory.repo(wire)
311 repo = self._factory.repo(wire)
311 ctx = repo[revision]
312 ctx = repo[revision]
312 return ctx.hidden()
313 return ctx.hidden()
313
314
314 @reraise_safe_exceptions
315 @reraise_safe_exceptions
315 def ctx_substate(self, wire, revision):
316 def ctx_substate(self, wire, revision):
316 repo = self._factory.repo(wire)
317 repo = self._factory.repo(wire)
317 ctx = repo[revision]
318 ctx = repo[revision]
318 return ctx.substate
319 return ctx.substate
319
320
320 @reraise_safe_exceptions
321 @reraise_safe_exceptions
321 def ctx_status(self, wire, revision):
322 def ctx_status(self, wire, revision):
322 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
323 ctx = repo[revision]
324 ctx = repo[revision]
324 status = repo[ctx.p1().node()].status(other=ctx.node())
325 status = repo[ctx.p1().node()].status(other=ctx.node())
325 # object of status (odd, custom named tuple in mercurial) is not
326 # object of status (odd, custom named tuple in mercurial) is not
326 # correctly serializable, we make it a list, as the underlying
327 # correctly serializable, we make it a list, as the underlying
327 # API expects this to be a list
328 # API expects this to be a list
328 return list(status)
329 return list(status)
329
330
330 @reraise_safe_exceptions
331 @reraise_safe_exceptions
331 def ctx_user(self, wire, revision):
332 def ctx_user(self, wire, revision):
332 repo = self._factory.repo(wire)
333 repo = self._factory.repo(wire)
333 ctx = repo[revision]
334 ctx = repo[revision]
334 return ctx.user()
335 return ctx.user()
335
336
336 @reraise_safe_exceptions
337 @reraise_safe_exceptions
337 def check_url(self, url, config):
338 def check_url(self, url, config):
338 _proto = None
339 _proto = None
339 if '+' in url[:url.find('://')]:
340 if '+' in url[:url.find('://')]:
340 _proto = url[0:url.find('+')]
341 _proto = url[0:url.find('+')]
341 url = url[url.find('+') + 1:]
342 url = url[url.find('+') + 1:]
342 handlers = []
343 handlers = []
343 url_obj = url_parser(url)
344 url_obj = url_parser(url)
344 test_uri, authinfo = url_obj.authinfo()
345 test_uri, authinfo = url_obj.authinfo()
345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
346 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
346 url_obj.query = obfuscate_qs(url_obj.query)
347 url_obj.query = obfuscate_qs(url_obj.query)
347
348
348 cleaned_uri = str(url_obj)
349 cleaned_uri = str(url_obj)
349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
350 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
350
351
351 if authinfo:
352 if authinfo:
352 # create a password manager
353 # create a password manager
353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
354 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
354 passmgr.add_password(*authinfo)
355 passmgr.add_password(*authinfo)
355
356
356 handlers.extend((httpbasicauthhandler(passmgr),
357 handlers.extend((httpbasicauthhandler(passmgr),
357 httpdigestauthhandler(passmgr)))
358 httpdigestauthhandler(passmgr)))
358
359
359 o = urllib2.build_opener(*handlers)
360 o = urllib2.build_opener(*handlers)
360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
361 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
361 ('Accept', 'application/mercurial-0.1')]
362 ('Accept', 'application/mercurial-0.1')]
362
363
363 q = {"cmd": 'between'}
364 q = {"cmd": 'between'}
364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
365 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
365 qs = '?%s' % urllib.urlencode(q)
366 qs = '?%s' % urllib.urlencode(q)
366 cu = "%s%s" % (test_uri, qs)
367 cu = "%s%s" % (test_uri, qs)
367 req = urllib2.Request(cu, None, {})
368 req = urllib2.Request(cu, None, {})
368
369
369 try:
370 try:
370 log.debug("Trying to open URL %s", cleaned_uri)
371 log.debug("Trying to open URL %s", cleaned_uri)
371 resp = o.open(req)
372 resp = o.open(req)
372 if resp.code != 200:
373 if resp.code != 200:
373 raise exceptions.URLError()('Return Code is not 200')
374 raise exceptions.URLError()('Return Code is not 200')
374 except Exception as e:
375 except Exception as e:
375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
376 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
376 # means it cannot be cloned
377 # means it cannot be cloned
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
378 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
378
379
379 # now check if it's a proper hg repo, but don't do it for svn
380 # now check if it's a proper hg repo, but don't do it for svn
380 try:
381 try:
381 if _proto == 'svn':
382 if _proto == 'svn':
382 pass
383 pass
383 else:
384 else:
384 # check for pure hg repos
385 # check for pure hg repos
385 log.debug(
386 log.debug(
386 "Verifying if URL is a Mercurial repository: %s",
387 "Verifying if URL is a Mercurial repository: %s",
387 cleaned_uri)
388 cleaned_uri)
388 ui = make_ui_from_config(config)
389 ui = make_ui_from_config(config)
389 peer_checker = makepeer(ui, url)
390 peer_checker = makepeer(ui, url)
390 peer_checker.lookup('tip')
391 peer_checker.lookup('tip')
391 except Exception as e:
392 except Exception as e:
392 log.warning("URL is not a valid Mercurial repository: %s",
393 log.warning("URL is not a valid Mercurial repository: %s",
393 cleaned_uri)
394 cleaned_uri)
394 raise exceptions.URLError(e)(
395 raise exceptions.URLError(e)(
395 "url [%s] does not look like an hg repo org_exc: %s"
396 "url [%s] does not look like an hg repo org_exc: %s"
396 % (cleaned_uri, e))
397 % (cleaned_uri, e))
397
398
398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
399 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
399 return True
400 return True
400
401
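The probe above issues a Mercurial 'between' wire-protocol command against the cleaned URL. A small standalone sketch of the query string it builds (the host is a made-up example):

    import urllib
    q = {'cmd': 'between', 'pairs': '%s-%s' % ('0' * 40, '0' * 40)}
    probe_url = 'https://hg.example.com/repo' + '?%s' % urllib.urlencode(q)
    # e.g. https://hg.example.com/repo?cmd=between&pairs=000...000-000...000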
401 @reraise_safe_exceptions
402 @reraise_safe_exceptions
402 def diff(
403 def diff(
403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
404 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
404 context):
405 context):
405 repo = self._factory.repo(wire)
406 repo = self._factory.repo(wire)
406
407
407 if file_filter:
408 if file_filter:
408 match_filter = match(file_filter[0], '', [file_filter[1]])
409 match_filter = match(file_filter[0], '', [file_filter[1]])
409 else:
410 else:
410 match_filter = file_filter
411 match_filter = file_filter
411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
412 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
412
413
413 try:
414 try:
414 return "".join(patch.diff(
415 return "".join(patch.diff(
415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
416 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
416 except RepoLookupError as e:
417 except RepoLookupError as e:
417 raise exceptions.LookupException(e)()
418 raise exceptions.LookupException(e)()
418
419
419 @reraise_safe_exceptions
420 @reraise_safe_exceptions
420 def node_history(self, wire, revision, path, limit):
421 def node_history(self, wire, revision, path, limit):
421 repo = self._factory.repo(wire)
422 repo = self._factory.repo(wire)
422
423
423 ctx = repo[revision]
424 ctx = repo[revision]
424 fctx = ctx.filectx(path)
425 fctx = ctx.filectx(path)
425
426
426 def history_iter():
427 def history_iter():
427 limit_rev = fctx.rev()
428 limit_rev = fctx.rev()
428 for obj in reversed(list(fctx.filelog())):
429 for obj in reversed(list(fctx.filelog())):
429 obj = fctx.filectx(obj)
430 obj = fctx.filectx(obj)
430 if limit_rev >= obj.rev():
431 if limit_rev >= obj.rev():
431 yield obj
432 yield obj
432
433
433 history = []
434 history = []
434 for cnt, obj in enumerate(history_iter()):
435 for cnt, obj in enumerate(history_iter()):
435 if limit and cnt >= limit:
436 if limit and cnt >= limit:
436 break
437 break
437 history.append(hex(obj.node()))
438 history.append(hex(obj.node()))
438
439
439 return [x for x in history]
440 return [x for x in history]
440
441
441 @reraise_safe_exceptions
442 @reraise_safe_exceptions
442 def node_history_untill(self, wire, revision, path, limit):
443 def node_history_untill(self, wire, revision, path, limit):
443 repo = self._factory.repo(wire)
444 repo = self._factory.repo(wire)
444 ctx = repo[revision]
445 ctx = repo[revision]
445 fctx = ctx.filectx(path)
446 fctx = ctx.filectx(path)
446
447
447 file_log = list(fctx.filelog())
448 file_log = list(fctx.filelog())
448 if limit:
449 if limit:
449 # Limit to the last n items
450 # Limit to the last n items
450 file_log = file_log[-limit:]
451 file_log = file_log[-limit:]
451
452
452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
453 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
453
454
454 @reraise_safe_exceptions
455 @reraise_safe_exceptions
455 def fctx_annotate(self, wire, revision, path):
456 def fctx_annotate(self, wire, revision, path):
456 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
457 ctx = repo[revision]
458 ctx = repo[revision]
458 fctx = ctx.filectx(path)
459 fctx = ctx.filectx(path)
459
460
460 result = []
461 result = []
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
462 for i, annotate_obj in enumerate(fctx.annotate(), 1):
462 ln_no = i
463 ln_no = i
463 sha = hex(annotate_obj.fctx.node())
464 sha = hex(annotate_obj.fctx.node())
464 content = annotate_obj.text
465 content = annotate_obj.text
465 result.append((ln_no, sha, content))
466 result.append((ln_no, sha, content))
466 return result
467 return result
467
468
468 @reraise_safe_exceptions
469 @reraise_safe_exceptions
469 def fctx_data(self, wire, revision, path):
470 def fctx_data(self, wire, revision, path):
470 repo = self._factory.repo(wire)
471 repo = self._factory.repo(wire)
471 ctx = repo[revision]
472 ctx = repo[revision]
472 fctx = ctx.filectx(path)
473 fctx = ctx.filectx(path)
473 return fctx.data()
474 return fctx.data()
474
475
475 @reraise_safe_exceptions
476 @reraise_safe_exceptions
476 def fctx_flags(self, wire, revision, path):
477 def fctx_flags(self, wire, revision, path):
477 repo = self._factory.repo(wire)
478 repo = self._factory.repo(wire)
478 ctx = repo[revision]
479 ctx = repo[revision]
479 fctx = ctx.filectx(path)
480 fctx = ctx.filectx(path)
480 return fctx.flags()
481 return fctx.flags()
481
482
482 @reraise_safe_exceptions
483 @reraise_safe_exceptions
483 def fctx_size(self, wire, revision, path):
484 def fctx_size(self, wire, revision, path):
484 repo = self._factory.repo(wire)
485 repo = self._factory.repo(wire)
485 ctx = repo[revision]
486 ctx = repo[revision]
486 fctx = ctx.filectx(path)
487 fctx = ctx.filectx(path)
487 return fctx.size()
488 return fctx.size()
488
489
489 @reraise_safe_exceptions
490 @reraise_safe_exceptions
490 def get_all_commit_ids(self, wire, name):
491 def get_all_commit_ids(self, wire, name):
491 repo = self._factory.repo(wire)
492 repo = self._factory.repo(wire)
492 revs = repo.filtered(name).changelog.index
493 revs = repo.filtered(name).changelog.index
493 return map(lambda x: hex(x[7]), revs)[:-1]
494 return map(lambda x: hex(x[7]), revs)[:-1]
494
495
495 @reraise_safe_exceptions
496 @reraise_safe_exceptions
496 def get_config_value(self, wire, section, name, untrusted=False):
497 def get_config_value(self, wire, section, name, untrusted=False):
497 repo = self._factory.repo(wire)
498 repo = self._factory.repo(wire)
498 return repo.ui.config(section, name, untrusted=untrusted)
499 return repo.ui.config(section, name, untrusted=untrusted)
499
500
500 @reraise_safe_exceptions
501 @reraise_safe_exceptions
501 def get_config_bool(self, wire, section, name, untrusted=False):
502 def get_config_bool(self, wire, section, name, untrusted=False):
502 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
503 return repo.ui.configbool(section, name, untrusted=untrusted)
504 return repo.ui.configbool(section, name, untrusted=untrusted)
504
505
505 @reraise_safe_exceptions
506 @reraise_safe_exceptions
506 def get_config_list(self, wire, section, name, untrusted=False):
507 def get_config_list(self, wire, section, name, untrusted=False):
507 repo = self._factory.repo(wire)
508 repo = self._factory.repo(wire)
508 return repo.ui.configlist(section, name, untrusted=untrusted)
509 return repo.ui.configlist(section, name, untrusted=untrusted)
509
510
510 @reraise_safe_exceptions
511 @reraise_safe_exceptions
511 def is_large_file(self, wire, path):
512 def is_large_file(self, wire, path):
512 return largefiles.lfutil.isstandin(path)
513 return largefiles.lfutil.isstandin(path)
513
514
514 @reraise_safe_exceptions
515 @reraise_safe_exceptions
515 def in_largefiles_store(self, wire, sha):
516 def in_largefiles_store(self, wire, sha):
516 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
517 return largefiles.lfutil.instore(repo, sha)
518 return largefiles.lfutil.instore(repo, sha)
518
519
519 @reraise_safe_exceptions
520 @reraise_safe_exceptions
520 def in_user_cache(self, wire, sha):
521 def in_user_cache(self, wire, sha):
521 repo = self._factory.repo(wire)
522 repo = self._factory.repo(wire)
522 return largefiles.lfutil.inusercache(repo.ui, sha)
523 return largefiles.lfutil.inusercache(repo.ui, sha)
523
524
524 @reraise_safe_exceptions
525 @reraise_safe_exceptions
525 def store_path(self, wire, sha):
526 def store_path(self, wire, sha):
526 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
527 return largefiles.lfutil.storepath(repo, sha)
528 return largefiles.lfutil.storepath(repo, sha)
528
529
529 @reraise_safe_exceptions
530 @reraise_safe_exceptions
530 def link(self, wire, sha, path):
531 def link(self, wire, sha, path):
531 repo = self._factory.repo(wire)
532 repo = self._factory.repo(wire)
532 largefiles.lfutil.link(
533 largefiles.lfutil.link(
533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
534 largefiles.lfutil.usercachepath(repo.ui, sha), path)
534
535
535 @reraise_safe_exceptions
536 @reraise_safe_exceptions
536 def localrepository(self, wire, create=False):
537 def localrepository(self, wire, create=False):
537 self._factory.repo(wire, create=create)
538 self._factory.repo(wire, create=create)
538
539
539 @reraise_safe_exceptions
540 @reraise_safe_exceptions
540 def lookup(self, wire, revision, both):
541 def lookup(self, wire, revision, both):
541
542
542 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
543
544
544 if isinstance(revision, int):
545 if isinstance(revision, int):
545 # NOTE(marcink):
546 # NOTE(marcink):
546 # since Mercurial doesn't support indexes properly
547 # since Mercurial doesn't support indexes properly
547 # we need to shift accordingly by one to get proper index, e.g
548 # we need to shift accordingly by one to get proper index, e.g
548 # repo[-1] => repo[-2]
549 # repo[-1] => repo[-2]
549 # repo[0] => repo[-1]
550 # repo[0] => repo[-1]
550 # repo[1] => repo[2]; we also never call repo[0] because
551 # repo[1] => repo[2]; we also never call repo[0] because
551 # it's actually the second commit
552 # it's actually the second commit
552 if revision <= 0:
553 if revision <= 0:
553 revision = revision + -1
554 revision = revision + -1
554 else:
555 else:
555 revision = revision + 1
556 revision = revision + 1
556
557
557 try:
558 try:
558 ctx = repo[revision]
559 ctx = repo[revision]
559 except RepoLookupError as e:
560 except RepoLookupError as e:
560 raise exceptions.LookupException(e)(revision)
561 raise exceptions.LookupException(e)(revision)
561 except LookupError as e:
562 except LookupError as e:
562 raise exceptions.LookupException(e)(e.name)
563 raise exceptions.LookupException(e)(e.name)
563
564
564 if not both:
565 if not both:
565 return ctx.hex()
566 return ctx.hex()
566
567
567 ctx = repo[ctx.hex()]
568 ctx = repo[ctx.hex()]
568 return ctx.hex(), ctx.rev()
569 return ctx.hex(), ctx.rev()
569
570
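A tiny self-contained sketch of the index translation described in the NOTE above, useful for sanity-checking the shift (not part of the module itself):

    def _shift(revision):
        # mirrors the integer-revision translation performed in lookup()
        return revision + -1 if revision <= 0 else revision + 1

    assert _shift(-1) == -2   # repo[-1] => repo[-2]
    assert _shift(0) == -1    # repo[0]  => repo[-1]
    assert _shift(1) == 2     # repo[1]  => repo[2]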
570 @reraise_safe_exceptions
571 @reraise_safe_exceptions
571 def pull(self, wire, url, commit_ids=None):
572 def pull(self, wire, url, commit_ids=None):
572 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
573 # Disable any prompts for this repo
574 # Disable any prompts for this repo
574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
575
576
576 remote = peer(repo, {}, url)
577 remote = peer(repo, {}, url)
577 # Disable any prompts for this remote
578 # Disable any prompts for this remote
578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
579 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
579
580
580 if commit_ids:
581 if commit_ids:
581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
582 commit_ids = [bin(commit_id) for commit_id in commit_ids]
582
583
583 return exchange.pull(
584 return exchange.pull(
584 repo, remote, heads=commit_ids, force=None).cgresult
585 repo, remote, heads=commit_ids, force=None).cgresult
585
586
586 @reraise_safe_exceptions
587 @reraise_safe_exceptions
587 def sync_push(self, wire, url):
588 def sync_push(self, wire, url):
588 if not self.check_url(url, wire['config']):
589 if not self.check_url(url, wire['config']):
589 return
590 return
590
591
591 repo = self._factory.repo(wire)
592 repo = self._factory.repo(wire)
592
593
593 # Disable any prompts for this repo
594 # Disable any prompts for this repo
594 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
595 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
595
596
596 bookmarks = dict(repo._bookmarks).keys()
597 bookmarks = dict(repo._bookmarks).keys()
597 remote = peer(repo, {}, url)
598 remote = peer(repo, {}, url)
598 # Disable any prompts for this remote
599 # Disable any prompts for this remote
599 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
600 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
600
601
601 return exchange.push(
602 return exchange.push(
602 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
603 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
603
604
604 @reraise_safe_exceptions
605 @reraise_safe_exceptions
605 def revision(self, wire, rev):
606 def revision(self, wire, rev):
606 repo = self._factory.repo(wire)
607 repo = self._factory.repo(wire)
607 ctx = repo[rev]
608 ctx = repo[rev]
608 return ctx.rev()
609 return ctx.rev()
609
610
610 @reraise_safe_exceptions
611 @reraise_safe_exceptions
611 def rev_range(self, wire, filter):
612 def rev_range(self, wire, filter):
612 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
613 revisions = [rev for rev in revrange(repo, filter)]
614 revisions = [rev for rev in revrange(repo, filter)]
614 return revisions
615 return revisions
615
616
616 @reraise_safe_exceptions
617 @reraise_safe_exceptions
617 def rev_range_hash(self, wire, node):
618 def rev_range_hash(self, wire, node):
618 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
619
620
620 def get_revs(repo, rev_opt):
621 def get_revs(repo, rev_opt):
621 if rev_opt:
622 if rev_opt:
622 revs = revrange(repo, rev_opt)
623 revs = revrange(repo, rev_opt)
623 if len(revs) == 0:
624 if len(revs) == 0:
624 return (nullrev, nullrev)
625 return (nullrev, nullrev)
625 return max(revs), min(revs)
626 return max(revs), min(revs)
626 else:
627 else:
627 return len(repo) - 1, 0
628 return len(repo) - 1, 0
628
629
629 stop, start = get_revs(repo, [node + ':'])
630 stop, start = get_revs(repo, [node + ':'])
630 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
631 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
631 return revs
632 return revs
632
633
633 @reraise_safe_exceptions
634 @reraise_safe_exceptions
634 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
635 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
635 other_path = kwargs.pop('other_path', None)
636 other_path = kwargs.pop('other_path', None)
636
637
637 # case when we want to compare two independent repositories
638 # case when we want to compare two independent repositories
638 if other_path and other_path != wire["path"]:
639 if other_path and other_path != wire["path"]:
639 baseui = self._factory._create_config(wire["config"])
640 baseui = self._factory._create_config(wire["config"])
640 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
641 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
641 else:
642 else:
642 repo = self._factory.repo(wire)
643 repo = self._factory.repo(wire)
643 return list(repo.revs(rev_spec, *args))
644 return list(repo.revs(rev_spec, *args))
644
645
645 @reraise_safe_exceptions
646 @reraise_safe_exceptions
646 def strip(self, wire, revision, update, backup):
647 def strip(self, wire, revision, update, backup):
647 repo = self._factory.repo(wire)
648 repo = self._factory.repo(wire)
648 ctx = repo[revision]
649 ctx = repo[revision]
649 hgext_strip(
650 hgext_strip(
650 repo.baseui, repo, ctx.node(), update=update, backup=backup)
651 repo.baseui, repo, ctx.node(), update=update, backup=backup)
651
652
652 @reraise_safe_exceptions
653 @reraise_safe_exceptions
653 def verify(self, wire,):
654 def verify(self, wire,):
654 repo = self._factory.repo(wire)
655 repo = self._factory.repo(wire)
655 baseui = self._factory._create_config(wire['config'])
656 baseui = self._factory._create_config(wire['config'])
656 baseui.setconfig('ui', 'quiet', 'false')
657 baseui.setconfig('ui', 'quiet', 'false')
657 output = io.BytesIO()
658 output = io.BytesIO()
658
659
659 def write(data, **unused_kwargs):
660 def write(data, **unused_kwargs):
660 output.write(data)
661 output.write(data)
661 baseui.write = write
662 baseui.write = write
662
663
663 repo.ui = baseui
664 repo.ui = baseui
664 verify.verify(repo)
665 verify.verify(repo)
665 return output.getvalue()
666 return output.getvalue()
666
667
667 @reraise_safe_exceptions
668 @reraise_safe_exceptions
668 def tag(self, wire, name, revision, message, local, user,
669 def tag(self, wire, name, revision, message, local, user,
669 tag_time, tag_timezone):
670 tag_time, tag_timezone):
670 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
671 ctx = repo[revision]
672 ctx = repo[revision]
672 node = ctx.node()
673 node = ctx.node()
673
674
674 date = (tag_time, tag_timezone)
675 date = (tag_time, tag_timezone)
675 try:
676 try:
676 hg_tag.tag(repo, name, node, message, local, user, date)
677 hg_tag.tag(repo, name, node, message, local, user, date)
677 except Abort as e:
678 except Abort as e:
678 log.exception("Tag operation aborted")
679 log.exception("Tag operation aborted")
679 # Exception can contain unicode which we convert
680 # Exception can contain unicode which we convert
680 raise exceptions.AbortException(e)(repr(e))
681 raise exceptions.AbortException(e)(repr(e))
681
682
682 @reraise_safe_exceptions
683 @reraise_safe_exceptions
683 def tags(self, wire):
684 def tags(self, wire):
684 repo = self._factory.repo(wire)
685 repo = self._factory.repo(wire)
685 return repo.tags()
686 return repo.tags()
686
687
687 @reraise_safe_exceptions
688 @reraise_safe_exceptions
688 def update(self, wire, node=None, clean=False):
689 def update(self, wire, node=None, clean=False):
689 repo = self._factory.repo(wire)
690 repo = self._factory.repo(wire)
690 baseui = self._factory._create_config(wire['config'])
691 baseui = self._factory._create_config(wire['config'])
691 commands.update(baseui, repo, node=node, clean=clean)
692 commands.update(baseui, repo, node=node, clean=clean)
692
693
693 @reraise_safe_exceptions
694 @reraise_safe_exceptions
694 def identify(self, wire):
695 def identify(self, wire):
695 repo = self._factory.repo(wire)
696 repo = self._factory.repo(wire)
696 baseui = self._factory._create_config(wire['config'])
697 baseui = self._factory._create_config(wire['config'])
697 output = io.BytesIO()
698 output = io.BytesIO()
698 baseui.write = output.write
699 baseui.write = output.write
699 # This is required to get a full node id
700 # This is required to get a full node id
700 baseui.debugflag = True
701 baseui.debugflag = True
701 commands.identify(baseui, repo, id=True)
702 commands.identify(baseui, repo, id=True)
702
703
703 return output.getvalue()
704 return output.getvalue()
704
705
705 @reraise_safe_exceptions
706 @reraise_safe_exceptions
706 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
707 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
707 hooks=True):
708 hooks=True):
708 repo = self._factory.repo(wire)
709 repo = self._factory.repo(wire)
709 baseui = self._factory._create_config(wire['config'], hooks=hooks)
710 baseui = self._factory._create_config(wire['config'], hooks=hooks)
710
711
711 # Mercurial internally has a lot of logic that checks ONLY if
712 # Mercurial internally has a lot of logic that checks ONLY if
712 # an option is defined, so we only pass the options that were actually given
713 # an option is defined, so we only pass the options that were actually given
713 opts = {}
714 opts = {}
714 if bookmark:
715 if bookmark:
715 opts['bookmark'] = bookmark
716 opts['bookmark'] = bookmark
716 if branch:
717 if branch:
717 opts['branch'] = branch
718 opts['branch'] = branch
718 if revision:
719 if revision:
719 opts['rev'] = revision
720 opts['rev'] = revision
720
721
721 commands.pull(baseui, repo, source, **opts)
722 commands.pull(baseui, repo, source, **opts)
722
723
723 @reraise_safe_exceptions
724 @reraise_safe_exceptions
724 def heads(self, wire, branch=None):
725 def heads(self, wire, branch=None):
725 repo = self._factory.repo(wire)
726 repo = self._factory.repo(wire)
726 baseui = self._factory._create_config(wire['config'])
727 baseui = self._factory._create_config(wire['config'])
727 output = io.BytesIO()
728 output = io.BytesIO()
728
729
729 def write(data, **unused_kwargs):
730 def write(data, **unused_kwargs):
730 output.write(data)
731 output.write(data)
731
732
732 baseui.write = write
733 baseui.write = write
733 if branch:
734 if branch:
734 args = [branch]
735 args = [branch]
735 else:
736 else:
736 args = []
737 args = []
737 commands.heads(baseui, repo, template='{node} ', *args)
738 commands.heads(baseui, repo, template='{node} ', *args)
738
739
739 return output.getvalue()
740 return output.getvalue()
740
741
741 @reraise_safe_exceptions
742 @reraise_safe_exceptions
742 def ancestor(self, wire, revision1, revision2):
743 def ancestor(self, wire, revision1, revision2):
743 repo = self._factory.repo(wire)
744 repo = self._factory.repo(wire)
744 changelog = repo.changelog
745 changelog = repo.changelog
745 lookup = repo.lookup
746 lookup = repo.lookup
746 a = changelog.ancestor(lookup(revision1), lookup(revision2))
747 a = changelog.ancestor(lookup(revision1), lookup(revision2))
747 return hex(a)
748 return hex(a)
748
749
749 @reraise_safe_exceptions
750 @reraise_safe_exceptions
750 def push(self, wire, revisions, dest_path, hooks=True,
751 def push(self, wire, revisions, dest_path, hooks=True,
751 push_branches=False):
752 push_branches=False):
752 repo = self._factory.repo(wire)
753 repo = self._factory.repo(wire)
753 baseui = self._factory._create_config(wire['config'], hooks=hooks)
754 baseui = self._factory._create_config(wire['config'], hooks=hooks)
754 commands.push(baseui, repo, dest=dest_path, rev=revisions,
755 commands.push(baseui, repo, dest=dest_path, rev=revisions,
755 new_branch=push_branches)
756 new_branch=push_branches)
756
757
757 @reraise_safe_exceptions
758 @reraise_safe_exceptions
758 def merge(self, wire, revision):
759 def merge(self, wire, revision):
759 repo = self._factory.repo(wire)
760 repo = self._factory.repo(wire)
760 baseui = self._factory._create_config(wire['config'])
761 baseui = self._factory._create_config(wire['config'])
761 repo.ui.setconfig('ui', 'merge', 'internal:dump')
762 repo.ui.setconfig('ui', 'merge', 'internal:dump')
762
763
763 # In case sub repositories are used, mercurial prompts the user in
764 # In case sub repositories are used, mercurial prompts the user in
764 # case of merge conflicts or different sub repository sources. By
765 # case of merge conflicts or different sub repository sources. By
765 # setting the interactive flag to `False` mercurial doesn't prompt the
766 # setting the interactive flag to `False` mercurial doesn't prompt the
766 # user but instead uses a default value.
767 # user but instead uses a default value.
767 repo.ui.setconfig('ui', 'interactive', False)
768 repo.ui.setconfig('ui', 'interactive', False)
768
769
769 commands.merge(baseui, repo, rev=revision)
770 commands.merge(baseui, repo, rev=revision)
770
771
771 @reraise_safe_exceptions
772 @reraise_safe_exceptions
772 def commit(self, wire, message, username, close_branch=False):
773 def commit(self, wire, message, username, close_branch=False):
773 repo = self._factory.repo(wire)
774 repo = self._factory.repo(wire)
774 baseui = self._factory._create_config(wire['config'])
775 baseui = self._factory._create_config(wire['config'])
775 repo.ui.setconfig('ui', 'username', username)
776 repo.ui.setconfig('ui', 'username', username)
776 commands.commit(baseui, repo, message=message, close_branch=close_branch)
777 commands.commit(baseui, repo, message=message, close_branch=close_branch)
777
778
778 @reraise_safe_exceptions
779 @reraise_safe_exceptions
779 def rebase(self, wire, source=None, dest=None, abort=False):
780 def rebase(self, wire, source=None, dest=None, abort=False):
780 repo = self._factory.repo(wire)
781 repo = self._factory.repo(wire)
781 baseui = self._factory._create_config(wire['config'])
782 baseui = self._factory._create_config(wire['config'])
782 repo.ui.setconfig('ui', 'merge', 'internal:dump')
783 repo.ui.setconfig('ui', 'merge', 'internal:dump')
783 rebase.rebase(
784 rebase.rebase(
784 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
785 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
785
786
786 @reraise_safe_exceptions
787 @reraise_safe_exceptions
787 def bookmark(self, wire, bookmark, revision=None):
788 def bookmark(self, wire, bookmark, revision=None):
788 repo = self._factory.repo(wire)
789 repo = self._factory.repo(wire)
789 baseui = self._factory._create_config(wire['config'])
790 baseui = self._factory._create_config(wire['config'])
790 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
791 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
791
792
792 @reraise_safe_exceptions
793 @reraise_safe_exceptions
793 def install_hooks(self, wire, force=False):
794 def install_hooks(self, wire, force=False):
794 # we don't need any special hooks for Mercurial
795 # we don't need any special hooks for Mercurial
795 pass
796 pass
797
798 @reraise_safe_exceptions
799 def get_hooks_info(self, wire):
800 return {
801 'pre_version': vcsserver.__version__,
802 'post_version': vcsserver.__version__,
803 }
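A hypothetical caller-side sketch of the new get_hooks_info() method; for Mercurial both entries simply report the running vcsserver version, since no hook files are written to disk (the factory and wire wiring are assumptions about the calling side):

    remote = HgRemote(factory)
    info = remote.get_hooks_info(wire)
    # info -> {'pre_version': vcsserver.__version__,
    #          'post_version': vcsserver.__version__}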
@@ -1,154 +1,203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import re
20 import re
21 import os
21 import os
22 import sys
22 import sys
23 import datetime
23 import datetime
24 import logging
24 import logging
25 import pkg_resources
25 import pkg_resources
26
26
27 import vcsserver
27 import vcsserver
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 def get_git_hooks_path(repo_path, bare):
33 hooks_path = os.path.join(repo_path, 'hooks')
34 if not bare:
35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
36
37 return hooks_path
38
39
32 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
33 """
41 """
34 Creates a RhodeCode hook inside a git repository
42 Creates a RhodeCode hook inside a git repository
35
43
36 :param repo_path: path to repository
44 :param repo_path: path to repository
37 :param executable: binary executable to put in the hooks
45 :param executable: binary executable to put in the hooks
38 :param force_create: Create even if same name hook exists
46 :param force_create: Create even if same name hook exists
39 """
47 """
40 executable = executable or sys.executable
48 executable = executable or sys.executable
41 hooks_path = os.path.join(repo_path, 'hooks')
49 hooks_path = get_git_hooks_path(repo_path, bare)
42 if not bare:
50
43 hooks_path = os.path.join(repo_path, '.git', 'hooks')
44 if not os.path.isdir(hooks_path):
51 if not os.path.isdir(hooks_path):
45 os.makedirs(hooks_path, mode=0o777)
52 os.makedirs(hooks_path, mode=0o777)
46
53
47 tmpl_post = pkg_resources.resource_string(
54 tmpl_post = pkg_resources.resource_string(
48 'vcsserver', '/'.join(
55 'vcsserver', '/'.join(
49 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
50 tmpl_pre = pkg_resources.resource_string(
57 tmpl_pre = pkg_resources.resource_string(
51 'vcsserver', '/'.join(
58 'vcsserver', '/'.join(
52 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
53
60
54 path = '' # not used for now
61 path = '' # not used for now
55 timestamp = datetime.datetime.utcnow().isoformat()
62 timestamp = datetime.datetime.utcnow().isoformat()
56
63
57 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
58 log.debug('Installing git hook in repo %s', repo_path)
65 log.debug('Installing git hook in repo %s', repo_path)
59 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
60 _rhodecode_hook = check_rhodecode_hook(_hook_file)
67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
61
68
62 if _rhodecode_hook or force_create:
69 if _rhodecode_hook or force_create:
63 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
64 try:
71 try:
65 with open(_hook_file, 'wb') as f:
72 with open(_hook_file, 'wb') as f:
66 template = template.replace(
73 template = template.replace(
67 '_TMPL_', vcsserver.__version__)
74 '_TMPL_', vcsserver.__version__)
68 template = template.replace('_DATE_', timestamp)
75 template = template.replace('_DATE_', timestamp)
69 template = template.replace('_ENV_', executable)
76 template = template.replace('_ENV_', executable)
70 template = template.replace('_PATH_', path)
77 template = template.replace('_PATH_', path)
71 f.write(template)
78 f.write(template)
72 os.chmod(_hook_file, 0o755)
79 os.chmod(_hook_file, 0o755)
73 except IOError:
80 except IOError:
74 log.exception('error writing hook file %s', _hook_file)
81 log.exception('error writing hook file %s', _hook_file)
75 else:
82 else:
76 log.debug('skipping writing hook file')
83 log.debug('skipping writing hook file')
77
84
78 return True
85 return True
79
86
80
87
88 def get_svn_hooks_path(repo_path):
89 hooks_path = os.path.join(repo_path, 'hooks')
90
91 return hooks_path
92
93
81 def install_svn_hooks(repo_path, executable=None, force_create=False):
94 def install_svn_hooks(repo_path, executable=None, force_create=False):
82 """
95 """
83 Creates RhodeCode hooks inside a svn repository
96 Creates RhodeCode hooks inside a svn repository
84
97
85 :param repo_path: path to repository
98 :param repo_path: path to repository
86 :param executable: binary executable to put in the hooks
99 :param executable: binary executable to put in the hooks
87 :param force_create: Create even if same name hook exists
100 :param force_create: Create even if same name hook exists
88 """
101 """
89 executable = executable or sys.executable
102 executable = executable or sys.executable
90 hooks_path = os.path.join(repo_path, 'hooks')
103 hooks_path = get_svn_hooks_path(repo_path)
91 if not os.path.isdir(hooks_path):
104 if not os.path.isdir(hooks_path):
92 os.makedirs(hooks_path, mode=0o777)
105 os.makedirs(hooks_path, mode=0o777)
93
106
94 tmpl_post = pkg_resources.resource_string(
107 tmpl_post = pkg_resources.resource_string(
95 'vcsserver', '/'.join(
108 'vcsserver', '/'.join(
96 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
97 tmpl_pre = pkg_resources.resource_string(
110 tmpl_pre = pkg_resources.resource_string(
98 'vcsserver', '/'.join(
111 'vcsserver', '/'.join(
99 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
100
113
101 path = '' # not used for now
114 path = '' # not used for now
102 timestamp = datetime.datetime.utcnow().isoformat()
115 timestamp = datetime.datetime.utcnow().isoformat()
103
116
104 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
105 log.debug('Installing svn hook in repo %s', repo_path)
118 log.debug('Installing svn hook in repo %s', repo_path)
106 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
107 _rhodecode_hook = check_rhodecode_hook(_hook_file)
120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
108
121
109 if _rhodecode_hook or force_create:
122 if _rhodecode_hook or force_create:
110 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
111
124
112 try:
125 try:
113 with open(_hook_file, 'wb') as f:
126 with open(_hook_file, 'wb') as f:
114 template = template.replace(
127 template = template.replace(
115 '_TMPL_', vcsserver.__version__)
128 '_TMPL_', vcsserver.__version__)
116 template = template.replace('_DATE_', timestamp)
129 template = template.replace('_DATE_', timestamp)
117 template = template.replace('_ENV_', executable)
130 template = template.replace('_ENV_', executable)
118 template = template.replace('_PATH_', path)
131 template = template.replace('_PATH_', path)
119
132
120 f.write(template)
133 f.write(template)
121 os.chmod(_hook_file, 0o755)
134 os.chmod(_hook_file, 0o755)
122 except IOError:
135 except IOError:
123 log.exception('error writing hook file %s', _hook_file)
136 log.exception('error writing hook file %s', _hook_file)
124 else:
137 else:
125 log.debug('skipping writing hook file')
138 log.debug('skipping writing hook file')
126
139
127 return True
140 return True
128
141
129
142
143 def get_version_from_hook(hook_path):
144 version = ''
145 hook_content = read_hook_content(hook_path)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
147 if matches:
148 try:
149 version = matches.groups()[0]
150 log.debug('got version %s from hooks.', version)
151 except Exception:
152 log.exception("Exception while reading the hook version.")
153 return version.replace("'", "")
154
155
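A self-contained sketch of the extraction performed by get_version_from_hook(); the hook body below is a hypothetical example of what a generated hook might contain, not a verbatim copy of the template:

    import re

    hook_content = "#!/usr/bin/env python\nRC_HOOK_VER = '4.17.0'\n"  # made-up sample
    matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    version = matches.groups()[0].replace("'", "") if matches else ''
    # version -> '4.17.0'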
130 def check_rhodecode_hook(hook_path):
156 def check_rhodecode_hook(hook_path):
131 """
157 """
132 Check if the hook was created by RhodeCode
158 Check if the hook was created by RhodeCode
133 """
159 """
134 if not os.path.exists(hook_path):
160 if not os.path.exists(hook_path):
135 return True
161 return True
136
162
137 log.debug('hook exists, checking if it is from rhodecode')
163 log.debug('hook exists, checking if it is from RhodeCode')
138 hook_content = read_hook_content(hook_path)
164
139 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
165 version = get_version_from_hook(hook_path)
140 if matches:
166 if version:
141 try:
167 return True
142 version = matches.groups()[0]
143 log.debug('got version %s from hooks.', version)
144 return True
145 except Exception:
146 log.exception("Exception while reading the hook version.")
147
168
148 return False
169 return False
149
170
150
171
151 def read_hook_content(hook_path):
172 def read_hook_content(hook_path):
152 with open(hook_path, 'rb') as f:
173 with open(hook_path, 'rb') as f:
153 content = f.read()
174 content = f.read()
154 return content
175 return content
176
177
178 def get_git_pre_hook_version(repo_path, bare):
179 hooks_path = get_git_hooks_path(repo_path, bare)
180 _hook_file = os.path.join(hooks_path, 'pre-receive')
181 version = get_version_from_hook(_hook_file)
182 return version
183
184
185 def get_git_post_hook_version(repo_path, bare):
186 hooks_path = get_git_hooks_path(repo_path, bare)
187 _hook_file = os.path.join(hooks_path, 'post-receive')
188 version = get_version_from_hook(_hook_file)
189 return version
190
191
192 def get_svn_pre_hook_version(repo_path):
193 hooks_path = get_svn_hooks_path(repo_path)
194 _hook_file = os.path.join(hooks_path, 'pre-commit')
195 version = get_version_from_hook(_hook_file)
196 return version
197
198
199 def get_svn_post_hook_version(repo_path):
200 hooks_path = get_svn_hooks_path(repo_path)
201 _hook_file = os.path.join(hooks_path, 'post-commit')
202 version = get_version_from_hook(_hook_file)
203 return version
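A sketch of how the newly exposed helpers could be combined on the calling side to report installed hook versions; the import path assumes these functions live in vcsserver.hook_utils, as the template resource paths above suggest:

    from vcsserver.hook_utils import (
        get_git_pre_hook_version, get_git_post_hook_version)

    def git_hooks_info(repo_path, bare):
        # mirrors the {'pre_version': ..., 'post_version': ...} shape used elsewhere
        return {
            'pre_version': get_git_pre_hook_version(repo_path, bare),
            'post_version': get_git_post_hook_version(repo_path, bare),
        }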
@@ -1,722 +1,732 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 # Set of svn compatible version flags.
43 # Set of svn compatible version flags.
44 # Compare with subversion/svnadmin/svnadmin.c
44 # Compare with subversion/svnadmin/svnadmin.c
45 svn_compatible_versions = {
45 svn_compatible_versions = {
46 'pre-1.4-compatible',
46 'pre-1.4-compatible',
47 'pre-1.5-compatible',
47 'pre-1.5-compatible',
48 'pre-1.6-compatible',
48 'pre-1.6-compatible',
49 'pre-1.8-compatible',
49 'pre-1.8-compatible',
50 'pre-1.9-compatible'
50 'pre-1.9-compatible'
51 }
51 }
52
52
53 svn_compatible_versions_map = {
53 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
54 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
58 'pre-1.9-compatible': '1.8',
59 }
59 }
60
60
61
61
62 def reraise_safe_exceptions(func):
62 def reraise_safe_exceptions(func):
63 """Decorator for converting svn exceptions to something neutral."""
63 """Decorator for converting svn exceptions to something neutral."""
64 def wrapper(*args, **kwargs):
64 def wrapper(*args, **kwargs):
65 try:
65 try:
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 except Exception as e:
67 except Exception as e:
68 if not hasattr(e, '_vcs_kind'):
68 if not hasattr(e, '_vcs_kind'):
69 log.exception("Unhandled exception in svn remote call")
69 log.exception("Unhandled exception in svn remote call")
70 raise_from_original(exceptions.UnhandledException(e))
70 raise_from_original(exceptions.UnhandledException(e))
71 raise
71 raise
72 return wrapper
72 return wrapper
73
73
74
74
75 class SubversionFactory(RepoFactory):
75 class SubversionFactory(RepoFactory):
76 repo_type = 'svn'
76 repo_type = 'svn'
77
77
78 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
80 if create:
81 fs_config = {'compatible-version': '1.9'}
81 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
82 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
85 .format(compatible_version))
86 fs_config['compatible-version'] = \
86 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
87 svn_compatible_versions_map[compatible_version]
88
88
89 log.debug('Create SVN repo with config "%s"', fs_config)
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
91 else:
92 repo = svn.repos.open(path)
92 repo = svn.repos.open(path)
93
93
94 log.debug('Got SVN object: %s', repo)
94 log.debug('Got SVN object: %s', repo)
95 return repo
95 return repo
96
96
97 def repo(self, wire, create=False, compatible_version=None):
97 def repo(self, wire, create=False, compatible_version=None):
98 """
98 """
99 Get a repository instance for the given path.
99 Get a repository instance for the given path.
100
100
101 Internally uses the low-level beaker API, since the decorators introduce
101 Internally uses the low-level beaker API, since the decorators introduce
102 significant overhead.
102 significant overhead.
103 """
103 """
104 region = self._cache_region
104 region = self._cache_region
105 context = wire.get('context', None)
105 context = wire.get('context', None)
106 repo_path = wire.get('path', '')
106 repo_path = wire.get('path', '')
107 context_uid = '{}'.format(context)
107 context_uid = '{}'.format(context)
108 cache = wire.get('cache', True)
108 cache = wire.get('cache', True)
109 cache_on = context and cache
109 cache_on = context and cache
110
110
111 @region.conditional_cache_on_arguments(condition=cache_on)
111 @region.conditional_cache_on_arguments(condition=cache_on)
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 return self._create_repo(wire, create, compatible_version)
113 return self._create_repo(wire, create, compatible_version)
114
114
115 return create_new_repo(self.repo_type, repo_path, context_uid,
115 return create_new_repo(self.repo_type, repo_path, context_uid,
116 compatible_version)
116 compatible_version)
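A minimal usage sketch of the caching wrapper above, assuming `factory` is an
already-configured SubversionFactory instance; the repository path and context
id below are illustrative only:

    wire = {'path': '/srv/svn/myrepo', 'context': 'ctx-1', 'cache': True}
    repo = factory.repo(wire)  # cached per repo type, path, context and version
    repo = factory.repo(wire, create=True,
                        compatible_version='pre-1.8-compatible')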
117
117
118
118
119 NODE_TYPE_MAPPING = {
119 NODE_TYPE_MAPPING = {
120 svn.core.svn_node_file: 'file',
120 svn.core.svn_node_file: 'file',
121 svn.core.svn_node_dir: 'dir',
121 svn.core.svn_node_dir: 'dir',
122 }
122 }
123
123
124
124
125 class SvnRemote(object):
125 class SvnRemote(object):
126
126
127 def __init__(self, factory, hg_factory=None):
127 def __init__(self, factory, hg_factory=None):
128 self._factory = factory
128 self._factory = factory
129 # TODO: Remove once we do not use internal Mercurial objects anymore
129 # TODO: Remove once we do not use internal Mercurial objects anymore
130 # for subversion
130 # for subversion
131 self._hg_factory = hg_factory
131 self._hg_factory = hg_factory
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def discover_svn_version(self):
134 def discover_svn_version(self):
135 try:
135 try:
136 import svn.core
136 import svn.core
137 svn_ver = svn.core.SVN_VERSION
137 svn_ver = svn.core.SVN_VERSION
138 except ImportError:
138 except ImportError:
139 svn_ver = None
139 svn_ver = None
140 return svn_ver
140 return svn_ver
141
141
142 def check_url(self, url, config_items):
142 def check_url(self, url, config_items):
143 # this can throw an exception if hgsubversion is not installed, but we detect this
143 # this can throw an exception if hgsubversion is not installed, but we detect this
144 from hgsubversion import svnrepo
144 from hgsubversion import svnrepo
145
145
146 baseui = self._hg_factory._create_config(config_items)
146 baseui = self._hg_factory._create_config(config_items)
147 # the uuid function returns a valid UUID only for a proper repo, otherwise
147 # the uuid function returns a valid UUID only for a proper repo, otherwise
148 # it throws an exception
148 # it throws an exception
149 try:
149 try:
150 svnrepo.svnremoterepo(baseui, url).svn.uuid
150 svnrepo.svnremoterepo(baseui, url).svn.uuid
151 except Exception:
151 except Exception:
152 tb = traceback.format_exc()
152 tb = traceback.format_exc()
153 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
153 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
154 raise URLError(
154 raise URLError(
155 '"%s" is not a valid Subversion source url.' % (url, ))
155 '"%s" is not a valid Subversion source url.' % (url, ))
156 return True
156 return True
157
157
158 def is_path_valid_repository(self, wire, path):
158 def is_path_valid_repository(self, wire, path):
159
159
160 # NOTE(marcink): short circuit the check for SVN repo
160 # NOTE(marcink): short circuit the check for SVN repo
161 # the repos.open call might be expensive, but we have one cheap
161 # the repos.open call might be expensive, but we have one cheap
162 # precondition we can use: check for the 'format' file
162 # precondition we can use: check for the 'format' file
163
163
164 if not os.path.isfile(os.path.join(path, 'format')):
164 if not os.path.isfile(os.path.join(path, 'format')):
165 return False
165 return False
166
166
167 try:
167 try:
168 svn.repos.open(path)
168 svn.repos.open(path)
169 except svn.core.SubversionException:
169 except svn.core.SubversionException:
170 tb = traceback.format_exc()
170 tb = traceback.format_exc()
171 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
171 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
172 return False
172 return False
173 return True
173 return True
174
174
175 @reraise_safe_exceptions
175 @reraise_safe_exceptions
176 def verify(self, wire):
176 def verify(self, wire):
177 repo_path = wire['path']
177 repo_path = wire['path']
178 if not self.is_path_valid_repository(wire, repo_path):
178 if not self.is_path_valid_repository(wire, repo_path):
179 raise Exception(
179 raise Exception(
180 "Path %s is not a valid Subversion repository." % repo_path)
180 "Path %s is not a valid Subversion repository." % repo_path)
181
181
182 cmd = ['svnadmin', 'info', repo_path]
182 cmd = ['svnadmin', 'info', repo_path]
183 stdout, stderr = subprocessio.run_command(cmd)
183 stdout, stderr = subprocessio.run_command(cmd)
184 return stdout
184 return stdout
185
185
186 def lookup(self, wire, revision):
186 def lookup(self, wire, revision):
187 if revision not in [-1, None, 'HEAD']:
187 if revision not in [-1, None, 'HEAD']:
188 raise NotImplementedError
188 raise NotImplementedError
189 repo = self._factory.repo(wire)
189 repo = self._factory.repo(wire)
190 fs_ptr = svn.repos.fs(repo)
190 fs_ptr = svn.repos.fs(repo)
191 head = svn.fs.youngest_rev(fs_ptr)
191 head = svn.fs.youngest_rev(fs_ptr)
192 return head
192 return head
193
193
194 def lookup_interval(self, wire, start_ts, end_ts):
194 def lookup_interval(self, wire, start_ts, end_ts):
195 repo = self._factory.repo(wire)
195 repo = self._factory.repo(wire)
196 fsobj = svn.repos.fs(repo)
196 fsobj = svn.repos.fs(repo)
197 start_rev = None
197 start_rev = None
198 end_rev = None
198 end_rev = None
199 if start_ts:
199 if start_ts:
200 start_ts_svn = apr_time_t(start_ts)
200 start_ts_svn = apr_time_t(start_ts)
201 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
201 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
202 else:
202 else:
203 start_rev = 1
203 start_rev = 1
204 if end_ts:
204 if end_ts:
205 end_ts_svn = apr_time_t(end_ts)
205 end_ts_svn = apr_time_t(end_ts)
206 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
206 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
207 else:
207 else:
208 end_rev = svn.fs.youngest_rev(fsobj)
208 end_rev = svn.fs.youngest_rev(fsobj)
209 return start_rev, end_rev
209 return start_rev, end_rev
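A small illustration of the interval lookup above, assuming `remote` is an
SvnRemote instance and `wire` points at an existing repository; the timestamps
are arbitrary:

    start_ts = 1514764800  # 2018-01-01 00:00:00 UTC
    end_ts = 1546300800    # 2019-01-01 00:00:00 UTC
    start_rev, end_rev = remote.lookup_interval(wire, start_ts, end_ts)
    # start_rev is the first revision committed after start_ts,
    # end_rev the last revision committed at or before end_ts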
210
210
211 def revision_properties(self, wire, revision):
211 def revision_properties(self, wire, revision):
212 repo = self._factory.repo(wire)
212 repo = self._factory.repo(wire)
213 fs_ptr = svn.repos.fs(repo)
213 fs_ptr = svn.repos.fs(repo)
214 return svn.fs.revision_proplist(fs_ptr, revision)
214 return svn.fs.revision_proplist(fs_ptr, revision)
215
215
216 def revision_changes(self, wire, revision):
216 def revision_changes(self, wire, revision):
217
217
218 repo = self._factory.repo(wire)
218 repo = self._factory.repo(wire)
219 fsobj = svn.repos.fs(repo)
219 fsobj = svn.repos.fs(repo)
220 rev_root = svn.fs.revision_root(fsobj, revision)
220 rev_root = svn.fs.revision_root(fsobj, revision)
221
221
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
224 base_dir = ""
224 base_dir = ""
225 send_deltas = False
225 send_deltas = False
226 svn.repos.replay2(
226 svn.repos.replay2(
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
228 editor_ptr, editor_baton, None)
228 editor_ptr, editor_baton, None)
229
229
230 added = []
230 added = []
231 changed = []
231 changed = []
232 removed = []
232 removed = []
233
233
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
235 for path, change in editor.changes.iteritems():
235 for path, change in editor.changes.iteritems():
236 # TODO: Decide what to do with directory nodes. Subversion can add
236 # TODO: Decide what to do with directory nodes. Subversion can add
237 # empty directories.
237 # empty directories.
238
238
239 if change.item_kind == svn.core.svn_node_dir:
239 if change.item_kind == svn.core.svn_node_dir:
240 continue
240 continue
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
242 added.append(path)
242 added.append(path)
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
244 svn.repos.CHANGE_ACTION_REPLACE]:
244 svn.repos.CHANGE_ACTION_REPLACE]:
245 changed.append(path)
245 changed.append(path)
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
247 removed.append(path)
247 removed.append(path)
248 else:
248 else:
249 raise NotImplementedError(
249 raise NotImplementedError(
250 "Action %s not supported on path %s" % (
250 "Action %s not supported on path %s" % (
251 change.action, path))
251 change.action, path))
252
252
253 changes = {
253 changes = {
254 'added': added,
254 'added': added,
255 'changed': changed,
255 'changed': changed,
256 'removed': removed,
256 'removed': removed,
257 }
257 }
258 return changes
258 return changes
259
259
260 def node_history(self, wire, path, revision, limit):
260 def node_history(self, wire, path, revision, limit):
261 cross_copies = False
261 cross_copies = False
262 repo = self._factory.repo(wire)
262 repo = self._factory.repo(wire)
263 fsobj = svn.repos.fs(repo)
263 fsobj = svn.repos.fs(repo)
264 rev_root = svn.fs.revision_root(fsobj, revision)
264 rev_root = svn.fs.revision_root(fsobj, revision)
265
265
266 history_revisions = []
266 history_revisions = []
267 history = svn.fs.node_history(rev_root, path)
267 history = svn.fs.node_history(rev_root, path)
268 history = svn.fs.history_prev(history, cross_copies)
268 history = svn.fs.history_prev(history, cross_copies)
269 while history:
269 while history:
270 __, node_revision = svn.fs.history_location(history)
270 __, node_revision = svn.fs.history_location(history)
271 history_revisions.append(node_revision)
271 history_revisions.append(node_revision)
272 if limit and len(history_revisions) >= limit:
272 if limit and len(history_revisions) >= limit:
273 break
273 break
274 history = svn.fs.history_prev(history, cross_copies)
274 history = svn.fs.history_prev(history, cross_copies)
275 return history_revisions
275 return history_revisions
276
276
277 def node_properties(self, wire, path, revision):
277 def node_properties(self, wire, path, revision):
278 repo = self._factory.repo(wire)
278 repo = self._factory.repo(wire)
279 fsobj = svn.repos.fs(repo)
279 fsobj = svn.repos.fs(repo)
280 rev_root = svn.fs.revision_root(fsobj, revision)
280 rev_root = svn.fs.revision_root(fsobj, revision)
281 return svn.fs.node_proplist(rev_root, path)
281 return svn.fs.node_proplist(rev_root, path)
282
282
283 def file_annotate(self, wire, path, revision):
283 def file_annotate(self, wire, path, revision):
284 abs_path = 'file://' + urllib.pathname2url(
284 abs_path = 'file://' + urllib.pathname2url(
285 vcspath.join(wire['path'], path))
285 vcspath.join(wire['path'], path))
286 file_uri = svn.core.svn_path_canonicalize(abs_path)
286 file_uri = svn.core.svn_path_canonicalize(abs_path)
287
287
288 start_rev = svn_opt_revision_value_t(0)
288 start_rev = svn_opt_revision_value_t(0)
289 peg_rev = svn_opt_revision_value_t(revision)
289 peg_rev = svn_opt_revision_value_t(revision)
290 end_rev = peg_rev
290 end_rev = peg_rev
291
291
292 annotations = []
292 annotations = []
293
293
294 def receiver(line_no, revision, author, date, line, pool):
294 def receiver(line_no, revision, author, date, line, pool):
295 annotations.append((line_no, revision, line))
295 annotations.append((line_no, revision, line))
296
296
297 # TODO: Cannot use blame5, missing typemap function in the swig code
297 # TODO: Cannot use blame5, missing typemap function in the swig code
298 try:
298 try:
299 svn.client.blame2(
299 svn.client.blame2(
300 file_uri, peg_rev, start_rev, end_rev,
300 file_uri, peg_rev, start_rev, end_rev,
301 receiver, svn.client.create_context())
301 receiver, svn.client.create_context())
302 except svn.core.SubversionException as exc:
302 except svn.core.SubversionException as exc:
303 log.exception("Error during blame operation.")
303 log.exception("Error during blame operation.")
304 raise Exception(
304 raise Exception(
305 "Blame not supported or file does not exist at path %s. "
305 "Blame not supported or file does not exist at path %s. "
306 "Error %s." % (path, exc))
306 "Error %s." % (path, exc))
307
307
308 return annotations
308 return annotations
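The annotations returned above are plain tuples collected by the blame
receiver; a rough sketch of consuming them (path and revision are
illustrative):

    annotations = remote.file_annotate(wire, 'trunk/README.txt', 10)
    # each entry is a (line_no, revision, line) tuple, in file order,
    # where revision is the revision that last changed that line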
309
309
310 def get_node_type(self, wire, path, rev=None):
310 def get_node_type(self, wire, path, rev=None):
311 repo = self._factory.repo(wire)
311 repo = self._factory.repo(wire)
312 fs_ptr = svn.repos.fs(repo)
312 fs_ptr = svn.repos.fs(repo)
313 if rev is None:
313 if rev is None:
314 rev = svn.fs.youngest_rev(fs_ptr)
314 rev = svn.fs.youngest_rev(fs_ptr)
315 root = svn.fs.revision_root(fs_ptr, rev)
315 root = svn.fs.revision_root(fs_ptr, rev)
316 node = svn.fs.check_path(root, path)
316 node = svn.fs.check_path(root, path)
317 return NODE_TYPE_MAPPING.get(node, None)
317 return NODE_TYPE_MAPPING.get(node, None)
318
318
319 def get_nodes(self, wire, path, revision=None):
319 def get_nodes(self, wire, path, revision=None):
320 repo = self._factory.repo(wire)
320 repo = self._factory.repo(wire)
321 fsobj = svn.repos.fs(repo)
321 fsobj = svn.repos.fs(repo)
322 if revision is None:
322 if revision is None:
323 revision = svn.fs.youngest_rev(fsobj)
323 revision = svn.fs.youngest_rev(fsobj)
324 root = svn.fs.revision_root(fsobj, revision)
324 root = svn.fs.revision_root(fsobj, revision)
325 entries = svn.fs.dir_entries(root, path)
325 entries = svn.fs.dir_entries(root, path)
326 result = []
326 result = []
327 for entry_path, entry_info in entries.iteritems():
327 for entry_path, entry_info in entries.iteritems():
328 result.append(
328 result.append(
329 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
329 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
330 return result
330 return result
331
331
332 def get_file_content(self, wire, path, rev=None):
332 def get_file_content(self, wire, path, rev=None):
333 repo = self._factory.repo(wire)
333 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
334 fsobj = svn.repos.fs(repo)
335 if rev is None:
335 if rev is None:
336 rev = svn.fs.youngest_revision(fsobj)
336 rev = svn.fs.youngest_revision(fsobj)
337 root = svn.fs.revision_root(fsobj, rev)
337 root = svn.fs.revision_root(fsobj, rev)
338 content = svn.core.Stream(svn.fs.file_contents(root, path))
338 content = svn.core.Stream(svn.fs.file_contents(root, path))
339 return content.read()
339 return content.read()
340
340
341 def get_file_size(self, wire, path, revision=None):
341 def get_file_size(self, wire, path, revision=None):
342 repo = self._factory.repo(wire)
342 repo = self._factory.repo(wire)
343 fsobj = svn.repos.fs(repo)
343 fsobj = svn.repos.fs(repo)
344 if revision is None:
344 if revision is None:
345 revision = svn.fs.youngest_revision(fsobj)
345 revision = svn.fs.youngest_revision(fsobj)
346 root = svn.fs.revision_root(fsobj, revision)
346 root = svn.fs.revision_root(fsobj, revision)
347 size = svn.fs.file_length(root, path)
347 size = svn.fs.file_length(root, path)
348 return size
348 return size
349
349
350 def create_repository(self, wire, compatible_version=None):
350 def create_repository(self, wire, compatible_version=None):
351 log.info('Creating Subversion repository in path "%s"', wire['path'])
351 log.info('Creating Subversion repository in path "%s"', wire['path'])
352 self._factory.repo(wire, create=True,
352 self._factory.repo(wire, create=True,
353 compatible_version=compatible_version)
353 compatible_version=compatible_version)
354
354
355 def get_url_and_credentials(self, src_url):
355 def get_url_and_credentials(self, src_url):
356 obj = urlparse.urlparse(src_url)
356 obj = urlparse.urlparse(src_url)
357 username = obj.username or None
357 username = obj.username or None
358 password = obj.password or None
358 password = obj.password or None
359 return username, password, src_url
359 return username, password, src_url
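Credential extraction relies on standard urlparse behaviour; for illustration
(the host and credentials below are made up):

    import urlparse  # Python 2 stdlib, as used by this module
    parts = urlparse.urlparse('https://alice:s3cret@svn.example.com/repo')
    # parts.username == 'alice', parts.password == 's3cret'
    # note that src_url itself is returned unchanged by the method above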
360
360
361 def import_remote_repository(self, wire, src_url):
361 def import_remote_repository(self, wire, src_url):
362 repo_path = wire['path']
362 repo_path = wire['path']
363 if not self.is_path_valid_repository(wire, repo_path):
363 if not self.is_path_valid_repository(wire, repo_path):
364 raise Exception(
364 raise Exception(
365 "Path %s is not a valid Subversion repository." % repo_path)
365 "Path %s is not a valid Subversion repository." % repo_path)
366
366
367 username, password, src_url = self.get_url_and_credentials(src_url)
367 username, password, src_url = self.get_url_and_credentials(src_url)
368 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
368 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
369 '--trust-server-cert-failures=unknown-ca']
369 '--trust-server-cert-failures=unknown-ca']
370 if username and password:
370 if username and password:
371 rdump_cmd += ['--username', username, '--password', password]
371 rdump_cmd += ['--username', username, '--password', password]
372 rdump_cmd += [src_url]
372 rdump_cmd += [src_url]
373
373
374 rdump = subprocess.Popen(
374 rdump = subprocess.Popen(
375 rdump_cmd,
375 rdump_cmd,
376 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
376 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
377 load = subprocess.Popen(
377 load = subprocess.Popen(
378 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
378 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
379
379
380 # TODO: johbo: This can be a very long operation, might be better
380 # TODO: johbo: This can be a very long operation, might be better
381 # to track some kind of status and provide an api to check if the
381 # to track some kind of status and provide an api to check if the
382 # import is done.
382 # import is done.
383 rdump.wait()
383 rdump.wait()
384 load.wait()
384 load.wait()
385
385
386 log.debug('svnrdump process ended with code: %s', rdump.returncode)
386 log.debug('svnrdump process ended with code: %s', rdump.returncode)
387 if rdump.returncode != 0:
387 if rdump.returncode != 0:
388 errors = rdump.stderr.read()
388 errors = rdump.stderr.read()
389 log.error('svnrdump dump failed: statuscode %s: message: %s',
389 log.error('svnrdump dump failed: statuscode %s: message: %s',
390 rdump.returncode, errors)
390 rdump.returncode, errors)
391 reason = 'UNKNOWN'
391 reason = 'UNKNOWN'
392 if 'svnrdump: E230001:' in errors:
392 if 'svnrdump: E230001:' in errors:
393 reason = 'INVALID_CERTIFICATE'
393 reason = 'INVALID_CERTIFICATE'
394
394
395 if reason == 'UNKNOWN':
395 if reason == 'UNKNOWN':
396 reason = 'UNKNOWN:{}'.format(errors)
396 reason = 'UNKNOWN:{}'.format(errors)
397 raise Exception(
397 raise Exception(
398 'Failed to dump the remote repository from %s. Reason:%s' % (
398 'Failed to dump the remote repository from %s. Reason:%s' % (
399 src_url, reason))
399 src_url, reason))
400 if load.returncode != 0:
400 if load.returncode != 0:
401 raise Exception(
401 raise Exception(
402 'Failed to load the dump of the remote repository from %s.' %
402 'Failed to load the dump of the remote repository from %s.' %
403 (src_url, ))
403 (src_url, ))
404
404
405 def commit(self, wire, message, author, timestamp, updated, removed):
405 def commit(self, wire, message, author, timestamp, updated, removed):
406 assert isinstance(message, str)
406 assert isinstance(message, str)
407 assert isinstance(author, str)
407 assert isinstance(author, str)
408
408
409 repo = self._factory.repo(wire)
409 repo = self._factory.repo(wire)
410 fsobj = svn.repos.fs(repo)
410 fsobj = svn.repos.fs(repo)
411
411
412 rev = svn.fs.youngest_rev(fsobj)
412 rev = svn.fs.youngest_rev(fsobj)
413 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
413 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
414 txn_root = svn.fs.txn_root(txn)
414 txn_root = svn.fs.txn_root(txn)
415
415
416 for node in updated:
416 for node in updated:
417 TxnNodeProcessor(node, txn_root).update()
417 TxnNodeProcessor(node, txn_root).update()
418 for node in removed:
418 for node in removed:
419 TxnNodeProcessor(node, txn_root).remove()
419 TxnNodeProcessor(node, txn_root).remove()
420
420
421 commit_id = svn.repos.fs_commit_txn(repo, txn)
421 commit_id = svn.repos.fs_commit_txn(repo, txn)
422
422
423 if timestamp:
423 if timestamp:
424 apr_time = apr_time_t(timestamp)
424 apr_time = apr_time_t(timestamp)
425 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
425 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
426 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
426 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
427
427
428 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
428 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
429 return commit_id
429 return commit_id
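A sketch of the node payloads commit() expects, based on how TxnNodeProcessor
(defined further down) reads them; paths and content are examples only:

    updated = [{
        'path': 'trunk/docs/readme.txt',            # str, required
        'content': 'new file body\n',               # str, required
        'properties': {'svn:eol-style': 'native'},  # optional dict
    }]
    removed = [{'path': 'trunk/obsolete.txt'}]
    commit_id = remote.commit(
        wire, 'update readme', 'alice', None, updated, removed)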
430
430
431 def diff(self, wire, rev1, rev2, path1=None, path2=None,
431 def diff(self, wire, rev1, rev2, path1=None, path2=None,
432 ignore_whitespace=False, context=3):
432 ignore_whitespace=False, context=3):
433
433
434 wire.update(cache=False)
434 wire.update(cache=False)
435 repo = self._factory.repo(wire)
435 repo = self._factory.repo(wire)
436 diff_creator = SvnDiffer(
436 diff_creator = SvnDiffer(
437 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
437 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
438 try:
438 try:
439 return diff_creator.generate_diff()
439 return diff_creator.generate_diff()
440 except svn.core.SubversionException as e:
440 except svn.core.SubversionException as e:
441 log.exception(
441 log.exception(
442 "Error during diff operation. "
442 "Error during diff operation. "
443 "Path might not exist %s, %s" % (path1, path2))
443 "Path might not exist %s, %s" % (path1, path2))
444 return ""
444 return ""
445
445
446 @reraise_safe_exceptions
446 @reraise_safe_exceptions
447 def is_large_file(self, wire, path):
447 def is_large_file(self, wire, path):
448 return False
448 return False
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def install_hooks(self, wire, force=False):
451 def install_hooks(self, wire, force=False):
452 from vcsserver.hook_utils import install_svn_hooks
452 from vcsserver.hook_utils import install_svn_hooks
453 repo_path = wire['path']
453 repo_path = wire['path']
454 binary_dir = settings.BINARY_DIR
454 binary_dir = settings.BINARY_DIR
455 executable = None
455 executable = None
456 if binary_dir:
456 if binary_dir:
457 executable = os.path.join(binary_dir, 'python')
457 executable = os.path.join(binary_dir, 'python')
458 return install_svn_hooks(
458 return install_svn_hooks(
459 repo_path, executable=executable, force_create=force)
459 repo_path, executable=executable, force_create=force)
460
460
461 @reraise_safe_exceptions
462 def get_hooks_info(self, wire):
463 from vcsserver.hook_utils import (
464 get_svn_pre_hook_version, get_svn_post_hook_version)
465 repo_path = wire['path']
466 return {
467 'pre_version': get_svn_pre_hook_version(repo_path),
468 'post_version': get_svn_post_hook_version(repo_path),
469 }
470
461
471
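The get_hooks_info() call added here mirrors install_hooks() above; a rough
usage sketch, with an illustrative wire path:

    info = remote.get_hooks_info({'path': '/srv/svn/myrepo'})
    # a dict like {'pre_version': ..., 'post_version': ...}, holding whatever
    # version strings hook_utils reads from the installed pre-commit /
    # post-commit hook files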
462 class SvnDiffer(object):
472 class SvnDiffer(object):
463 """
473 """
464 Utility to create diffs based on difflib and the Subversion api
474 Utility to create diffs based on difflib and the Subversion api
465 """
475 """
466
476
467 binary_content = False
477 binary_content = False
468
478
469 def __init__(
479 def __init__(
470 self, repo, src_rev, src_path, tgt_rev, tgt_path,
480 self, repo, src_rev, src_path, tgt_rev, tgt_path,
471 ignore_whitespace, context):
481 ignore_whitespace, context):
472 self.repo = repo
482 self.repo = repo
473 self.ignore_whitespace = ignore_whitespace
483 self.ignore_whitespace = ignore_whitespace
474 self.context = context
484 self.context = context
475
485
476 fsobj = svn.repos.fs(repo)
486 fsobj = svn.repos.fs(repo)
477
487
478 self.tgt_rev = tgt_rev
488 self.tgt_rev = tgt_rev
479 self.tgt_path = tgt_path or ''
489 self.tgt_path = tgt_path or ''
480 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
490 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
481 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
491 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
482
492
483 self.src_rev = src_rev
493 self.src_rev = src_rev
484 self.src_path = src_path or self.tgt_path
494 self.src_path = src_path or self.tgt_path
485 self.src_root = svn.fs.revision_root(fsobj, src_rev)
495 self.src_root = svn.fs.revision_root(fsobj, src_rev)
486 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
496 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
487
497
488 self._validate()
498 self._validate()
489
499
490 def _validate(self):
500 def _validate(self):
491 if (self.tgt_kind != svn.core.svn_node_none and
501 if (self.tgt_kind != svn.core.svn_node_none and
492 self.src_kind != svn.core.svn_node_none and
502 self.src_kind != svn.core.svn_node_none and
493 self.src_kind != self.tgt_kind):
503 self.src_kind != self.tgt_kind):
494 # TODO: johbo: proper error handling
504 # TODO: johbo: proper error handling
495 raise Exception(
505 raise Exception(
496 "Source and target are not compatible for diff generation. "
506 "Source and target are not compatible for diff generation. "
497 "Source type: %s, target type: %s" %
507 "Source type: %s, target type: %s" %
498 (self.src_kind, self.tgt_kind))
508 (self.src_kind, self.tgt_kind))
499
509
500 def generate_diff(self):
510 def generate_diff(self):
501 buf = StringIO.StringIO()
511 buf = StringIO.StringIO()
502 if self.tgt_kind == svn.core.svn_node_dir:
512 if self.tgt_kind == svn.core.svn_node_dir:
503 self._generate_dir_diff(buf)
513 self._generate_dir_diff(buf)
504 else:
514 else:
505 self._generate_file_diff(buf)
515 self._generate_file_diff(buf)
506 return buf.getvalue()
516 return buf.getvalue()
507
517
508 def _generate_dir_diff(self, buf):
518 def _generate_dir_diff(self, buf):
509 editor = DiffChangeEditor()
519 editor = DiffChangeEditor()
510 editor_ptr, editor_baton = svn.delta.make_editor(editor)
520 editor_ptr, editor_baton = svn.delta.make_editor(editor)
511 svn.repos.dir_delta2(
521 svn.repos.dir_delta2(
512 self.src_root,
522 self.src_root,
513 self.src_path,
523 self.src_path,
514 '', # src_entry
524 '', # src_entry
515 self.tgt_root,
525 self.tgt_root,
516 self.tgt_path,
526 self.tgt_path,
517 editor_ptr, editor_baton,
527 editor_ptr, editor_baton,
518 authorization_callback_allow_all,
528 authorization_callback_allow_all,
519 False, # text_deltas
529 False, # text_deltas
520 svn.core.svn_depth_infinity, # depth
530 svn.core.svn_depth_infinity, # depth
521 False, # entry_props
531 False, # entry_props
522 False, # ignore_ancestry
532 False, # ignore_ancestry
523 )
533 )
524
534
525 for path, __, change in sorted(editor.changes):
535 for path, __, change in sorted(editor.changes):
526 self._generate_node_diff(
536 self._generate_node_diff(
527 buf, change, path, self.tgt_path, path, self.src_path)
537 buf, change, path, self.tgt_path, path, self.src_path)
528
538
529 def _generate_file_diff(self, buf):
539 def _generate_file_diff(self, buf):
530 change = None
540 change = None
531 if self.src_kind == svn.core.svn_node_none:
541 if self.src_kind == svn.core.svn_node_none:
532 change = "add"
542 change = "add"
533 elif self.tgt_kind == svn.core.svn_node_none:
543 elif self.tgt_kind == svn.core.svn_node_none:
534 change = "delete"
544 change = "delete"
535 tgt_base, tgt_path = vcspath.split(self.tgt_path)
545 tgt_base, tgt_path = vcspath.split(self.tgt_path)
536 src_base, src_path = vcspath.split(self.src_path)
546 src_base, src_path = vcspath.split(self.src_path)
537 self._generate_node_diff(
547 self._generate_node_diff(
538 buf, change, tgt_path, tgt_base, src_path, src_base)
548 buf, change, tgt_path, tgt_base, src_path, src_base)
539
549
540 def _generate_node_diff(
550 def _generate_node_diff(
541 self, buf, change, tgt_path, tgt_base, src_path, src_base):
551 self, buf, change, tgt_path, tgt_base, src_path, src_base):
542
552
543 if self.src_rev == self.tgt_rev and tgt_base == src_base:
553 if self.src_rev == self.tgt_rev and tgt_base == src_base:
544 # to stay consistent with git/hg, return an empty diff if we
554 # to stay consistent with git/hg, return an empty diff if we
545 # compare the same revisions
555 # compare the same revisions
546 return
556 return
547
557
548 tgt_full_path = vcspath.join(tgt_base, tgt_path)
558 tgt_full_path = vcspath.join(tgt_base, tgt_path)
549 src_full_path = vcspath.join(src_base, src_path)
559 src_full_path = vcspath.join(src_base, src_path)
550
560
551 self.binary_content = False
561 self.binary_content = False
552 mime_type = self._get_mime_type(tgt_full_path)
562 mime_type = self._get_mime_type(tgt_full_path)
553
563
554 if mime_type and not mime_type.startswith('text'):
564 if mime_type and not mime_type.startswith('text'):
555 self.binary_content = True
565 self.binary_content = True
556 buf.write("=" * 67 + '\n')
566 buf.write("=" * 67 + '\n')
557 buf.write("Cannot display: file marked as a binary type.\n")
567 buf.write("Cannot display: file marked as a binary type.\n")
558 buf.write("svn:mime-type = %s\n" % mime_type)
568 buf.write("svn:mime-type = %s\n" % mime_type)
559 buf.write("Index: %s\n" % (tgt_path, ))
569 buf.write("Index: %s\n" % (tgt_path, ))
560 buf.write("=" * 67 + '\n')
570 buf.write("=" * 67 + '\n')
561 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
571 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
562 'tgt_path': tgt_path})
572 'tgt_path': tgt_path})
563
573
564 if change == 'add':
574 if change == 'add':
565 # TODO: johbo: SVN is missing a zero here compared to git
575 # TODO: johbo: SVN is missing a zero here compared to git
566 buf.write("new file mode 10644\n")
576 buf.write("new file mode 10644\n")
567
577
568 # TODO(marcink): introduce binary detection of svn patches
578 # TODO(marcink): introduce binary detection of svn patches
569 # if self.binary_content:
579 # if self.binary_content:
570 # buf.write('GIT binary patch\n')
580 # buf.write('GIT binary patch\n')
571
581
572 buf.write("--- /dev/null\t(revision 0)\n")
582 buf.write("--- /dev/null\t(revision 0)\n")
573 src_lines = []
583 src_lines = []
574 else:
584 else:
575 if change == 'delete':
585 if change == 'delete':
576 buf.write("deleted file mode 10644\n")
586 buf.write("deleted file mode 10644\n")
577
587
578 # TODO(marcink): introduce binary detection of svn patches
588 # TODO(marcink): introduce binary detection of svn patches
579 # if self.binary_content:
589 # if self.binary_content:
580 # buf.write('GIT binary patch\n')
590 # buf.write('GIT binary patch\n')
581
591
582 buf.write("--- a/%s\t(revision %s)\n" % (
592 buf.write("--- a/%s\t(revision %s)\n" % (
583 src_path, self.src_rev))
593 src_path, self.src_rev))
584 src_lines = self._svn_readlines(self.src_root, src_full_path)
594 src_lines = self._svn_readlines(self.src_root, src_full_path)
585
595
586 if change == 'delete':
596 if change == 'delete':
587 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
597 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
588 tgt_lines = []
598 tgt_lines = []
589 else:
599 else:
590 buf.write("+++ b/%s\t(revision %s)\n" % (
600 buf.write("+++ b/%s\t(revision %s)\n" % (
591 tgt_path, self.tgt_rev))
601 tgt_path, self.tgt_rev))
592 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
602 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
593
603
594 if not self.binary_content:
604 if not self.binary_content:
595 udiff = svn_diff.unified_diff(
605 udiff = svn_diff.unified_diff(
596 src_lines, tgt_lines, context=self.context,
606 src_lines, tgt_lines, context=self.context,
597 ignore_blank_lines=self.ignore_whitespace,
607 ignore_blank_lines=self.ignore_whitespace,
598 ignore_case=False,
608 ignore_case=False,
599 ignore_space_changes=self.ignore_whitespace)
609 ignore_space_changes=self.ignore_whitespace)
600 buf.writelines(udiff)
610 buf.writelines(udiff)
601
611
602 def _get_mime_type(self, path):
612 def _get_mime_type(self, path):
603 try:
613 try:
604 mime_type = svn.fs.node_prop(
614 mime_type = svn.fs.node_prop(
605 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
615 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
606 except svn.core.SubversionException:
616 except svn.core.SubversionException:
607 mime_type = svn.fs.node_prop(
617 mime_type = svn.fs.node_prop(
608 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
618 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
609 return mime_type
619 return mime_type
610
620
611 def _svn_readlines(self, fs_root, node_path):
621 def _svn_readlines(self, fs_root, node_path):
612 if self.binary_content:
622 if self.binary_content:
613 return []
623 return []
614 node_kind = svn.fs.check_path(fs_root, node_path)
624 node_kind = svn.fs.check_path(fs_root, node_path)
615 if node_kind not in (
625 if node_kind not in (
616 svn.core.svn_node_file, svn.core.svn_node_symlink):
626 svn.core.svn_node_file, svn.core.svn_node_symlink):
617 return []
627 return []
618 content = svn.core.Stream(
628 content = svn.core.Stream(
619 svn.fs.file_contents(fs_root, node_path)).read()
629 svn.fs.file_contents(fs_root, node_path)).read()
620 return content.splitlines(True)
630 return content.splitlines(True)
621
631
622
632
623
633
624 class DiffChangeEditor(svn.delta.Editor):
634 class DiffChangeEditor(svn.delta.Editor):
625 """
635 """
626 Records changes between two given revisions
636 Records changes between two given revisions
627 """
637 """
628
638
629 def __init__(self):
639 def __init__(self):
630 self.changes = []
640 self.changes = []
631
641
632 def delete_entry(self, path, revision, parent_baton, pool=None):
642 def delete_entry(self, path, revision, parent_baton, pool=None):
633 self.changes.append((path, None, 'delete'))
643 self.changes.append((path, None, 'delete'))
634
644
635 def add_file(
645 def add_file(
636 self, path, parent_baton, copyfrom_path, copyfrom_revision,
646 self, path, parent_baton, copyfrom_path, copyfrom_revision,
637 file_pool=None):
647 file_pool=None):
638 self.changes.append((path, 'file', 'add'))
648 self.changes.append((path, 'file', 'add'))
639
649
640 def open_file(self, path, parent_baton, base_revision, file_pool=None):
650 def open_file(self, path, parent_baton, base_revision, file_pool=None):
641 self.changes.append((path, 'file', 'change'))
651 self.changes.append((path, 'file', 'change'))
642
652
643
653
644 def authorization_callback_allow_all(root, path, pool):
654 def authorization_callback_allow_all(root, path, pool):
645 return True
655 return True
646
656
647
657
648 class TxnNodeProcessor(object):
658 class TxnNodeProcessor(object):
649 """
659 """
650 Utility to process the change of one node within a transaction root.
660 Utility to process the change of one node within a transaction root.
651
661
652 It encapsulates the knowledge of how to add, update or remove
662 It encapsulates the knowledge of how to add, update or remove
653 a node for a given transaction root. The purpose is to support the method
663 a node for a given transaction root. The purpose is to support the method
654 `SvnRemote.commit`.
664 `SvnRemote.commit`.
655 """
665 """
656
666
657 def __init__(self, node, txn_root):
667 def __init__(self, node, txn_root):
658 assert isinstance(node['path'], str)
668 assert isinstance(node['path'], str)
659
669
660 self.node = node
670 self.node = node
661 self.txn_root = txn_root
671 self.txn_root = txn_root
662
672
663 def update(self):
673 def update(self):
664 self._ensure_parent_dirs()
674 self._ensure_parent_dirs()
665 self._add_file_if_node_does_not_exist()
675 self._add_file_if_node_does_not_exist()
666 self._update_file_content()
676 self._update_file_content()
667 self._update_file_properties()
677 self._update_file_properties()
668
678
669 def remove(self):
679 def remove(self):
670 svn.fs.delete(self.txn_root, self.node['path'])
680 svn.fs.delete(self.txn_root, self.node['path'])
671 # TODO: Clean up directory if empty
681 # TODO: Clean up directory if empty
672
682
673 def _ensure_parent_dirs(self):
683 def _ensure_parent_dirs(self):
674 curdir = vcspath.dirname(self.node['path'])
684 curdir = vcspath.dirname(self.node['path'])
675 dirs_to_create = []
685 dirs_to_create = []
676 while not self._svn_path_exists(curdir):
686 while not self._svn_path_exists(curdir):
677 dirs_to_create.append(curdir)
687 dirs_to_create.append(curdir)
678 curdir = vcspath.dirname(curdir)
688 curdir = vcspath.dirname(curdir)
679
689
680 for curdir in reversed(dirs_to_create):
690 for curdir in reversed(dirs_to_create):
681 log.debug('Creating missing directory "%s"', curdir)
691 log.debug('Creating missing directory "%s"', curdir)
682 svn.fs.make_dir(self.txn_root, curdir)
692 svn.fs.make_dir(self.txn_root, curdir)
683
693
684 def _svn_path_exists(self, path):
694 def _svn_path_exists(self, path):
685 path_status = svn.fs.check_path(self.txn_root, path)
695 path_status = svn.fs.check_path(self.txn_root, path)
686 return path_status != svn.core.svn_node_none
696 return path_status != svn.core.svn_node_none
687
697
688 def _add_file_if_node_does_not_exist(self):
698 def _add_file_if_node_does_not_exist(self):
689 kind = svn.fs.check_path(self.txn_root, self.node['path'])
699 kind = svn.fs.check_path(self.txn_root, self.node['path'])
690 if kind == svn.core.svn_node_none:
700 if kind == svn.core.svn_node_none:
691 svn.fs.make_file(self.txn_root, self.node['path'])
701 svn.fs.make_file(self.txn_root, self.node['path'])
692
702
693 def _update_file_content(self):
703 def _update_file_content(self):
694 assert isinstance(self.node['content'], str)
704 assert isinstance(self.node['content'], str)
695 handler, baton = svn.fs.apply_textdelta(
705 handler, baton = svn.fs.apply_textdelta(
696 self.txn_root, self.node['path'], None, None)
706 self.txn_root, self.node['path'], None, None)
697 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
707 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
698
708
699 def _update_file_properties(self):
709 def _update_file_properties(self):
700 properties = self.node.get('properties', {})
710 properties = self.node.get('properties', {})
701 for key, value in properties.iteritems():
711 for key, value in properties.iteritems():
702 svn.fs.change_node_prop(
712 svn.fs.change_node_prop(
703 self.txn_root, self.node['path'], key, value)
713 self.txn_root, self.node['path'], key, value)
704
714
705
715
706 def apr_time_t(timestamp):
716 def apr_time_t(timestamp):
707 """
717 """
708 Convert a Python timestamp into APR timestamp type apr_time_t
718 Convert a Python timestamp into APR timestamp type apr_time_t
709 """
719 """
710 return timestamp * 1E6
720 return timestamp * 1E6
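A quick worked example of the conversion above (timestamp chosen arbitrarily):

    apr_time_t(1546300800)  # 2019-01-01 00:00:00 UTC
    # -> 1546300800000000.0, i.e. microseconds since the epoch, as a float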
711
721
712
722
713 def svn_opt_revision_value_t(num):
723 def svn_opt_revision_value_t(num):
714 """
724 """
715 Put `num` into a `svn_opt_revision_value_t` structure.
725 Put `num` into a `svn_opt_revision_value_t` structure.
716 """
726 """
717 value = svn.core.svn_opt_revision_value_t()
727 value = svn.core.svn_opt_revision_value_t()
718 value.number = num
728 value.number = num
719 revision = svn.core.svn_opt_revision_t()
729 revision = svn.core.svn_opt_revision_t()
720 revision.kind = svn.core.svn_opt_revision_number
730 revision.kind = svn.core.svn_opt_revision_number
721 revision.value = value
731 revision.value = value
722 return revision
732 return revision