##// END OF EJS Templates
largefiles: added handling of detection and fetching of largefiles....
marcink -
r182:5af7fc0d default
parent child Browse files
Show More
@@ -1,589 +1,644 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41
41 from vcsserver.git_lfs.lib import LFSOidStore
42
42
# Aliases for stat/dulwich mode constants used when classifying tree entries.
DIR_STAT = stat.S_IFDIR  # mode bits marking a directory entry
FILE_MODE = stat.S_IFMT  # mask extracting the file-type bits from a mode
GIT_LINK = objects.S_IFGITLINK  # mode marking a submodule (gitlink) entry

log = logging.getLogger(__name__)
48
48
49
49
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""
    lookup_errors = (
        ChecksumMismatch, WrongObjectException, MissingCommitError,
        ObjectMissing)
    hangup_errors = (HangupException, UnexpectedCommandError)

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(e.message)
        except hangup_errors as e:
            raise exceptions.VcsException(e.message)
        except Exception as e:
            # NOTE(marcink): becuase of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
70
70
71
71
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.

    TODO: mikhail: please check if we need this wrapper after updating dulwich
    to 0.12.0 +
    """
    def __del__(self):
        # A repo that failed during construction may never have created an
        # object store; only fully initialized instances need closing.
        if not hasattr(self, 'object_store'):
            return
        self.close()
86
86
87
87
class GitFactory(RepoFactory):
    """Factory that turns a wire payload into an opened dulwich ``Repo``."""

    def _create_repo(self, wire, create):
        # `create` is part of the factory contract; git repositories are
        # opened in place here, never created.
        return Repo(str_to_dulwich(wire['path']))
93
93
94
94
95 class GitRemote(object):
95 class GitRemote(object):
96
96
    def __init__(self, factory):
        # Factory responsible for opening dulwich Repo objects from a wire.
        self._factory = factory

        # Dispatch table for bulk_request(): maps a pre_load attribute name
        # to the method that resolves it; bulk_request() appends extra
        # positional args for some entries (see its body).
        self._bulk_methods = {
            "author": self.commit_attribute,
            "date": self.get_object_attrs,
            "message": self.commit_attribute,
            "parents": self.commit_attribute,
            "_commit": self.revision,
        }
107
107
108 def _wire_to_config(self, wire):
109 if 'config' in wire:
110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return {}
112
108 def _assign_ref(self, wire, ref, commit_id):
113 def _assign_ref(self, wire, ref, commit_id):
109 repo = self._factory.repo(wire)
114 repo = self._factory.repo(wire)
110 repo[ref] = commit_id
115 repo[ref] = commit_id
111
116
112 @reraise_safe_exceptions
117 @reraise_safe_exceptions
113 def add_object(self, wire, content):
118 def add_object(self, wire, content):
114 repo = self._factory.repo(wire)
119 repo = self._factory.repo(wire)
115 blob = objects.Blob()
120 blob = objects.Blob()
116 blob.set_raw_string(content)
121 blob.set_raw_string(content)
117 repo.object_store.add_object(blob)
122 repo.object_store.add_object(blob)
118 return blob.id
123 return blob.id
119
124
120 @reraise_safe_exceptions
125 @reraise_safe_exceptions
121 def assert_correct_path(self, wire):
126 def assert_correct_path(self, wire):
122 try:
127 try:
123 self._factory.repo(wire)
128 self._factory.repo(wire)
124 except NotGitRepository as e:
129 except NotGitRepository as e:
125 # Exception can contain unicode which we convert
130 # Exception can contain unicode which we convert
126 raise exceptions.AbortException(repr(e))
131 raise exceptions.AbortException(repr(e))
127
132
128 @reraise_safe_exceptions
133 @reraise_safe_exceptions
129 def bare(self, wire):
134 def bare(self, wire):
130 repo = self._factory.repo(wire)
135 repo = self._factory.repo(wire)
131 return repo.bare
136 return repo.bare
132
137
133 @reraise_safe_exceptions
138 @reraise_safe_exceptions
134 def blob_as_pretty_string(self, wire, sha):
139 def blob_as_pretty_string(self, wire, sha):
135 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
136 return repo[sha].as_pretty_string()
141 return repo[sha].as_pretty_string()
137
142
138 @reraise_safe_exceptions
143 @reraise_safe_exceptions
139 def blob_raw_length(self, wire, sha):
144 def blob_raw_length(self, wire, sha):
140 repo = self._factory.repo(wire)
145 repo = self._factory.repo(wire)
141 blob = repo[sha]
146 blob = repo[sha]
142 return blob.raw_length()
147 return blob.raw_length()
143
148
149 def _parse_lfs_pointer(self, raw_content):
150
151 spec_string = 'version https://git-lfs.github.com/spec'
152 if raw_content and raw_content.startswith(spec_string):
153 pattern = re.compile(r"""
154 (?:\n)?
155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 ^size[ ](?P<oid_size>[0-9]+)\n
158 (?:\n)?
159 """, re.VERBOSE | re.MULTILINE)
160 match = pattern.match(raw_content)
161 if match:
162 return match.groupdict()
163
164 return {}
165
166 @reraise_safe_exceptions
167 def is_large_file(self, wire, sha):
168 repo = self._factory.repo(wire)
169 blob = repo[sha]
170 return self._parse_lfs_pointer(blob.as_raw_string())
171
172 @reraise_safe_exceptions
173 def in_largefiles_store(self, wire, oid):
174 repo = self._factory.repo(wire)
175 conf = self._wire_to_config(wire)
176
177 store_location = conf.get('vcs_git_lfs_store_location')
178 if store_location:
179 repo_name = repo.path
180 store = LFSOidStore(
181 oid=oid, repo=repo_name, store_location=store_location)
182 return store.has_oid()
183
184 return False
185
186 @reraise_safe_exceptions
187 def store_path(self, wire, oid):
188 repo = self._factory.repo(wire)
189 conf = self._wire_to_config(wire)
190
191 store_location = conf.get('vcs_git_lfs_store_location')
192 if store_location:
193 repo_name = repo.path
194 store = LFSOidStore(
195 oid=oid, repo=repo_name, store_location=store_location)
196 return store.oid_path
197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198
144 @reraise_safe_exceptions
199 @reraise_safe_exceptions
145 def bulk_request(self, wire, rev, pre_load):
200 def bulk_request(self, wire, rev, pre_load):
146 result = {}
201 result = {}
147 for attr in pre_load:
202 for attr in pre_load:
148 try:
203 try:
149 method = self._bulk_methods[attr]
204 method = self._bulk_methods[attr]
150 args = [wire, rev]
205 args = [wire, rev]
151 if attr == "date":
206 if attr == "date":
152 args.extend(["commit_time", "commit_timezone"])
207 args.extend(["commit_time", "commit_timezone"])
153 elif attr in ["author", "message", "parents"]:
208 elif attr in ["author", "message", "parents"]:
154 args.append(attr)
209 args.append(attr)
155 result[attr] = method(*args)
210 result[attr] = method(*args)
156 except KeyError:
211 except KeyError:
157 raise exceptions.VcsException(
212 raise exceptions.VcsException(
158 "Unknown bulk attribute: %s" % attr)
213 "Unknown bulk attribute: %s" % attr)
159 return result
214 return result
160
215
161 def _build_opener(self, url):
216 def _build_opener(self, url):
162 handlers = []
217 handlers = []
163 url_obj = url_parser(url)
218 url_obj = url_parser(url)
164 _, authinfo = url_obj.authinfo()
219 _, authinfo = url_obj.authinfo()
165
220
166 if authinfo:
221 if authinfo:
167 # create a password manager
222 # create a password manager
168 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
169 passmgr.add_password(*authinfo)
224 passmgr.add_password(*authinfo)
170
225
171 handlers.extend((httpbasicauthhandler(passmgr),
226 handlers.extend((httpbasicauthhandler(passmgr),
172 httpdigestauthhandler(passmgr)))
227 httpdigestauthhandler(passmgr)))
173
228
174 return urllib2.build_opener(*handlers)
229 return urllib2.build_opener(*handlers)
175
230
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify that `url` answers like a git 'smart HTTP' endpoint.

        Probes ``<url>/info/refs?service=git-upload-pack`` and inspects the
        response. Raises URLError when the URL cannot be opened or does not
        look like a git repository; returns True on success.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # Mask credentials and query values before the URL reaches the logs.
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
218
273
219 @reraise_safe_exceptions
274 @reraise_safe_exceptions
220 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
221 remote_refs = self.fetch(wire, url, apply_refs=False)
276 remote_refs = self.fetch(wire, url, apply_refs=False)
222 repo = self._factory.repo(wire)
277 repo = self._factory.repo(wire)
223 if isinstance(valid_refs, list):
278 if isinstance(valid_refs, list):
224 valid_refs = tuple(valid_refs)
279 valid_refs = tuple(valid_refs)
225
280
226 for k in remote_refs:
281 for k in remote_refs:
227 # only parse heads/tags and skip so called deferred tags
282 # only parse heads/tags and skip so called deferred tags
228 if k.startswith(valid_refs) and not k.endswith(deferred):
283 if k.startswith(valid_refs) and not k.endswith(deferred):
229 repo[k] = remote_refs[k]
284 repo[k] = remote_refs[k]
230
285
231 if update_after_clone:
286 if update_after_clone:
232 # we want to checkout HEAD
287 # we want to checkout HEAD
233 repo["HEAD"] = remote_refs["HEAD"]
288 repo["HEAD"] = remote_refs["HEAD"]
234 index.build_index_from_tree(repo.path, repo.index_path(),
289 index.build_index_from_tree(repo.path, repo.index_path(),
235 repo.object_store, repo["HEAD"].tree)
290 repo.object_store, repo["HEAD"].tree)
236
291
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """Build a commit from updated/removed nodes and return its sha.

        :param commit_data: dict of raw commit fields (author, message, ...)
            set verbatim on the dulwich Commit object.
        :param commit_tree: sha of the parent commit's tree, or falsy to start
            from an empty tree.
        :param updated: list of node dicts with 'path', 'node_path', 'mode'
            and 'content' keys.
        :param removed: list of node paths to delete from the tree.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(
                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
336
391
    @reraise_safe_exceptions
    def fetch(self, wire, url, apply_refs=True, refs=None):
        """Fetch objects from `url` into the wire repository.

        :param apply_refs: when True, fetched refs are written into the local
            repo (and HEAD is pointed at the last requested ref); when False,
            the remote refs dict is returned instead.
        :param refs: optional list of ref names to restrict the fetch to.
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            PEELED_REF_MARKER = '^{}'
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.info("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

            # TODO: mikhail: should we return remote_refs here to be
            # consistent?
        else:
            return remote_refs
388
443
389 @reraise_safe_exceptions
444 @reraise_safe_exceptions
390 def get_remote_refs(self, wire, url):
445 def get_remote_refs(self, wire, url):
391 repo = Repo(url)
446 repo = Repo(url)
392 return repo.get_refs()
447 return repo.get_refs()
393
448
394 @reraise_safe_exceptions
449 @reraise_safe_exceptions
395 def get_description(self, wire):
450 def get_description(self, wire):
396 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
397 return repo.get_description()
452 return repo.get_description()
398
453
399 @reraise_safe_exceptions
454 @reraise_safe_exceptions
400 def get_file_history(self, wire, file_path, commit_id, limit):
455 def get_file_history(self, wire, file_path, commit_id, limit):
401 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
402 include = [commit_id]
457 include = [commit_id]
403 paths = [file_path]
458 paths = [file_path]
404
459
405 walker = repo.get_walker(include, paths=paths, max_entries=limit)
460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
406 return [x.commit.id for x in walker]
461 return [x.commit.id for x in walker]
407
462
408 @reraise_safe_exceptions
463 @reraise_safe_exceptions
409 def get_missing_revs(self, wire, rev1, rev2, path2):
464 def get_missing_revs(self, wire, rev1, rev2, path2):
410 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
411 LocalGitClient(thin_packs=False).fetch(path2, repo)
466 LocalGitClient(thin_packs=False).fetch(path2, repo)
412
467
413 wire_remote = wire.copy()
468 wire_remote = wire.copy()
414 wire_remote['path'] = path2
469 wire_remote['path'] = path2
415 repo_remote = self._factory.repo(wire_remote)
470 repo_remote = self._factory.repo(wire_remote)
416 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
417
472
418 revs = [
473 revs = [
419 x.commit.id
474 x.commit.id
420 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
421 return revs
476 return revs
422
477
423 @reraise_safe_exceptions
478 @reraise_safe_exceptions
424 def get_object(self, wire, sha):
479 def get_object(self, wire, sha):
425 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
426 obj = repo.get_object(sha)
481 obj = repo.get_object(sha)
427 commit_id = obj.id
482 commit_id = obj.id
428
483
429 if isinstance(obj, Tag):
484 if isinstance(obj, Tag):
430 commit_id = obj.object[1]
485 commit_id = obj.object[1]
431
486
432 return {
487 return {
433 'id': obj.id,
488 'id': obj.id,
434 'type': obj.type_name,
489 'type': obj.type_name,
435 'commit_id': commit_id
490 'commit_id': commit_id
436 }
491 }
437
492
438 @reraise_safe_exceptions
493 @reraise_safe_exceptions
439 def get_object_attrs(self, wire, sha, *attrs):
494 def get_object_attrs(self, wire, sha, *attrs):
440 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
441 obj = repo.get_object(sha)
496 obj = repo.get_object(sha)
442 return list(getattr(obj, a) for a in attrs)
497 return list(getattr(obj, a) for a in attrs)
443
498
444 @reraise_safe_exceptions
499 @reraise_safe_exceptions
445 def get_refs(self, wire):
500 def get_refs(self, wire):
446 repo = self._factory.repo(wire)
501 repo = self._factory.repo(wire)
447 result = {}
502 result = {}
448 for ref, sha in repo.refs.as_dict().items():
503 for ref, sha in repo.refs.as_dict().items():
449 peeled_sha = repo.get_peeled(ref)
504 peeled_sha = repo.get_peeled(ref)
450 result[ref] = peeled_sha
505 result[ref] = peeled_sha
451 return result
506 return result
452
507
453 @reraise_safe_exceptions
508 @reraise_safe_exceptions
454 def get_refs_path(self, wire):
509 def get_refs_path(self, wire):
455 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
456 return repo.refs.path
511 return repo.refs.path
457
512
458 @reraise_safe_exceptions
513 @reraise_safe_exceptions
459 def head(self, wire):
514 def head(self, wire):
460 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
461 return repo.head()
516 return repo.head()
462
517
463 @reraise_safe_exceptions
518 @reraise_safe_exceptions
464 def init(self, wire):
519 def init(self, wire):
465 repo_path = str_to_dulwich(wire['path'])
520 repo_path = str_to_dulwich(wire['path'])
466 self.repo = Repo.init(repo_path)
521 self.repo = Repo.init(repo_path)
467
522
468 @reraise_safe_exceptions
523 @reraise_safe_exceptions
469 def init_bare(self, wire):
524 def init_bare(self, wire):
470 repo_path = str_to_dulwich(wire['path'])
525 repo_path = str_to_dulwich(wire['path'])
471 self.repo = Repo.init_bare(repo_path)
526 self.repo = Repo.init_bare(repo_path)
472
527
473 @reraise_safe_exceptions
528 @reraise_safe_exceptions
474 def revision(self, wire, rev):
529 def revision(self, wire, rev):
475 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
476 obj = repo[rev]
531 obj = repo[rev]
477 obj_data = {
532 obj_data = {
478 'id': obj.id,
533 'id': obj.id,
479 }
534 }
480 try:
535 try:
481 obj_data['tree'] = obj.tree
536 obj_data['tree'] = obj.tree
482 except AttributeError:
537 except AttributeError:
483 pass
538 pass
484 return obj_data
539 return obj_data
485
540
@reraise_safe_exceptions
def commit_attribute(self, wire, rev, attr):
    """Return attribute ``attr`` of the object stored at ``rev``."""
    commit = self._factory.repo(wire)[rev]
    return getattr(commit, attr)
491
546
@reraise_safe_exceptions
def set_refs(self, wire, key, value):
    """Point ref ``key`` at ``value`` in the repository."""
    self._factory.repo(wire).refs[key] = value
496
551
@reraise_safe_exceptions
def remove_ref(self, wire, key):
    """Delete ref ``key`` from the repository."""
    del self._factory.repo(wire).refs[key]
501
556
@reraise_safe_exceptions
def tree_changes(self, wire, source_id, target_id):
    """List tree changes between two commits.

    ``source_id`` may be falsy, meaning "diff against the empty tree".
    """
    repo = self._factory.repo(wire)
    source_tree = repo[source_id].tree if source_id else None
    target_tree = repo[target_id].tree
    return list(repo.object_store.tree_changes(source_tree, target_tree))
509
564
@reraise_safe_exceptions
def tree_items(self, wire, tree_id):
    """Return (path, mode, sha, type_name) tuples for each tree entry."""
    repo = self._factory.repo(wire)
    tree = repo[tree_id]

    entries = []
    for entry in tree.iteritems():
        if FILE_MODE(entry.mode) == GIT_LINK:
            # submodule pointer -- the target object lives in another repo,
            # so we cannot look up its type locally
            entry_type = "link"
        else:
            entry_type = repo[entry.sha].type_name
        entries.append((entry.path, entry.mode, entry.sha, entry_type))
    return entries
527
582
@reraise_safe_exceptions
def update_server_info(self, wire):
    """Refresh the auxiliary files needed for dumb-HTTP serving."""
    update_server_info(self._factory.repo(wire))
532
587
@reraise_safe_exceptions
def discover_git_version(self):
    """Return the version string reported by the git executable."""
    stdout, _ = self.run_git_command(
        {}, ['--version'], _bare=True, _safe=True)
    prefix = 'git version'
    # strip the leading "git version" label when present
    version = stdout[len(prefix):] if stdout.startswith(prefix) else stdout
    return version.strip()
541
596
@reraise_safe_exceptions
def run_git_command(self, wire, cmd, **opts):
    """Run a git command for the repository referenced by ``wire``.

    Special keyword options (consumed here, never passed to subprocess):
      _bare     -- presence skips the default ``-c core.quotepath=false``
      _safe     -- presence means: on failure return ('', err), don't raise
      extra_env -- mapping merged into the subprocess environment

    Returns a (stdout, stderr) tuple of strings; raises
    exceptions.VcsException when the command cannot be started and
    ``_safe`` was not given.
    """
    path = wire.get('path', None)
    if path and os.path.isdir(path):
        opts['cwd'] = path

    # both flags are presence-based, matching how callers pass them
    bare_call = '_bare' in opts
    opts.pop('_bare', None)
    safe_call = '_safe' in opts
    opts.pop('_safe', None)

    _copts = [] if bare_call else ['-c', 'core.quotepath=false', ]

    gitenv = os.environ.copy()
    gitenv.update(opts.pop('extra_env', {}))
    # an inherited GIT_DIR would point git at the wrong repository
    gitenv.pop('GIT_DIR', None)
    gitenv['GIT_CONFIG_NOGLOBAL'] = '1'

    cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

    try:
        _opts = {'env': gitenv, 'shell': False}
        _opts.update(opts)
        chunker = subprocessio.SubprocessIOChunker(cmd, **_opts)
        return ''.join(chunker), ''.join(chunker.error)
    except (EnvironmentError, OSError) as err:
        tb_err = ("Couldn't run git command (%s).\n"
                  "Original error was:%s\n" % (cmd, err))
        log.exception(tb_err)
        if safe_call:
            return '', err
        raise exceptions.VcsException(tb_err)
583
638
584
639
def str_to_dulwich(value):
    """Decode ``value`` to unicode, as dulwich 0.10.1a requires."""
    return value.decode(settings.WIRE_ENCODING)
@@ -1,727 +1,727 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
37 InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
def make_ui_from_config(repo_config):
    """Build a fresh mercurial ``ui`` object populated from ``repo_config``.

    ``repo_config`` is an iterable of (section, option, value) triples.
    """
    baseui = ui.ui()

    # start from empty config stores so no ambient hgrc state leaks in
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # keep hgweb from printing to stdout
    baseui.setconfig('ui', 'quiet', 'true')

    # mercurial workers may try to install signal handlers from a
    # non-main thread, which raises ValueError -- pin to one worker
    baseui.setconfig('worker', 'numcpus', 1)

    # recent mercurial enables largefiles in a repo's hgrc when cloning
    # from a largefiles repo; unless our own config asks for it,
    # explicitly disable the extension so the repo hgrc cannot win
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
69
69
70
70
def reraise_safe_exceptions(func):
    """Decorator translating mercurial exceptions into vcsserver ones.

    Exceptions already tagged with ``_vcs_kind`` are re-raised as-is;
    anything else unexpected becomes UnhandledException.
    """
    from functools import wraps

    @wraps(func)  # fix: preserve the wrapped method's name/docstring
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired):
            raise_from_original(exceptions.AbortException)
        except RepoLookupError:
            raise_from_original(exceptions.LookupException)
        except RequirementError:
            raise_from_original(exceptions.RequirementException)
        except RepoError:
            raise_from_original(exceptions.VcsException)
        except LookupError:
            raise_from_original(exceptions.LookupException)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
92
92
93
93
class MercurialFactory(RepoFactory):
    """RepoFactory that produces mercurial ``localrepository`` objects."""

    def _create_config(self, config, hooks=True):
        """Build a baseui; with hooks=False strip rhodecode hook entries."""
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            # drop only the known rhodecode hooks, keep everything else
            config = [entry for entry in config
                      if not (entry[0] == 'hooks' and
                              entry[1] in hooks_to_clean)]
        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return localrepository(baseui, wire["path"], create)
114
114
115
115
116 class HgRemote(object):
116 class HgRemote(object):
117
117
def __init__(self, factory):
    """Store the repo factory and build the bulk-attribute dispatch map."""
    self._factory = factory

    # maps attribute names accepted by bulk_request() to handler methods;
    # keys are part of the wire protocol and must not be renamed
    self._bulk_methods = {
        "affected_files": self.ctx_files,
        "author": self.ctx_user,
        "branch": self.ctx_branch,
        "children": self.ctx_children,
        "date": self.ctx_date,
        "message": self.ctx_description,
        "parents": self.ctx_parents,
        "status": self.ctx_status,
        "_file_paths": self.ctx_list,
    }
132
132
@reraise_safe_exceptions
def discover_hg_version(self):
    """Return the version string of the mercurial library in use."""
    from mercurial import util
    return util.version()
137
137
@reraise_safe_exceptions
def archive_repo(self, archive_path, mtime, file_info, kind):
    """Write the given files into an archive of type ``kind``.

    ``kind`` is one of 'tgz', 'tbz2' or 'zip'; ``file_info`` yields
    (path, mode, is_link, content) tuples.
    """
    if kind == 'zip':
        archiver = archival.zipit(archive_path, mtime)
    elif kind in ('tgz', 'tbz2'):
        compression = 'gz' if kind == 'tgz' else 'bz2'
        archiver = archival.tarit(archive_path, mtime, compression)
    else:
        raise exceptions.ArchiveException(
            'Remote does not support: "%s".' % kind)

    for f_path, f_mode, f_is_link, f_content in file_info:
        archiver.addfile(f_path, f_mode, f_is_link, f_content)
    archiver.done()
153
153
@reraise_safe_exceptions
def bookmarks(self, wire):
    """Return the repository bookmarks as a plain serializable dict."""
    return dict(self._factory.repo(wire)._bookmarks)
158
158
@reraise_safe_exceptions
def branches(self, wire, normal, closed):
    """Return a branch-name -> tip map, filtered by open/closed state.

    ``normal`` selects open branches, ``closed`` selects closed ones;
    both may be True to include everything.
    """
    repo = self._factory.repo(wire)
    branch_tips = {}
    for name, _heads, tip, is_closed in repo.branchmap().iterbranches():
        # include a branch when its state matches a requested flag
        if (closed and is_closed) or (normal and not is_closed):
            branch_tips[name] = tip
    return branch_tips
171
171
@reraise_safe_exceptions
def bulk_request(self, wire, rev, pre_load):
    """Resolve several commit attributes for ``rev`` in one round trip.

    ``pre_load`` is an iterable of attribute names understood by
    ``self._bulk_methods``; raises VcsException on unknown names.
    """
    result = {}
    for attr in pre_load:
        try:
            method = self._bulk_methods[attr]
        except KeyError:
            # only a missing dispatch entry means "unknown attribute";
            # previously the handler call sat inside this try, so a
            # KeyError raised *inside* a handler was misreported too
            raise exceptions.VcsException(
                'Unknown bulk attribute: "%s"' % attr)
        result[attr] = method(wire, rev)
    return result
183
183
@reraise_safe_exceptions
def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
    """Clone ``source`` into ``dest``; optionally update the working dir."""
    baseui = self._factory._create_config(wire["config"], hooks=hooks)
    clone(baseui, source, dest, noupdate=not update_after_clone)
188
188
@reraise_safe_exceptions
def commitctx(
        self, wire, message, parents, commit_time, commit_timezone,
        user, files, extra, removed, updated):
    """Create a commit from in-memory file data and return its hex id.

    ``updated`` holds dicts with 'path', 'content' and 'mode' for added
    or changed files; ``removed`` lists paths to delete.
    """

    def _filectxfn(_repo, memctx, path):
        """Resolve ``path`` to a memfilectx for mercurial's commit machinery."""
        # returning None is mercurial's way of marking a node for removal
        if path in removed:
            return None

        # added/changed paths are looked up in the `updated` payload
        for node in updated:
            if node['path'] == path:
                return memfilectx(
                    _repo,
                    path=node['path'],
                    data=node['content'],
                    islink=False,
                    isexec=bool(node['mode'] & stat.S_IXUSR),
                    copied=False,
                    memctx=memctx)

        raise exceptions.AbortException(
            "Given path haven't been marked as added, "
            "changed or removed (%s)" % path)

    repo = self._factory.repo(wire)

    commit_ctx = memctx(
        repo=repo,
        parents=parents,
        text=message,
        files=files,
        filectxfn=_filectxfn,
        user=user,
        date=(commit_time, commit_timezone),
        extra=extra)

    return hex(repo.commitctx(commit_ctx))
237
237
@reraise_safe_exceptions
def ctx_branch(self, wire, revision):
    """Return the branch name of the given changeset."""
    return self._factory.repo(wire)[revision].branch()
243
243
@reraise_safe_exceptions
def ctx_children(self, wire, revision):
    """Return the revision numbers of the changeset's children."""
    ctx = self._factory.repo(wire)[revision]
    return [child.rev() for child in ctx.children()]
249
249
@reraise_safe_exceptions
def ctx_date(self, wire, revision):
    """Return the date tuple of the given changeset."""
    return self._factory.repo(wire)[revision].date()
255
255
@reraise_safe_exceptions
def ctx_description(self, wire, revision):
    """Return the commit message of the given changeset."""
    return self._factory.repo(wire)[revision].description()
261
261
@reraise_safe_exceptions
def ctx_diff(
        self, wire, revision, git=True, ignore_whitespace=True, context=3):
    """Return the changeset's diff against its parent as a list of hunks."""
    ctx = self._factory.repo(wire)[revision]
    diff_chunks = ctx.diff(
        git=git, ignore_whitespace=ignore_whitespace, context=context)
    return list(diff_chunks)
270
270
@reraise_safe_exceptions
def ctx_files(self, wire, revision):
    """Return the list of files touched by the given changeset."""
    return self._factory.repo(wire)[revision].files()
276
276
@reraise_safe_exceptions
def ctx_list(self, path, revision):
    """Return all file paths tracked in the given changeset.

    NOTE(review): the first parameter is named ``path`` but is passed to
    the factory exactly like ``wire`` in the sibling methods -- confirm
    against callers before renaming.
    """
    return list(self._factory.repo(path)[revision])
282
282
@reraise_safe_exceptions
def ctx_parents(self, wire, revision):
    """Return the revision numbers of the changeset's parents."""
    ctx = self._factory.repo(wire)[revision]
    return [parent.rev() for parent in ctx.parents()]
288
288
@reraise_safe_exceptions
def ctx_substate(self, wire, revision):
    """Return the subrepository state mapping of the given changeset."""
    return self._factory.repo(wire)[revision].substate
294
294
@reraise_safe_exceptions
def ctx_status(self, wire, revision):
    """Return the status of a changeset against its first parent."""
    repo = self._factory.repo(wire)
    ctx = repo[revision]
    status = repo[ctx.p1().node()].status(other=ctx.node())
    # mercurial's status object is an odd custom named tuple that does
    # not serialize correctly over Pyro; callers expect a plain list
    return list(status)
304
304
@reraise_safe_exceptions
def ctx_user(self, wire, revision):
    """Return the author of the given changeset."""
    return self._factory.repo(wire)[revision].user()
310
310
@reraise_safe_exceptions
def check_url(self, url, config):
    """Verify that ``url`` points at a reachable mercurial repository.

    Handles '<proto>+<url>' style prefixes (e.g. 'svn+http://...'); the
    prefix is only used to skip the hg-specific probe for svn. Raises
    exceptions.URLError when the target is unreachable or is not an hg
    repository; returns True otherwise.
    """
    _proto = None
    if '+' in url[:url.find('://')]:
        # split '<proto>+<real-url>' into its parts
        _proto = url[0:url.find('+')]
        url = url[url.find('+') + 1:]
    handlers = []
    url_obj = url_parser(url)
    test_uri, authinfo = url_obj.authinfo()
    # never leak credentials into log output
    url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
    url_obj.query = obfuscate_qs(url_obj.query)

    cleaned_uri = str(url_obj)
    log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

    if authinfo:
        # create a password manager
        passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passmgr.add_password(*authinfo)

        handlers.extend((httpbasicauthhandler(passmgr),
                         httpdigestauthhandler(passmgr)))

    o = urllib2.build_opener(*handlers)
    o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                    ('Accept', 'application/mercurial-0.1')]

    # issue a cheap 'between' command to probe plain reachability
    q = {"cmd": 'between'}
    q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
    qs = '?%s' % urllib.urlencode(q)
    cu = "%s%s" % (test_uri, qs)
    req = urllib2.Request(cu, None, {})

    try:
        log.debug("Trying to open URL %s", cleaned_uri)
        resp = o.open(req)
        if resp.code != 200:
            raise exceptions.URLError('Return Code is not 200')
    except Exception as e:
        log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
        # means it cannot be cloned
        raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

    # now check if it's a proper hg repo, but don't do it for svn
    try:
        # fixed: replaced the dead `if _proto == 'svn': pass / else:`
        # branch with a single positive condition
        if _proto != 'svn':
            # check for pure hg repos
            log.debug(
                "Verifying if URL is a Mercurial repository: %s",
                cleaned_uri)
            httppeer(make_ui_from_config(config), url).lookup('tip')
    except Exception as e:
        log.warning("URL is not a valid Mercurial repository: %s",
                    cleaned_uri)
        raise exceptions.URLError(
            "url [%s] does not look like an hg repo org_exc: %s"
            % (cleaned_uri, e))

    log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
    return True
373
373
@reraise_safe_exceptions
def diff(
        self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
        context):
    """Return the unified diff between two revisions as one string.

    ``file_filter`` is either falsy (diff everything) or a pair used to
    build a mercurial match object restricting the diff.
    """
    repo = self._factory.repo(wire)

    match_filter = (match(file_filter[0], '', [file_filter[1]])
                    if file_filter else file_filter)
    opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)

    try:
        chunks = patch.diff(
            repo, node1=rev1, node2=rev2, match=match_filter, opts=opts)
        return "".join(chunks)
    except RepoLookupError:
        raise exceptions.LookupException()
391
391
@reraise_safe_exceptions
def file_history(self, wire, revision, path, limit):
    """Return up to ``limit`` hex node ids from the history of ``path``.

    History is walked newest-first starting at ``revision``; a falsy
    ``limit`` means no limit.
    """
    repo = self._factory.repo(wire)
    ctx = repo[revision]
    fctx = ctx.filectx(path)

    def history_iter():
        # never yield entries newer than the starting revision
        limit_rev = fctx.rev()
        for obj in reversed(list(fctx.filelog())):
            obj = fctx.filectx(obj)
            if limit_rev >= obj.rev():
                yield obj

    history = []
    for cnt, obj in enumerate(history_iter()):
        if limit and cnt >= limit:
            break
        history.append(hex(obj.node()))

    # fixed: previously returned `[x for x in history]`, a pointless copy
    return history
413
413
@reraise_safe_exceptions
def file_history_untill(self, wire, revision, path, limit):
    """Return hex ids of the last ``limit`` filelog entries, newest first.

    NOTE(review): the historical 'untill' spelling is kept because the
    method name is part of the remote interface.
    """
    repo = self._factory.repo(wire)
    fctx = repo[revision].filectx(path)

    file_log = list(fctx.filelog())
    if limit:
        # keep only the newest `limit` entries
        file_log = file_log[-limit:]

    return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
426
426
427 @reraise_safe_exceptions
427 @reraise_safe_exceptions
428 def fctx_annotate(self, wire, revision, path):
428 def fctx_annotate(self, wire, revision, path):
429 repo = self._factory.repo(wire)
429 repo = self._factory.repo(wire)
430 ctx = repo[revision]
430 ctx = repo[revision]
431 fctx = ctx.filectx(path)
431 fctx = ctx.filectx(path)
432
432
433 result = []
433 result = []
434 for i, annotate_data in enumerate(fctx.annotate()):
434 for i, annotate_data in enumerate(fctx.annotate()):
435 ln_no = i + 1
435 ln_no = i + 1
436 node_info, content = annotate_data
436 node_info, content = annotate_data
437 sha = hex(node_info[0].node())
437 sha = hex(node_info[0].node())
438 result.append((ln_no, sha, content))
438 result.append((ln_no, sha, content))
439 return result
439 return result
440
440
441 @reraise_safe_exceptions
441 @reraise_safe_exceptions
442 def fctx_data(self, wire, revision, path):
442 def fctx_data(self, wire, revision, path):
443 repo = self._factory.repo(wire)
443 repo = self._factory.repo(wire)
444 ctx = repo[revision]
444 ctx = repo[revision]
445 fctx = ctx.filectx(path)
445 fctx = ctx.filectx(path)
446 return fctx.data()
446 return fctx.data()
447
447
448 @reraise_safe_exceptions
448 @reraise_safe_exceptions
449 def fctx_flags(self, wire, revision, path):
449 def fctx_flags(self, wire, revision, path):
450 repo = self._factory.repo(wire)
450 repo = self._factory.repo(wire)
451 ctx = repo[revision]
451 ctx = repo[revision]
452 fctx = ctx.filectx(path)
452 fctx = ctx.filectx(path)
453 return fctx.flags()
453 return fctx.flags()
454
454
455 @reraise_safe_exceptions
455 @reraise_safe_exceptions
456 def fctx_size(self, wire, revision, path):
456 def fctx_size(self, wire, revision, path):
457 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
458 ctx = repo[revision]
458 ctx = repo[revision]
459 fctx = ctx.filectx(path)
459 fctx = ctx.filectx(path)
460 return fctx.size()
460 return fctx.size()
461
461
462 @reraise_safe_exceptions
462 @reraise_safe_exceptions
463 def get_all_commit_ids(self, wire, name):
463 def get_all_commit_ids(self, wire, name):
464 repo = self._factory.repo(wire)
464 repo = self._factory.repo(wire)
465 revs = repo.filtered(name).changelog.index
465 revs = repo.filtered(name).changelog.index
466 return map(lambda x: hex(x[7]), revs)[:-1]
466 return map(lambda x: hex(x[7]), revs)[:-1]
467
467
468 @reraise_safe_exceptions
468 @reraise_safe_exceptions
469 def get_config_value(self, wire, section, name, untrusted=False):
469 def get_config_value(self, wire, section, name, untrusted=False):
470 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
471 return repo.ui.config(section, name, untrusted=untrusted)
471 return repo.ui.config(section, name, untrusted=untrusted)
472
472
473 @reraise_safe_exceptions
473 @reraise_safe_exceptions
474 def get_config_bool(self, wire, section, name, untrusted=False):
474 def get_config_bool(self, wire, section, name, untrusted=False):
475 repo = self._factory.repo(wire)
475 repo = self._factory.repo(wire)
476 return repo.ui.configbool(section, name, untrusted=untrusted)
476 return repo.ui.configbool(section, name, untrusted=untrusted)
477
477
478 @reraise_safe_exceptions
478 @reraise_safe_exceptions
479 def get_config_list(self, wire, section, name, untrusted=False):
479 def get_config_list(self, wire, section, name, untrusted=False):
480 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
481 return repo.ui.configlist(section, name, untrusted=untrusted)
481 return repo.ui.configlist(section, name, untrusted=untrusted)
482
482
483 @reraise_safe_exceptions
483 @reraise_safe_exceptions
484 def is_large_file(self, wire, path):
484 def is_large_file(self, wire, path):
485 return largefiles.lfutil.isstandin(path)
485 return largefiles.lfutil.isstandin(path)
486
486
487 @reraise_safe_exceptions
487 @reraise_safe_exceptions
488 def in_store(self, wire, sha):
488 def in_largefiles_store(self, wire, sha):
489 repo = self._factory.repo(wire)
489 repo = self._factory.repo(wire)
490 return largefiles.lfutil.instore(repo, sha)
490 return largefiles.lfutil.instore(repo, sha)
491
491
492 @reraise_safe_exceptions
492 @reraise_safe_exceptions
493 def in_user_cache(self, wire, sha):
493 def in_user_cache(self, wire, sha):
494 repo = self._factory.repo(wire)
494 repo = self._factory.repo(wire)
495 return largefiles.lfutil.inusercache(repo.ui, sha)
495 return largefiles.lfutil.inusercache(repo.ui, sha)
496
496
497 @reraise_safe_exceptions
497 @reraise_safe_exceptions
498 def store_path(self, wire, sha):
498 def store_path(self, wire, sha):
499 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
500 return largefiles.lfutil.storepath(repo, sha)
500 return largefiles.lfutil.storepath(repo, sha)
501
501
502 @reraise_safe_exceptions
502 @reraise_safe_exceptions
503 def link(self, wire, sha, path):
503 def link(self, wire, sha, path):
504 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
505 largefiles.lfutil.link(
505 largefiles.lfutil.link(
506 largefiles.lfutil.usercachepath(repo.ui, sha), path)
506 largefiles.lfutil.usercachepath(repo.ui, sha), path)
507
507
508 @reraise_safe_exceptions
508 @reraise_safe_exceptions
509 def localrepository(self, wire, create=False):
509 def localrepository(self, wire, create=False):
510 self._factory.repo(wire, create=create)
510 self._factory.repo(wire, create=create)
511
511
512 @reraise_safe_exceptions
512 @reraise_safe_exceptions
513 def lookup(self, wire, revision, both):
513 def lookup(self, wire, revision, both):
514 # TODO Paris: Ugly hack to "deserialize" long for msgpack
514 # TODO Paris: Ugly hack to "deserialize" long for msgpack
515 if isinstance(revision, float):
515 if isinstance(revision, float):
516 revision = long(revision)
516 revision = long(revision)
517 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
518 try:
518 try:
519 ctx = repo[revision]
519 ctx = repo[revision]
520 except RepoLookupError:
520 except RepoLookupError:
521 raise exceptions.LookupException(revision)
521 raise exceptions.LookupException(revision)
522 except LookupError as e:
522 except LookupError as e:
523 raise exceptions.LookupException(e.name)
523 raise exceptions.LookupException(e.name)
524
524
525 if not both:
525 if not both:
526 return ctx.hex()
526 return ctx.hex()
527
527
528 ctx = repo[ctx.hex()]
528 ctx = repo[ctx.hex()]
529 return ctx.hex(), ctx.rev()
529 return ctx.hex(), ctx.rev()
530
530
531 @reraise_safe_exceptions
531 @reraise_safe_exceptions
532 def pull(self, wire, url, commit_ids=None):
532 def pull(self, wire, url, commit_ids=None):
533 repo = self._factory.repo(wire)
533 repo = self._factory.repo(wire)
534 remote = peer(repo, {}, url)
534 remote = peer(repo, {}, url)
535 if commit_ids:
535 if commit_ids:
536 commit_ids = [bin(commit_id) for commit_id in commit_ids]
536 commit_ids = [bin(commit_id) for commit_id in commit_ids]
537
537
538 return exchange.pull(
538 return exchange.pull(
539 repo, remote, heads=commit_ids, force=None).cgresult
539 repo, remote, heads=commit_ids, force=None).cgresult
540
540
541 @reraise_safe_exceptions
541 @reraise_safe_exceptions
542 def revision(self, wire, rev):
542 def revision(self, wire, rev):
543 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
544 ctx = repo[rev]
544 ctx = repo[rev]
545 return ctx.rev()
545 return ctx.rev()
546
546
547 @reraise_safe_exceptions
547 @reraise_safe_exceptions
548 def rev_range(self, wire, filter):
548 def rev_range(self, wire, filter):
549 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
550 revisions = [rev for rev in revrange(repo, filter)]
550 revisions = [rev for rev in revrange(repo, filter)]
551 return revisions
551 return revisions
552
552
553 @reraise_safe_exceptions
553 @reraise_safe_exceptions
554 def rev_range_hash(self, wire, node):
554 def rev_range_hash(self, wire, node):
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556
556
557 def get_revs(repo, rev_opt):
557 def get_revs(repo, rev_opt):
558 if rev_opt:
558 if rev_opt:
559 revs = revrange(repo, rev_opt)
559 revs = revrange(repo, rev_opt)
560 if len(revs) == 0:
560 if len(revs) == 0:
561 return (nullrev, nullrev)
561 return (nullrev, nullrev)
562 return max(revs), min(revs)
562 return max(revs), min(revs)
563 else:
563 else:
564 return len(repo) - 1, 0
564 return len(repo) - 1, 0
565
565
566 stop, start = get_revs(repo, [node + ':'])
566 stop, start = get_revs(repo, [node + ':'])
567 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
567 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
568 return revs
568 return revs
569
569
570 @reraise_safe_exceptions
570 @reraise_safe_exceptions
571 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
571 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
572 other_path = kwargs.pop('other_path', None)
572 other_path = kwargs.pop('other_path', None)
573
573
574 # case when we want to compare two independent repositories
574 # case when we want to compare two independent repositories
575 if other_path and other_path != wire["path"]:
575 if other_path and other_path != wire["path"]:
576 baseui = self._factory._create_config(wire["config"])
576 baseui = self._factory._create_config(wire["config"])
577 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
577 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
578 else:
578 else:
579 repo = self._factory.repo(wire)
579 repo = self._factory.repo(wire)
580 return list(repo.revs(rev_spec, *args))
580 return list(repo.revs(rev_spec, *args))
581
581
582 @reraise_safe_exceptions
582 @reraise_safe_exceptions
583 def strip(self, wire, revision, update, backup):
583 def strip(self, wire, revision, update, backup):
584 repo = self._factory.repo(wire)
584 repo = self._factory.repo(wire)
585 ctx = repo[revision]
585 ctx = repo[revision]
586 hgext_strip(
586 hgext_strip(
587 repo.baseui, repo, ctx.node(), update=update, backup=backup)
587 repo.baseui, repo, ctx.node(), update=update, backup=backup)
588
588
589 @reraise_safe_exceptions
589 @reraise_safe_exceptions
590 def verify(self, wire,):
590 def verify(self, wire,):
591 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
592 baseui = self._factory._create_config(wire['config'])
592 baseui = self._factory._create_config(wire['config'])
593 baseui.setconfig('ui', 'quiet', 'false')
593 baseui.setconfig('ui', 'quiet', 'false')
594 output = io.BytesIO()
594 output = io.BytesIO()
595
595
596 def write(data, **unused_kwargs):
596 def write(data, **unused_kwargs):
597 output.write(data)
597 output.write(data)
598 baseui.write = write
598 baseui.write = write
599
599
600 repo.ui = baseui
600 repo.ui = baseui
601 verify.verify(repo)
601 verify.verify(repo)
602 return output.getvalue()
602 return output.getvalue()
603
603
604 @reraise_safe_exceptions
604 @reraise_safe_exceptions
605 def tag(self, wire, name, revision, message, local, user,
605 def tag(self, wire, name, revision, message, local, user,
606 tag_time, tag_timezone):
606 tag_time, tag_timezone):
607 repo = self._factory.repo(wire)
607 repo = self._factory.repo(wire)
608 ctx = repo[revision]
608 ctx = repo[revision]
609 node = ctx.node()
609 node = ctx.node()
610
610
611 date = (tag_time, tag_timezone)
611 date = (tag_time, tag_timezone)
612 try:
612 try:
613 repo.tag(name, node, message, local, user, date)
613 repo.tag(name, node, message, local, user, date)
614 except Abort as e:
614 except Abort as e:
615 log.exception("Tag operation aborted")
615 log.exception("Tag operation aborted")
616 # Exception can contain unicode which we convert
616 # Exception can contain unicode which we convert
617 raise exceptions.AbortException(repr(e))
617 raise exceptions.AbortException(repr(e))
618
618
619 @reraise_safe_exceptions
619 @reraise_safe_exceptions
620 def tags(self, wire):
620 def tags(self, wire):
621 repo = self._factory.repo(wire)
621 repo = self._factory.repo(wire)
622 return repo.tags()
622 return repo.tags()
623
623
624 @reraise_safe_exceptions
624 @reraise_safe_exceptions
625 def update(self, wire, node=None, clean=False):
625 def update(self, wire, node=None, clean=False):
626 repo = self._factory.repo(wire)
626 repo = self._factory.repo(wire)
627 baseui = self._factory._create_config(wire['config'])
627 baseui = self._factory._create_config(wire['config'])
628 commands.update(baseui, repo, node=node, clean=clean)
628 commands.update(baseui, repo, node=node, clean=clean)
629
629
630 @reraise_safe_exceptions
630 @reraise_safe_exceptions
631 def identify(self, wire):
631 def identify(self, wire):
632 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
633 baseui = self._factory._create_config(wire['config'])
633 baseui = self._factory._create_config(wire['config'])
634 output = io.BytesIO()
634 output = io.BytesIO()
635 baseui.write = output.write
635 baseui.write = output.write
636 # This is required to get a full node id
636 # This is required to get a full node id
637 baseui.debugflag = True
637 baseui.debugflag = True
638 commands.identify(baseui, repo, id=True)
638 commands.identify(baseui, repo, id=True)
639
639
640 return output.getvalue()
640 return output.getvalue()
641
641
642 @reraise_safe_exceptions
642 @reraise_safe_exceptions
643 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
643 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
644 hooks=True):
644 hooks=True):
645 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
646 baseui = self._factory._create_config(wire['config'], hooks=hooks)
646 baseui = self._factory._create_config(wire['config'], hooks=hooks)
647
647
648 # Mercurial internally has a lot of logic that checks ONLY if
648 # Mercurial internally has a lot of logic that checks ONLY if
649 # option is defined, we just pass those if they are defined then
649 # option is defined, we just pass those if they are defined then
650 opts = {}
650 opts = {}
651 if bookmark:
651 if bookmark:
652 opts['bookmark'] = bookmark
652 opts['bookmark'] = bookmark
653 if branch:
653 if branch:
654 opts['branch'] = branch
654 opts['branch'] = branch
655 if revision:
655 if revision:
656 opts['rev'] = revision
656 opts['rev'] = revision
657
657
658 commands.pull(baseui, repo, source, **opts)
658 commands.pull(baseui, repo, source, **opts)
659
659
660 @reraise_safe_exceptions
660 @reraise_safe_exceptions
661 def heads(self, wire, branch=None):
661 def heads(self, wire, branch=None):
662 repo = self._factory.repo(wire)
662 repo = self._factory.repo(wire)
663 baseui = self._factory._create_config(wire['config'])
663 baseui = self._factory._create_config(wire['config'])
664 output = io.BytesIO()
664 output = io.BytesIO()
665
665
666 def write(data, **unused_kwargs):
666 def write(data, **unused_kwargs):
667 output.write(data)
667 output.write(data)
668
668
669 baseui.write = write
669 baseui.write = write
670 if branch:
670 if branch:
671 args = [branch]
671 args = [branch]
672 else:
672 else:
673 args = []
673 args = []
674 commands.heads(baseui, repo, template='{node} ', *args)
674 commands.heads(baseui, repo, template='{node} ', *args)
675
675
676 return output.getvalue()
676 return output.getvalue()
677
677
678 @reraise_safe_exceptions
678 @reraise_safe_exceptions
679 def ancestor(self, wire, revision1, revision2):
679 def ancestor(self, wire, revision1, revision2):
680 repo = self._factory.repo(wire)
680 repo = self._factory.repo(wire)
681 changelog = repo.changelog
681 changelog = repo.changelog
682 lookup = repo.lookup
682 lookup = repo.lookup
683 a = changelog.ancestor(lookup(revision1), lookup(revision2))
683 a = changelog.ancestor(lookup(revision1), lookup(revision2))
684 return hex(a)
684 return hex(a)
685
685
686 @reraise_safe_exceptions
686 @reraise_safe_exceptions
687 def push(self, wire, revisions, dest_path, hooks=True,
687 def push(self, wire, revisions, dest_path, hooks=True,
688 push_branches=False):
688 push_branches=False):
689 repo = self._factory.repo(wire)
689 repo = self._factory.repo(wire)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
691 commands.push(baseui, repo, dest=dest_path, rev=revisions,
691 commands.push(baseui, repo, dest=dest_path, rev=revisions,
692 new_branch=push_branches)
692 new_branch=push_branches)
693
693
694 @reraise_safe_exceptions
694 @reraise_safe_exceptions
695 def merge(self, wire, revision):
695 def merge(self, wire, revision):
696 repo = self._factory.repo(wire)
696 repo = self._factory.repo(wire)
697 baseui = self._factory._create_config(wire['config'])
697 baseui = self._factory._create_config(wire['config'])
698 repo.ui.setconfig('ui', 'merge', 'internal:dump')
698 repo.ui.setconfig('ui', 'merge', 'internal:dump')
699
699
700 # In case of sub repositories are used mercurial prompts the user in
700 # In case of sub repositories are used mercurial prompts the user in
701 # case of merge conflicts or different sub repository sources. By
701 # case of merge conflicts or different sub repository sources. By
702 # setting the interactive flag to `False` mercurial doesn't prompt the
702 # setting the interactive flag to `False` mercurial doesn't prompt the
703 # used but instead uses a default value.
703 # used but instead uses a default value.
704 repo.ui.setconfig('ui', 'interactive', False)
704 repo.ui.setconfig('ui', 'interactive', False)
705
705
706 commands.merge(baseui, repo, rev=revision)
706 commands.merge(baseui, repo, rev=revision)
707
707
708 @reraise_safe_exceptions
708 @reraise_safe_exceptions
709 def commit(self, wire, message, username):
709 def commit(self, wire, message, username):
710 repo = self._factory.repo(wire)
710 repo = self._factory.repo(wire)
711 baseui = self._factory._create_config(wire['config'])
711 baseui = self._factory._create_config(wire['config'])
712 repo.ui.setconfig('ui', 'username', username)
712 repo.ui.setconfig('ui', 'username', username)
713 commands.commit(baseui, repo, message=message)
713 commands.commit(baseui, repo, message=message)
714
714
715 @reraise_safe_exceptions
715 @reraise_safe_exceptions
716 def rebase(self, wire, source=None, dest=None, abort=False):
716 def rebase(self, wire, source=None, dest=None, abort=False):
717 repo = self._factory.repo(wire)
717 repo = self._factory.repo(wire)
718 baseui = self._factory._create_config(wire['config'])
718 baseui = self._factory._create_config(wire['config'])
719 repo.ui.setconfig('ui', 'merge', 'internal:dump')
719 repo.ui.setconfig('ui', 'merge', 'internal:dump')
720 rebase.rebase(
720 rebase.rebase(
721 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
721 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
722
722
723 @reraise_safe_exceptions
723 @reraise_safe_exceptions
724 def bookmark(self, wire, bookmark, revision=None):
724 def bookmark(self, wire, bookmark, revision=None):
725 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
726 baseui = self._factory._create_config(wire['config'])
726 baseui = self._factory._create_config(wire['config'])
727 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
727 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,640 +1,644 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26
26
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.delta
29 import svn.delta
30 import svn.diff
30 import svn.diff
31 import svn.fs
31 import svn.fs
32 import svn.repos
32 import svn.repos
33
33
34 from vcsserver import svn_diff
34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory, raise_from_original
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
# Set of svn compatible version flags.
# Compare with subversion/svnadmin/svnadmin.c
svn_compatible_versions = {
    'pre-1.4-compatible',
    'pre-1.5-compatible',
    'pre-1.6-compatible',
    'pre-1.8-compatible',
}
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except Exception as e:
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
58 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in hg remote call")
59 log.exception("Unhandled exception in hg remote call")
60 raise_from_original(exceptions.UnhandledException)
60 raise_from_original(exceptions.UnhandledException)
61 raise
61 raise
62 return wrapper
62 return wrapper
63
63
64
64
class SubversionFactory(RepoFactory):
    """Factory creating Subversion repository objects from a wire dict."""

    def _create_repo(self, wire, create, compatible_version):
        """Open (or, with *create*, initialise) the repo at the wire path.

        Raises on an unknown *compatible_version* flag.
        """
        path = svn.core.svn_path_canonicalize(wire['path'])
        if not create:
            return svn.repos.open(path)

        fs_config = {}
        if compatible_version:
            if compatible_version not in svn_compatible_versions:
                raise Exception('Unknown SVN compatible version "{}"'
                                .format(compatible_version))
            log.debug('Create SVN repo with compatible version "%s"',
                      compatible_version)
            fs_config[compatible_version] = '1'
        return svn.repos.create(path, "", "", None, fs_config)

    def repo(self, wire, create=False, compatible_version=None):
        """Return the repo for *wire*, deferring creation to ``_repo``."""
        def create_new_repo():
            return self._create_repo(wire, create, compatible_version)

        return self._repo(wire, create_new_repo)
88
88
89
89
90
90
# Map svn node kinds to the node-type strings used by vcsserver.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
95
95
96
96
97 class SvnRemote(object):
97 class SvnRemote(object):
98
98
99 def __init__(self, factory, hg_factory=None):
99 def __init__(self, factory, hg_factory=None):
100 self._factory = factory
100 self._factory = factory
101 # TODO: Remove once we do not use internal Mercurial objects anymore
101 # TODO: Remove once we do not use internal Mercurial objects anymore
102 # for subversion
102 # for subversion
103 self._hg_factory = hg_factory
103 self._hg_factory = hg_factory
104
104
105 @reraise_safe_exceptions
105 @reraise_safe_exceptions
106 def discover_svn_version(self):
106 def discover_svn_version(self):
107 try:
107 try:
108 import svn.core
108 import svn.core
109 svn_ver = svn.core.SVN_VERSION
109 svn_ver = svn.core.SVN_VERSION
110 except ImportError:
110 except ImportError:
111 svn_ver = None
111 svn_ver = None
112 return svn_ver
112 return svn_ver
113
113
114 def check_url(self, url, config_items):
114 def check_url(self, url, config_items):
115 # this can throw exception if not installed, but we detect this
115 # this can throw exception if not installed, but we detect this
116 from hgsubversion import svnrepo
116 from hgsubversion import svnrepo
117
117
118 baseui = self._hg_factory._create_config(config_items)
118 baseui = self._hg_factory._create_config(config_items)
119 # uuid function get's only valid UUID from proper repo, else
119 # uuid function get's only valid UUID from proper repo, else
120 # throws exception
120 # throws exception
121 try:
121 try:
122 svnrepo.svnremoterepo(baseui, url).svn.uuid
122 svnrepo.svnremoterepo(baseui, url).svn.uuid
123 except:
123 except:
124 log.debug("Invalid svn url: %s", url)
124 log.debug("Invalid svn url: %s", url)
125 raise URLError(
125 raise URLError(
126 '"%s" is not a valid Subversion source url.' % (url, ))
126 '"%s" is not a valid Subversion source url.' % (url, ))
127 return True
127 return True
128
128
129 def is_path_valid_repository(self, wire, path):
129 def is_path_valid_repository(self, wire, path):
130 try:
130 try:
131 svn.repos.open(path)
131 svn.repos.open(path)
132 except svn.core.SubversionException:
132 except svn.core.SubversionException:
133 log.debug("Invalid Subversion path %s", path)
133 log.debug("Invalid Subversion path %s", path)
134 return False
134 return False
135 return True
135 return True
136
136
137 def lookup(self, wire, revision):
137 def lookup(self, wire, revision):
138 if revision not in [-1, None, 'HEAD']:
138 if revision not in [-1, None, 'HEAD']:
139 raise NotImplementedError
139 raise NotImplementedError
140 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
141 fs_ptr = svn.repos.fs(repo)
141 fs_ptr = svn.repos.fs(repo)
142 head = svn.fs.youngest_rev(fs_ptr)
142 head = svn.fs.youngest_rev(fs_ptr)
143 return head
143 return head
144
144
145 def lookup_interval(self, wire, start_ts, end_ts):
145 def lookup_interval(self, wire, start_ts, end_ts):
146 repo = self._factory.repo(wire)
146 repo = self._factory.repo(wire)
147 fsobj = svn.repos.fs(repo)
147 fsobj = svn.repos.fs(repo)
148 start_rev = None
148 start_rev = None
149 end_rev = None
149 end_rev = None
150 if start_ts:
150 if start_ts:
151 start_ts_svn = apr_time_t(start_ts)
151 start_ts_svn = apr_time_t(start_ts)
152 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
152 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
153 else:
153 else:
154 start_rev = 1
154 start_rev = 1
155 if end_ts:
155 if end_ts:
156 end_ts_svn = apr_time_t(end_ts)
156 end_ts_svn = apr_time_t(end_ts)
157 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
157 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
158 else:
158 else:
159 end_rev = svn.fs.youngest_rev(fsobj)
159 end_rev = svn.fs.youngest_rev(fsobj)
160 return start_rev, end_rev
160 return start_rev, end_rev
161
161
162 def revision_properties(self, wire, revision):
162 def revision_properties(self, wire, revision):
163 repo = self._factory.repo(wire)
163 repo = self._factory.repo(wire)
164 fs_ptr = svn.repos.fs(repo)
164 fs_ptr = svn.repos.fs(repo)
165 return svn.fs.revision_proplist(fs_ptr, revision)
165 return svn.fs.revision_proplist(fs_ptr, revision)
166
166
167 def revision_changes(self, wire, revision):
167 def revision_changes(self, wire, revision):
168
168
169 repo = self._factory.repo(wire)
169 repo = self._factory.repo(wire)
170 fsobj = svn.repos.fs(repo)
170 fsobj = svn.repos.fs(repo)
171 rev_root = svn.fs.revision_root(fsobj, revision)
171 rev_root = svn.fs.revision_root(fsobj, revision)
172
172
173 editor = svn.repos.ChangeCollector(fsobj, rev_root)
173 editor = svn.repos.ChangeCollector(fsobj, rev_root)
174 editor_ptr, editor_baton = svn.delta.make_editor(editor)
174 editor_ptr, editor_baton = svn.delta.make_editor(editor)
175 base_dir = ""
175 base_dir = ""
176 send_deltas = False
176 send_deltas = False
177 svn.repos.replay2(
177 svn.repos.replay2(
178 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
178 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
179 editor_ptr, editor_baton, None)
179 editor_ptr, editor_baton, None)
180
180
181 added = []
181 added = []
182 changed = []
182 changed = []
183 removed = []
183 removed = []
184
184
185 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
185 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
186 for path, change in editor.changes.iteritems():
186 for path, change in editor.changes.iteritems():
187 # TODO: Decide what to do with directory nodes. Subversion can add
187 # TODO: Decide what to do with directory nodes. Subversion can add
188 # empty directories.
188 # empty directories.
189
189
190 if change.item_kind == svn.core.svn_node_dir:
190 if change.item_kind == svn.core.svn_node_dir:
191 continue
191 continue
192 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
192 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
193 added.append(path)
193 added.append(path)
194 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
194 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
195 svn.repos.CHANGE_ACTION_REPLACE]:
195 svn.repos.CHANGE_ACTION_REPLACE]:
196 changed.append(path)
196 changed.append(path)
197 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
197 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
198 removed.append(path)
198 removed.append(path)
199 else:
199 else:
200 raise NotImplementedError(
200 raise NotImplementedError(
201 "Action %s not supported on path %s" % (
201 "Action %s not supported on path %s" % (
202 change.action, path))
202 change.action, path))
203
203
204 changes = {
204 changes = {
205 'added': added,
205 'added': added,
206 'changed': changed,
206 'changed': changed,
207 'removed': removed,
207 'removed': removed,
208 }
208 }
209 return changes
209 return changes
210
210
211 def node_history(self, wire, path, revision, limit):
211 def node_history(self, wire, path, revision, limit):
212 cross_copies = False
212 cross_copies = False
213 repo = self._factory.repo(wire)
213 repo = self._factory.repo(wire)
214 fsobj = svn.repos.fs(repo)
214 fsobj = svn.repos.fs(repo)
215 rev_root = svn.fs.revision_root(fsobj, revision)
215 rev_root = svn.fs.revision_root(fsobj, revision)
216
216
217 history_revisions = []
217 history_revisions = []
218 history = svn.fs.node_history(rev_root, path)
218 history = svn.fs.node_history(rev_root, path)
219 history = svn.fs.history_prev(history, cross_copies)
219 history = svn.fs.history_prev(history, cross_copies)
220 while history:
220 while history:
221 __, node_revision = svn.fs.history_location(history)
221 __, node_revision = svn.fs.history_location(history)
222 history_revisions.append(node_revision)
222 history_revisions.append(node_revision)
223 if limit and len(history_revisions) >= limit:
223 if limit and len(history_revisions) >= limit:
224 break
224 break
225 history = svn.fs.history_prev(history, cross_copies)
225 history = svn.fs.history_prev(history, cross_copies)
226 return history_revisions
226 return history_revisions
227
227
228 def node_properties(self, wire, path, revision):
228 def node_properties(self, wire, path, revision):
229 repo = self._factory.repo(wire)
229 repo = self._factory.repo(wire)
230 fsobj = svn.repos.fs(repo)
230 fsobj = svn.repos.fs(repo)
231 rev_root = svn.fs.revision_root(fsobj, revision)
231 rev_root = svn.fs.revision_root(fsobj, revision)
232 return svn.fs.node_proplist(rev_root, path)
232 return svn.fs.node_proplist(rev_root, path)
233
233
234 def file_annotate(self, wire, path, revision):
234 def file_annotate(self, wire, path, revision):
235 abs_path = 'file://' + urllib.pathname2url(
235 abs_path = 'file://' + urllib.pathname2url(
236 vcspath.join(wire['path'], path))
236 vcspath.join(wire['path'], path))
237 file_uri = svn.core.svn_path_canonicalize(abs_path)
237 file_uri = svn.core.svn_path_canonicalize(abs_path)
238
238
239 start_rev = svn_opt_revision_value_t(0)
239 start_rev = svn_opt_revision_value_t(0)
240 peg_rev = svn_opt_revision_value_t(revision)
240 peg_rev = svn_opt_revision_value_t(revision)
241 end_rev = peg_rev
241 end_rev = peg_rev
242
242
243 annotations = []
243 annotations = []
244
244
245 def receiver(line_no, revision, author, date, line, pool):
245 def receiver(line_no, revision, author, date, line, pool):
246 annotations.append((line_no, revision, line))
246 annotations.append((line_no, revision, line))
247
247
248 # TODO: Cannot use blame5, missing typemap function in the swig code
248 # TODO: Cannot use blame5, missing typemap function in the swig code
249 try:
249 try:
250 svn.client.blame2(
250 svn.client.blame2(
251 file_uri, peg_rev, start_rev, end_rev,
251 file_uri, peg_rev, start_rev, end_rev,
252 receiver, svn.client.create_context())
252 receiver, svn.client.create_context())
253 except svn.core.SubversionException as exc:
253 except svn.core.SubversionException as exc:
254 log.exception("Error during blame operation.")
254 log.exception("Error during blame operation.")
255 raise Exception(
255 raise Exception(
256 "Blame not supported or file does not exist at path %s. "
256 "Blame not supported or file does not exist at path %s. "
257 "Error %s." % (path, exc))
257 "Error %s." % (path, exc))
258
258
259 return annotations
259 return annotations
260
260
261 def get_node_type(self, wire, path, rev=None):
261 def get_node_type(self, wire, path, rev=None):
262 repo = self._factory.repo(wire)
262 repo = self._factory.repo(wire)
263 fs_ptr = svn.repos.fs(repo)
263 fs_ptr = svn.repos.fs(repo)
264 if rev is None:
264 if rev is None:
265 rev = svn.fs.youngest_rev(fs_ptr)
265 rev = svn.fs.youngest_rev(fs_ptr)
266 root = svn.fs.revision_root(fs_ptr, rev)
266 root = svn.fs.revision_root(fs_ptr, rev)
267 node = svn.fs.check_path(root, path)
267 node = svn.fs.check_path(root, path)
268 return NODE_TYPE_MAPPING.get(node, None)
268 return NODE_TYPE_MAPPING.get(node, None)
269
269
270 def get_nodes(self, wire, path, revision=None):
270 def get_nodes(self, wire, path, revision=None):
271 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
272 fsobj = svn.repos.fs(repo)
272 fsobj = svn.repos.fs(repo)
273 if revision is None:
273 if revision is None:
274 revision = svn.fs.youngest_rev(fsobj)
274 revision = svn.fs.youngest_rev(fsobj)
275 root = svn.fs.revision_root(fsobj, revision)
275 root = svn.fs.revision_root(fsobj, revision)
276 entries = svn.fs.dir_entries(root, path)
276 entries = svn.fs.dir_entries(root, path)
277 result = []
277 result = []
278 for entry_path, entry_info in entries.iteritems():
278 for entry_path, entry_info in entries.iteritems():
279 result.append(
279 result.append(
280 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
280 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
281 return result
281 return result
282
282
283 def get_file_content(self, wire, path, rev=None):
283 def get_file_content(self, wire, path, rev=None):
284 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
285 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
286 if rev is None:
286 if rev is None:
287 rev = svn.fs.youngest_revision(fsobj)
287 rev = svn.fs.youngest_revision(fsobj)
288 root = svn.fs.revision_root(fsobj, rev)
288 root = svn.fs.revision_root(fsobj, rev)
289 content = svn.core.Stream(svn.fs.file_contents(root, path))
289 content = svn.core.Stream(svn.fs.file_contents(root, path))
290 return content.read()
290 return content.read()
291
291
292 def get_file_size(self, wire, path, revision=None):
292 def get_file_size(self, wire, path, revision=None):
293 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
294 fsobj = svn.repos.fs(repo)
294 fsobj = svn.repos.fs(repo)
295 if revision is None:
295 if revision is None:
296 revision = svn.fs.youngest_revision(fsobj)
296 revision = svn.fs.youngest_revision(fsobj)
297 root = svn.fs.revision_root(fsobj, revision)
297 root = svn.fs.revision_root(fsobj, revision)
298 size = svn.fs.file_length(root, path)
298 size = svn.fs.file_length(root, path)
299 return size
299 return size
300
300
301 def create_repository(self, wire, compatible_version=None):
301 def create_repository(self, wire, compatible_version=None):
302 log.info('Creating Subversion repository in path "%s"', wire['path'])
302 log.info('Creating Subversion repository in path "%s"', wire['path'])
303 self._factory.repo(wire, create=True,
303 self._factory.repo(wire, create=True,
304 compatible_version=compatible_version)
304 compatible_version=compatible_version)
305
305
306 def import_remote_repository(self, wire, src_url):
306 def import_remote_repository(self, wire, src_url):
307 repo_path = wire['path']
307 repo_path = wire['path']
308 if not self.is_path_valid_repository(wire, repo_path):
308 if not self.is_path_valid_repository(wire, repo_path):
309 raise Exception(
309 raise Exception(
310 "Path %s is not a valid Subversion repository." % repo_path)
310 "Path %s is not a valid Subversion repository." % repo_path)
311 # TODO: johbo: URL checks ?
311 # TODO: johbo: URL checks ?
312 rdump = subprocess.Popen(
312 rdump = subprocess.Popen(
313 ['svnrdump', 'dump', '--non-interactive', src_url],
313 ['svnrdump', 'dump', '--non-interactive', src_url],
314 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
314 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
315 load = subprocess.Popen(
315 load = subprocess.Popen(
316 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
316 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
317
317
318 # TODO: johbo: This can be a very long operation, might be better
318 # TODO: johbo: This can be a very long operation, might be better
319 # to track some kind of status and provide an api to check if the
319 # to track some kind of status and provide an api to check if the
320 # import is done.
320 # import is done.
321 rdump.wait()
321 rdump.wait()
322 load.wait()
322 load.wait()
323
323
324 if rdump.returncode != 0:
324 if rdump.returncode != 0:
325 errors = rdump.stderr.read()
325 errors = rdump.stderr.read()
326 log.error('svnrdump dump failed: statuscode %s: message: %s',
326 log.error('svnrdump dump failed: statuscode %s: message: %s',
327 rdump.returncode, errors)
327 rdump.returncode, errors)
328 reason = 'UNKNOWN'
328 reason = 'UNKNOWN'
329 if 'svnrdump: E230001:' in errors:
329 if 'svnrdump: E230001:' in errors:
330 reason = 'INVALID_CERTIFICATE'
330 reason = 'INVALID_CERTIFICATE'
331 raise Exception(
331 raise Exception(
332 'Failed to dump the remote repository from %s.' % src_url,
332 'Failed to dump the remote repository from %s.' % src_url,
333 reason)
333 reason)
334 if load.returncode != 0:
334 if load.returncode != 0:
335 raise Exception(
335 raise Exception(
336 'Failed to load the dump of remote repository from %s.' %
336 'Failed to load the dump of remote repository from %s.' %
337 (src_url, ))
337 (src_url, ))
338
338
339 def commit(self, wire, message, author, timestamp, updated, removed):
339 def commit(self, wire, message, author, timestamp, updated, removed):
340 assert isinstance(message, str)
340 assert isinstance(message, str)
341 assert isinstance(author, str)
341 assert isinstance(author, str)
342
342
343 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
344 fsobj = svn.repos.fs(repo)
344 fsobj = svn.repos.fs(repo)
345
345
346 rev = svn.fs.youngest_rev(fsobj)
346 rev = svn.fs.youngest_rev(fsobj)
347 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
347 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
348 txn_root = svn.fs.txn_root(txn)
348 txn_root = svn.fs.txn_root(txn)
349
349
350 for node in updated:
350 for node in updated:
351 TxnNodeProcessor(node, txn_root).update()
351 TxnNodeProcessor(node, txn_root).update()
352 for node in removed:
352 for node in removed:
353 TxnNodeProcessor(node, txn_root).remove()
353 TxnNodeProcessor(node, txn_root).remove()
354
354
355 commit_id = svn.repos.fs_commit_txn(repo, txn)
355 commit_id = svn.repos.fs_commit_txn(repo, txn)
356
356
357 if timestamp:
357 if timestamp:
358 apr_time = apr_time_t(timestamp)
358 apr_time = apr_time_t(timestamp)
359 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
359 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
360 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
360 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
361
361
362 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
362 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
363 return commit_id
363 return commit_id
364
364
365 def diff(self, wire, rev1, rev2, path1=None, path2=None,
365 def diff(self, wire, rev1, rev2, path1=None, path2=None,
366 ignore_whitespace=False, context=3):
366 ignore_whitespace=False, context=3):
367
367
368 wire.update(cache=False)
368 wire.update(cache=False)
369 repo = self._factory.repo(wire)
369 repo = self._factory.repo(wire)
370 diff_creator = SvnDiffer(
370 diff_creator = SvnDiffer(
371 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
371 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
372 try:
372 try:
373 return diff_creator.generate_diff()
373 return diff_creator.generate_diff()
374 except svn.core.SubversionException as e:
374 except svn.core.SubversionException as e:
375 log.exception(
375 log.exception(
376 "Error during diff operation operation. "
376 "Error during diff operation operation. "
377 "Path might not exist %s, %s" % (path1, path2))
377 "Path might not exist %s, %s" % (path1, path2))
378 return ""
378 return ""
379
379
    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # Largefiles are a Mercurial/Git-LFS concept; Subversion has no
        # equivalent, so no path is ever reported as a largefile.
        return False
383
380
384
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set per-node while generating: True once the current target node's
    # svn:mime-type marks it as non-text.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        """
        Prepare revision roots and node kinds for the source (old) and
        target (new) side of the diff. `tgt_path`/`src_path` may be
        empty; an empty source path falls back to the target path.
        """
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # A file cannot be diffed against a directory; missing nodes
        # (svn_node_none) are fine since they represent add/delete.
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the full diff text for the configured node pair."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect per-node changes between the two roots, then emit one
        # file diff per changed node.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # Derive add/delete from whether either side of the node exists.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """
        Write a git-style unified diff for one node into `buf`.
        `change` is "add", "delete" or None (modification).
        """
        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Prefer the target side; fall back to the source side when the
        # node no longer exists in the target root (deleted files).
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """
        Return the node's content split into lines (keeping line ends),
        or an empty list for binary content and non-file nodes.
        """
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
544
541
545
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions

    Used as a delta editor with `svn.repos.dir_delta2`; each callback
    appends a ``(path, kind, action)`` tuple to `self.changes`.
    """

    def __init__(self):
        # List of (path, kind-or-None, action) tuples in visit order.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        # Kind is unknown for deletions, hence None.
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        # An opened file means its content or properties changed.
        self.changes.append((path, 'file', 'change'))
564
561
565
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for `svn.repos.dir_delta2` that permits every path."""
    return True
564
568
565
569
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # `node` is a dict with at least 'path' and 'content' keys and an
        # optional 'properties' mapping — TODO confirm full schema against
        # callers of SvnRemote.commit.
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create or overwrite the node inside the transaction."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up until an existing directory is found, then create the
        # missing ones top-down.
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        # Replace the whole content via a text delta against an empty base.
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
626
623
627
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds) into APR timestamp type
    apr_time_t, which counts microseconds.
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
629
633
630
634
def svn_opt_revision_value_t(num):
    """
    Wrap revision number `num` in a `svn_opt_revision_t` structure of
    kind `svn_opt_revision_number`, as expected by the svn client API.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = rev_value
    return rev
General Comments 0
You need to be logged in to leave comments. Login now