backends: implemented functions for fetching backend versions via remote calls....
marcink - r101:62999e0d default
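This commit adds version-discovery methods to both backend remotes: GitRemote.discover_git_version() shells out to `git --version` through run_git_command (with _safe=True, so a failing call yields an empty string instead of raising) and strips the leading "git version" prefix, while HgRemote.discover_hg_version() simply returns mercurial.util.version(). A rough usage sketch follows; the wiring of the remote objects is not part of this diff, so git_remote and hg_remote are assumed to be already-constructed GitRemote and HgRemote instances:

    # Hypothetical caller -- assumes git_remote / hg_remote are the GitRemote and
    # HgRemote instances the VCSServer exposes; their construction is outside this diff.
    import logging

    log = logging.getLogger(__name__)

    def log_backend_versions(git_remote, hg_remote):
        git_version = git_remote.discover_git_version()  # version string with the "git version" prefix removed
        hg_version = hg_remote.discover_hg_version()     # e.g. the string returned by mercurial.util.version()
        log.info("git backend version: %s", git_version.strip())
        log.info("hg backend version: %s", hg_version)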
@@ -1,573 +1,576 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2016 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory
38 from vcsserver.base import RepoFactory
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url, httpbasicauthhandler, httpdigestauthhandler)
41
41
42
42
43 DIR_STAT = stat.S_IFDIR
43 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
44 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
45 GIT_LINK = objects.S_IFGITLINK
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def reraise_safe_exceptions(func):
50 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
51 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
52 @wraps(func)
53 def wrapper(*args, **kwargs):
53 def wrapper(*args, **kwargs):
54 try:
54 try:
55 return func(*args, **kwargs)
55 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
57 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
58 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
59 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
60 raise exceptions.VcsException(e.message)
61 return wrapper
61 return wrapper
62
62
63
63
64 class Repo(DulwichRepo):
64 class Repo(DulwichRepo):
65 """
65 """
66 A wrapper for dulwich Repo class.
66 A wrapper for dulwich Repo class.
67
67
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 "Too many open files" error. We need to close all opened file descriptors
69 "Too many open files" error. We need to close all opened file descriptors
70 once the repo object is destroyed.
70 once the repo object is destroyed.
71
71
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 to 0.12.0 +
73 to 0.12.0 +
74 """
74 """
75 def __del__(self):
75 def __del__(self):
76 if hasattr(self, 'object_store'):
76 if hasattr(self, 'object_store'):
77 self.close()
77 self.close()
78
78
79
79
80 class GitFactory(RepoFactory):
80 class GitFactory(RepoFactory):
81
81
82 def _create_repo(self, wire, create):
82 def _create_repo(self, wire, create):
83 repo_path = str_to_dulwich(wire['path'])
83 repo_path = str_to_dulwich(wire['path'])
84 return Repo(repo_path)
84 return Repo(repo_path)
85
85
86
86
87 class GitRemote(object):
87 class GitRemote(object):
88
88
89 def __init__(self, factory):
89 def __init__(self, factory):
90 self._factory = factory
90 self._factory = factory
91
91
92 self._bulk_methods = {
92 self._bulk_methods = {
93 "author": self.commit_attribute,
93 "author": self.commit_attribute,
94 "date": self.get_object_attrs,
94 "date": self.get_object_attrs,
95 "message": self.commit_attribute,
95 "message": self.commit_attribute,
96 "parents": self.commit_attribute,
96 "parents": self.commit_attribute,
97 "_commit": self.revision,
97 "_commit": self.revision,
98 }
98 }
99
99
100 def _assign_ref(self, wire, ref, commit_id):
100 def _assign_ref(self, wire, ref, commit_id):
101 repo = self._factory.repo(wire)
101 repo = self._factory.repo(wire)
102 repo[ref] = commit_id
102 repo[ref] = commit_id
103
103
104 @reraise_safe_exceptions
104 @reraise_safe_exceptions
105 def add_object(self, wire, content):
105 def add_object(self, wire, content):
106 repo = self._factory.repo(wire)
106 repo = self._factory.repo(wire)
107 blob = objects.Blob()
107 blob = objects.Blob()
108 blob.set_raw_string(content)
108 blob.set_raw_string(content)
109 repo.object_store.add_object(blob)
109 repo.object_store.add_object(blob)
110 return blob.id
110 return blob.id
111
111
112 @reraise_safe_exceptions
112 @reraise_safe_exceptions
113 def assert_correct_path(self, wire):
113 def assert_correct_path(self, wire):
114 try:
114 try:
115 self._factory.repo(wire)
115 self._factory.repo(wire)
116 except NotGitRepository as e:
116 except NotGitRepository as e:
117 # Exception can contain unicode which we convert
117 # Exception can contain unicode which we convert
118 raise exceptions.AbortException(repr(e))
118 raise exceptions.AbortException(repr(e))
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def bare(self, wire):
121 def bare(self, wire):
122 repo = self._factory.repo(wire)
122 repo = self._factory.repo(wire)
123 return repo.bare
123 return repo.bare
124
124
125 @reraise_safe_exceptions
125 @reraise_safe_exceptions
126 def blob_as_pretty_string(self, wire, sha):
126 def blob_as_pretty_string(self, wire, sha):
127 repo = self._factory.repo(wire)
127 repo = self._factory.repo(wire)
128 return repo[sha].as_pretty_string()
128 return repo[sha].as_pretty_string()
129
129
130 @reraise_safe_exceptions
130 @reraise_safe_exceptions
131 def blob_raw_length(self, wire, sha):
131 def blob_raw_length(self, wire, sha):
132 repo = self._factory.repo(wire)
132 repo = self._factory.repo(wire)
133 blob = repo[sha]
133 blob = repo[sha]
134 return blob.raw_length()
134 return blob.raw_length()
135
135
136 @reraise_safe_exceptions
136 @reraise_safe_exceptions
137 def bulk_request(self, wire, rev, pre_load):
137 def bulk_request(self, wire, rev, pre_load):
138 result = {}
138 result = {}
139 for attr in pre_load:
139 for attr in pre_load:
140 try:
140 try:
141 method = self._bulk_methods[attr]
141 method = self._bulk_methods[attr]
142 args = [wire, rev]
142 args = [wire, rev]
143 if attr == "date":
143 if attr == "date":
144 args.extend(["commit_time", "commit_timezone"])
144 args.extend(["commit_time", "commit_timezone"])
145 elif attr in ["author", "message", "parents"]:
145 elif attr in ["author", "message", "parents"]:
146 args.append(attr)
146 args.append(attr)
147 result[attr] = method(*args)
147 result[attr] = method(*args)
148 except KeyError:
148 except KeyError:
149 raise exceptions.VcsException(
149 raise exceptions.VcsException(
150 "Unknown bulk attribute: %s" % attr)
150 "Unknown bulk attribute: %s" % attr)
151 return result
151 return result
152
152
153 def _build_opener(self, url):
153 def _build_opener(self, url):
154 handlers = []
154 handlers = []
155 url_obj = hg_url(url)
155 url_obj = hg_url(url)
156 _, authinfo = url_obj.authinfo()
156 _, authinfo = url_obj.authinfo()
157
157
158 if authinfo:
158 if authinfo:
159 # create a password manager
159 # create a password manager
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 passmgr.add_password(*authinfo)
161 passmgr.add_password(*authinfo)
162
162
163 handlers.extend((httpbasicauthhandler(passmgr),
163 handlers.extend((httpbasicauthhandler(passmgr),
164 httpdigestauthhandler(passmgr)))
164 httpdigestauthhandler(passmgr)))
165
165
166 return urllib2.build_opener(*handlers)
166 return urllib2.build_opener(*handlers)
167
167
168 @reraise_safe_exceptions
168 @reraise_safe_exceptions
169 def check_url(self, url, config):
169 def check_url(self, url, config):
170 url_obj = hg_url(url)
170 url_obj = hg_url(url)
171 test_uri, _ = url_obj.authinfo()
171 test_uri, _ = url_obj.authinfo()
172 url_obj.passwd = '*****'
172 url_obj.passwd = '*****'
173 cleaned_uri = str(url_obj)
173 cleaned_uri = str(url_obj)
174
174
175 if not test_uri.endswith('info/refs'):
175 if not test_uri.endswith('info/refs'):
176 test_uri = test_uri.rstrip('/') + '/info/refs'
176 test_uri = test_uri.rstrip('/') + '/info/refs'
177
177
178 o = self._build_opener(url)
178 o = self._build_opener(url)
179 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
179 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
180
180
181 q = {"service": 'git-upload-pack'}
181 q = {"service": 'git-upload-pack'}
182 qs = '?%s' % urllib.urlencode(q)
182 qs = '?%s' % urllib.urlencode(q)
183 cu = "%s%s" % (test_uri, qs)
183 cu = "%s%s" % (test_uri, qs)
184 req = urllib2.Request(cu, None, {})
184 req = urllib2.Request(cu, None, {})
185
185
186 try:
186 try:
187 resp = o.open(req)
187 resp = o.open(req)
188 if resp.code != 200:
188 if resp.code != 200:
189 raise Exception('Return Code is not 200')
189 raise Exception('Return Code is not 200')
190 except Exception as e:
190 except Exception as e:
191 # means it cannot be cloned
191 # means it cannot be cloned
192 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
192 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
193
193
194 # now detect if it's proper git repo
194 # now detect if it's proper git repo
195 gitdata = resp.read()
195 gitdata = resp.read()
196 if 'service=git-upload-pack' in gitdata:
196 if 'service=git-upload-pack' in gitdata:
197 pass
197 pass
198 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
198 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
199 # old style git can return some other format !
199 # old style git can return some other format !
200 pass
200 pass
201 else:
201 else:
202 raise urllib2.URLError(
202 raise urllib2.URLError(
203 "url [%s] does not look like an git" % (cleaned_uri,))
203 "url [%s] does not look like an git" % (cleaned_uri,))
204
204
205 return True
205 return True
206
206
207 @reraise_safe_exceptions
207 @reraise_safe_exceptions
208 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
208 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
209 remote_refs = self.fetch(wire, url, apply_refs=False)
209 remote_refs = self.fetch(wire, url, apply_refs=False)
210 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
211 if isinstance(valid_refs, list):
211 if isinstance(valid_refs, list):
212 valid_refs = tuple(valid_refs)
212 valid_refs = tuple(valid_refs)
213
213
214 for k in remote_refs:
214 for k in remote_refs:
215 # only parse heads/tags and skip so called deferred tags
215 # only parse heads/tags and skip so called deferred tags
216 if k.startswith(valid_refs) and not k.endswith(deferred):
216 if k.startswith(valid_refs) and not k.endswith(deferred):
217 repo[k] = remote_refs[k]
217 repo[k] = remote_refs[k]
218
218
219 if update_after_clone:
219 if update_after_clone:
220 # we want to checkout HEAD
220 # we want to checkout HEAD
221 repo["HEAD"] = remote_refs["HEAD"]
221 repo["HEAD"] = remote_refs["HEAD"]
222 index.build_index_from_tree(repo.path, repo.index_path(),
222 index.build_index_from_tree(repo.path, repo.index_path(),
223 repo.object_store, repo["HEAD"].tree)
223 repo.object_store, repo["HEAD"].tree)
224
224
225 # TODO: this is quite complex, check if that can be simplified
225 # TODO: this is quite complex, check if that can be simplified
226 @reraise_safe_exceptions
226 @reraise_safe_exceptions
227 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
227 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
228 repo = self._factory.repo(wire)
228 repo = self._factory.repo(wire)
229 object_store = repo.object_store
229 object_store = repo.object_store
230
230
231 # Create tree and populates it with blobs
231 # Create tree and populates it with blobs
232 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
232 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
233
233
234 for node in updated:
234 for node in updated:
235 # Compute subdirs if needed
235 # Compute subdirs if needed
236 dirpath, nodename = vcspath.split(node['path'])
236 dirpath, nodename = vcspath.split(node['path'])
237 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
237 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
238 parent = commit_tree
238 parent = commit_tree
239 ancestors = [('', parent)]
239 ancestors = [('', parent)]
240
240
241 # Tries to dig for the deepest existing tree
241 # Tries to dig for the deepest existing tree
242 while dirnames:
242 while dirnames:
243 curdir = dirnames.pop(0)
243 curdir = dirnames.pop(0)
244 try:
244 try:
245 dir_id = parent[curdir][1]
245 dir_id = parent[curdir][1]
246 except KeyError:
246 except KeyError:
247 # put curdir back into dirnames and stops
247 # put curdir back into dirnames and stops
248 dirnames.insert(0, curdir)
248 dirnames.insert(0, curdir)
249 break
249 break
250 else:
250 else:
251 # If found, updates parent
251 # If found, updates parent
252 parent = repo[dir_id]
252 parent = repo[dir_id]
253 ancestors.append((curdir, parent))
253 ancestors.append((curdir, parent))
254 # Now parent is deepest existing tree and we need to create
254 # Now parent is deepest existing tree and we need to create
255 # subtrees for dirnames (in reverse order)
255 # subtrees for dirnames (in reverse order)
256 # [this only applies for nodes from added]
256 # [this only applies for nodes from added]
257 new_trees = []
257 new_trees = []
258
258
259 blob = objects.Blob.from_string(node['content'])
259 blob = objects.Blob.from_string(node['content'])
260
260
261 if dirnames:
261 if dirnames:
262 # If there are trees which should be created we need to build
262 # If there are trees which should be created we need to build
263 # them now (in reverse order)
263 # them now (in reverse order)
264 reversed_dirnames = list(reversed(dirnames))
264 reversed_dirnames = list(reversed(dirnames))
265 curtree = objects.Tree()
265 curtree = objects.Tree()
266 curtree[node['node_path']] = node['mode'], blob.id
266 curtree[node['node_path']] = node['mode'], blob.id
267 new_trees.append(curtree)
267 new_trees.append(curtree)
268 for dirname in reversed_dirnames[:-1]:
268 for dirname in reversed_dirnames[:-1]:
269 newtree = objects.Tree()
269 newtree = objects.Tree()
270 newtree[dirname] = (DIR_STAT, curtree.id)
270 newtree[dirname] = (DIR_STAT, curtree.id)
271 new_trees.append(newtree)
271 new_trees.append(newtree)
272 curtree = newtree
272 curtree = newtree
273 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
273 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
274 else:
274 else:
275 parent.add(
275 parent.add(
276 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
276 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
277
277
278 new_trees.append(parent)
278 new_trees.append(parent)
279 # Update ancestors
279 # Update ancestors
280 reversed_ancestors = reversed(
280 reversed_ancestors = reversed(
281 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
281 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
282 for parent, tree, path in reversed_ancestors:
282 for parent, tree, path in reversed_ancestors:
283 parent[path] = (DIR_STAT, tree.id)
283 parent[path] = (DIR_STAT, tree.id)
284 object_store.add_object(tree)
284 object_store.add_object(tree)
285
285
286 object_store.add_object(blob)
286 object_store.add_object(blob)
287 for tree in new_trees:
287 for tree in new_trees:
288 object_store.add_object(tree)
288 object_store.add_object(tree)
289
289
290 for node_path in removed:
290 for node_path in removed:
291 paths = node_path.split('/')
291 paths = node_path.split('/')
292 tree = commit_tree
292 tree = commit_tree
293 trees = [tree]
293 trees = [tree]
294 # Traverse deep into the forest...
294 # Traverse deep into the forest...
295 for path in paths:
295 for path in paths:
296 try:
296 try:
297 obj = repo[tree[path][1]]
297 obj = repo[tree[path][1]]
298 if isinstance(obj, objects.Tree):
298 if isinstance(obj, objects.Tree):
299 trees.append(obj)
299 trees.append(obj)
300 tree = obj
300 tree = obj
301 except KeyError:
301 except KeyError:
302 break
302 break
303 # Cut down the blob and all rotten trees on the way back...
303 # Cut down the blob and all rotten trees on the way back...
304 for path, tree in reversed(zip(paths, trees)):
304 for path, tree in reversed(zip(paths, trees)):
305 del tree[path]
305 del tree[path]
306 if tree:
306 if tree:
307 # This tree still has elements - don't remove it or any
307 # This tree still has elements - don't remove it or any
308 # of it's parents
308 # of it's parents
309 break
309 break
310
310
311 object_store.add_object(commit_tree)
311 object_store.add_object(commit_tree)
312
312
313 # Create commit
313 # Create commit
314 commit = objects.Commit()
314 commit = objects.Commit()
315 commit.tree = commit_tree.id
315 commit.tree = commit_tree.id
316 for k, v in commit_data.iteritems():
316 for k, v in commit_data.iteritems():
317 setattr(commit, k, v)
317 setattr(commit, k, v)
318 object_store.add_object(commit)
318 object_store.add_object(commit)
319
319
320 ref = 'refs/heads/%s' % branch
320 ref = 'refs/heads/%s' % branch
321 repo.refs[ref] = commit.id
321 repo.refs[ref] = commit.id
322
322
323 return commit.id
323 return commit.id
324
324
325 @reraise_safe_exceptions
325 @reraise_safe_exceptions
326 def fetch(self, wire, url, apply_refs=True, refs=None):
326 def fetch(self, wire, url, apply_refs=True, refs=None):
327 if url != 'default' and '://' not in url:
327 if url != 'default' and '://' not in url:
328 client = LocalGitClient(url)
328 client = LocalGitClient(url)
329 else:
329 else:
330 url_obj = hg_url(url)
330 url_obj = hg_url(url)
331 o = self._build_opener(url)
331 o = self._build_opener(url)
332 url, _ = url_obj.authinfo()
332 url, _ = url_obj.authinfo()
333 client = HttpGitClient(base_url=url, opener=o)
333 client = HttpGitClient(base_url=url, opener=o)
334 repo = self._factory.repo(wire)
334 repo = self._factory.repo(wire)
335
335
336 determine_wants = repo.object_store.determine_wants_all
336 determine_wants = repo.object_store.determine_wants_all
337 if refs:
337 if refs:
338 def determine_wants_requested(references):
338 def determine_wants_requested(references):
339 return [references[r] for r in references if r in refs]
339 return [references[r] for r in references if r in refs]
340 determine_wants = determine_wants_requested
340 determine_wants = determine_wants_requested
341
341
342 try:
342 try:
343 remote_refs = client.fetch(
343 remote_refs = client.fetch(
344 path=url, target=repo, determine_wants=determine_wants)
344 path=url, target=repo, determine_wants=determine_wants)
345 except NotGitRepository:
345 except NotGitRepository:
346 log.warning(
346 log.warning(
347 'Trying to fetch from "%s" failed, not a Git repository.', url)
347 'Trying to fetch from "%s" failed, not a Git repository.', url)
348 raise exceptions.AbortException()
348 raise exceptions.AbortException()
349
349
350 # mikhail: client.fetch() returns all the remote refs, but fetches only
350 # mikhail: client.fetch() returns all the remote refs, but fetches only
351 # refs filtered by `determine_wants` function. We need to filter result
351 # refs filtered by `determine_wants` function. We need to filter result
352 # as well
352 # as well
353 if refs:
353 if refs:
354 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
354 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
355
355
356 if apply_refs:
356 if apply_refs:
357 # TODO: johbo: Needs proper test coverage with a git repository
357 # TODO: johbo: Needs proper test coverage with a git repository
358 # that contains a tag object, so that we would end up with
358 # that contains a tag object, so that we would end up with
359 # a peeled ref at this point.
359 # a peeled ref at this point.
360 PEELED_REF_MARKER = '^{}'
360 PEELED_REF_MARKER = '^{}'
361 for k in remote_refs:
361 for k in remote_refs:
362 if k.endswith(PEELED_REF_MARKER):
362 if k.endswith(PEELED_REF_MARKER):
363 log.info("Skipping peeled reference %s", k)
363 log.info("Skipping peeled reference %s", k)
364 continue
364 continue
365 repo[k] = remote_refs[k]
365 repo[k] = remote_refs[k]
366
366
367 if refs:
367 if refs:
368 # mikhail: explicitly set the head to the last ref.
368 # mikhail: explicitly set the head to the last ref.
369 repo['HEAD'] = remote_refs[refs[-1]]
369 repo['HEAD'] = remote_refs[refs[-1]]
370
370
371 # TODO: mikhail: should we return remote_refs here to be
371 # TODO: mikhail: should we return remote_refs here to be
372 # consistent?
372 # consistent?
373 else:
373 else:
374 return remote_refs
374 return remote_refs
375
375
376 @reraise_safe_exceptions
376 @reraise_safe_exceptions
377 def get_remote_refs(self, wire, url):
377 def get_remote_refs(self, wire, url):
378 repo = Repo(url)
378 repo = Repo(url)
379 return repo.get_refs()
379 return repo.get_refs()
380
380
381 @reraise_safe_exceptions
381 @reraise_safe_exceptions
382 def get_description(self, wire):
382 def get_description(self, wire):
383 repo = self._factory.repo(wire)
383 repo = self._factory.repo(wire)
384 return repo.get_description()
384 return repo.get_description()
385
385
386 @reraise_safe_exceptions
386 @reraise_safe_exceptions
387 def get_file_history(self, wire, file_path, commit_id, limit):
387 def get_file_history(self, wire, file_path, commit_id, limit):
388 repo = self._factory.repo(wire)
388 repo = self._factory.repo(wire)
389 include = [commit_id]
389 include = [commit_id]
390 paths = [file_path]
390 paths = [file_path]
391
391
392 walker = repo.get_walker(include, paths=paths, max_entries=limit)
392 walker = repo.get_walker(include, paths=paths, max_entries=limit)
393 return [x.commit.id for x in walker]
393 return [x.commit.id for x in walker]
394
394
395 @reraise_safe_exceptions
395 @reraise_safe_exceptions
396 def get_missing_revs(self, wire, rev1, rev2, path2):
396 def get_missing_revs(self, wire, rev1, rev2, path2):
397 repo = self._factory.repo(wire)
397 repo = self._factory.repo(wire)
398 LocalGitClient(thin_packs=False).fetch(path2, repo)
398 LocalGitClient(thin_packs=False).fetch(path2, repo)
399
399
400 wire_remote = wire.copy()
400 wire_remote = wire.copy()
401 wire_remote['path'] = path2
401 wire_remote['path'] = path2
402 repo_remote = self._factory.repo(wire_remote)
402 repo_remote = self._factory.repo(wire_remote)
403 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
403 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
404
404
405 revs = [
405 revs = [
406 x.commit.id
406 x.commit.id
407 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
407 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
408 return revs
408 return revs
409
409
410 @reraise_safe_exceptions
410 @reraise_safe_exceptions
411 def get_object(self, wire, sha):
411 def get_object(self, wire, sha):
412 repo = self._factory.repo(wire)
412 repo = self._factory.repo(wire)
413 obj = repo.get_object(sha)
413 obj = repo.get_object(sha)
414 commit_id = obj.id
414 commit_id = obj.id
415
415
416 if isinstance(obj, Tag):
416 if isinstance(obj, Tag):
417 commit_id = obj.object[1]
417 commit_id = obj.object[1]
418
418
419 return {
419 return {
420 'id': obj.id,
420 'id': obj.id,
421 'type': obj.type_name,
421 'type': obj.type_name,
422 'commit_id': commit_id
422 'commit_id': commit_id
423 }
423 }
424
424
425 @reraise_safe_exceptions
425 @reraise_safe_exceptions
426 def get_object_attrs(self, wire, sha, *attrs):
426 def get_object_attrs(self, wire, sha, *attrs):
427 repo = self._factory.repo(wire)
427 repo = self._factory.repo(wire)
428 obj = repo.get_object(sha)
428 obj = repo.get_object(sha)
429 return list(getattr(obj, a) for a in attrs)
429 return list(getattr(obj, a) for a in attrs)
430
430
431 @reraise_safe_exceptions
431 @reraise_safe_exceptions
432 def get_refs(self, wire):
432 def get_refs(self, wire):
433 repo = self._factory.repo(wire)
433 repo = self._factory.repo(wire)
434 result = {}
434 result = {}
435 for ref, sha in repo.refs.as_dict().items():
435 for ref, sha in repo.refs.as_dict().items():
436 peeled_sha = repo.get_peeled(ref)
436 peeled_sha = repo.get_peeled(ref)
437 result[ref] = peeled_sha
437 result[ref] = peeled_sha
438 return result
438 return result
439
439
440 @reraise_safe_exceptions
440 @reraise_safe_exceptions
441 def get_refs_path(self, wire):
441 def get_refs_path(self, wire):
442 repo = self._factory.repo(wire)
442 repo = self._factory.repo(wire)
443 return repo.refs.path
443 return repo.refs.path
444
444
445 @reraise_safe_exceptions
445 @reraise_safe_exceptions
446 def head(self, wire):
446 def head(self, wire):
447 repo = self._factory.repo(wire)
447 repo = self._factory.repo(wire)
448 return repo.head()
448 return repo.head()
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def init(self, wire):
451 def init(self, wire):
452 repo_path = str_to_dulwich(wire['path'])
452 repo_path = str_to_dulwich(wire['path'])
453 self.repo = Repo.init(repo_path)
453 self.repo = Repo.init(repo_path)
454
454
455 @reraise_safe_exceptions
455 @reraise_safe_exceptions
456 def init_bare(self, wire):
456 def init_bare(self, wire):
457 repo_path = str_to_dulwich(wire['path'])
457 repo_path = str_to_dulwich(wire['path'])
458 self.repo = Repo.init_bare(repo_path)
458 self.repo = Repo.init_bare(repo_path)
459
459
460 @reraise_safe_exceptions
460 @reraise_safe_exceptions
461 def revision(self, wire, rev):
461 def revision(self, wire, rev):
462 repo = self._factory.repo(wire)
462 repo = self._factory.repo(wire)
463 obj = repo[rev]
463 obj = repo[rev]
464 obj_data = {
464 obj_data = {
465 'id': obj.id,
465 'id': obj.id,
466 }
466 }
467 try:
467 try:
468 obj_data['tree'] = obj.tree
468 obj_data['tree'] = obj.tree
469 except AttributeError:
469 except AttributeError:
470 pass
470 pass
471 return obj_data
471 return obj_data
472
472
473 @reraise_safe_exceptions
473 @reraise_safe_exceptions
474 def commit_attribute(self, wire, rev, attr):
474 def commit_attribute(self, wire, rev, attr):
475 repo = self._factory.repo(wire)
475 repo = self._factory.repo(wire)
476 obj = repo[rev]
476 obj = repo[rev]
477 return getattr(obj, attr)
477 return getattr(obj, attr)
478
478
479 @reraise_safe_exceptions
479 @reraise_safe_exceptions
480 def set_refs(self, wire, key, value):
480 def set_refs(self, wire, key, value):
481 repo = self._factory.repo(wire)
481 repo = self._factory.repo(wire)
482 repo.refs[key] = value
482 repo.refs[key] = value
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def remove_ref(self, wire, key):
485 def remove_ref(self, wire, key):
486 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
487 del repo.refs[key]
487 del repo.refs[key]
488
488
489 @reraise_safe_exceptions
489 @reraise_safe_exceptions
490 def tree_changes(self, wire, source_id, target_id):
490 def tree_changes(self, wire, source_id, target_id):
491 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
492 source = repo[source_id].tree if source_id else None
492 source = repo[source_id].tree if source_id else None
493 target = repo[target_id].tree
493 target = repo[target_id].tree
494 result = repo.object_store.tree_changes(source, target)
494 result = repo.object_store.tree_changes(source, target)
495 return list(result)
495 return list(result)
496
496
497 @reraise_safe_exceptions
497 @reraise_safe_exceptions
498 def tree_items(self, wire, tree_id):
498 def tree_items(self, wire, tree_id):
499 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
500 tree = repo[tree_id]
500 tree = repo[tree_id]
501
501
502 result = []
502 result = []
503 for item in tree.iteritems():
503 for item in tree.iteritems():
504 item_sha = item.sha
504 item_sha = item.sha
505 item_mode = item.mode
505 item_mode = item.mode
506
506
507 if FILE_MODE(item_mode) == GIT_LINK:
507 if FILE_MODE(item_mode) == GIT_LINK:
508 item_type = "link"
508 item_type = "link"
509 else:
509 else:
510 item_type = repo[item_sha].type_name
510 item_type = repo[item_sha].type_name
511
511
512 result.append((item.path, item_mode, item_sha, item_type))
512 result.append((item.path, item_mode, item_sha, item_type))
513 return result
513 return result
514
514
515 @reraise_safe_exceptions
515 @reraise_safe_exceptions
516 def update_server_info(self, wire):
516 def update_server_info(self, wire):
517 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
518 update_server_info(repo)
518 update_server_info(repo)
519
519
520 @reraise_safe_exceptions
520 @reraise_safe_exceptions
521 def discover_git_version(self):
521 def discover_git_version(self):
522 stdout, _ = self.run_git_command(
522 stdout, _ = self.run_git_command(
523 {}, ['--version'], _bare=True, _safe=True)
523 {}, ['--version'], _bare=True, _safe=True)
524 prefix = 'git version'
525 if stdout.startswith(prefix):
526 stdout = stdout[len(prefix):]
524 return stdout
527 return stdout
525
528
526 @reraise_safe_exceptions
529 @reraise_safe_exceptions
527 def run_git_command(self, wire, cmd, **opts):
530 def run_git_command(self, wire, cmd, **opts):
528 path = wire.get('path', None)
531 path = wire.get('path', None)
529
532
530 if path and os.path.isdir(path):
533 if path and os.path.isdir(path):
531 opts['cwd'] = path
534 opts['cwd'] = path
532
535
533 if '_bare' in opts:
536 if '_bare' in opts:
534 _copts = []
537 _copts = []
535 del opts['_bare']
538 del opts['_bare']
536 else:
539 else:
537 _copts = ['-c', 'core.quotepath=false', ]
540 _copts = ['-c', 'core.quotepath=false', ]
538 safe_call = False
541 safe_call = False
539 if '_safe' in opts:
542 if '_safe' in opts:
540 # no exc on failure
543 # no exc on failure
541 del opts['_safe']
544 del opts['_safe']
542 safe_call = True
545 safe_call = True
543
546
544 gitenv = os.environ.copy()
547 gitenv = os.environ.copy()
545 gitenv.update(opts.pop('extra_env', {}))
548 gitenv.update(opts.pop('extra_env', {}))
546 # need to clean fix GIT_DIR !
549 # need to clean fix GIT_DIR !
547 if 'GIT_DIR' in gitenv:
550 if 'GIT_DIR' in gitenv:
548 del gitenv['GIT_DIR']
551 del gitenv['GIT_DIR']
549 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
552 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
550
553
551 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
554 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
552
555
553 try:
556 try:
554 _opts = {'env': gitenv, 'shell': False}
557 _opts = {'env': gitenv, 'shell': False}
555 _opts.update(opts)
558 _opts.update(opts)
556 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
559 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
557
560
558 return ''.join(p), ''.join(p.error)
561 return ''.join(p), ''.join(p.error)
559 except (EnvironmentError, OSError) as err:
562 except (EnvironmentError, OSError) as err:
560 tb_err = ("Couldn't run git command (%s).\n"
563 tb_err = ("Couldn't run git command (%s).\n"
561 "Original error was:%s\n" % (cmd, err))
564 "Original error was:%s\n" % (cmd, err))
562 log.exception(tb_err)
565 log.exception(tb_err)
563 if safe_call:
566 if safe_call:
564 return '', err
567 return '', err
565 else:
568 else:
566 raise exceptions.VcsException(tb_err)
569 raise exceptions.VcsException(tb_err)
567
570
568
571
569 def str_to_dulwich(value):
572 def str_to_dulwich(value):
570 """
573 """
571 Dulwich 0.10.1a requires `unicode` objects to be passed in.
574 Dulwich 0.10.1a requires `unicode` objects to be passed in.
572 """
575 """
573 return value.decode(settings.WIRE_ENCODING)
576 return value.decode(settings.WIRE_ENCODING)
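The notable addition in the hunk above is discover_git_version(): it reuses run_git_command to execute `git --version` and trims the "git version" prefix before returning the remainder. A minimal standalone sketch of that trimming step, using a hypothetical raw value:

    raw = 'git version 1.7.8.0\n'   # hypothetical output of `git --version`
    prefix = 'git version'
    if raw.startswith(prefix):
        raw = raw[len(prefix):]
    print(raw.strip())              # -> '1.7.8.0'

The second file below applies the same idea to the Mercurial backend, where no subprocess is needed: discover_hg_version() returns the version reported by mercurial.util.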
@@ -1,714 +1,719 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2016 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import sys
21 import sys
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory
31 from vcsserver.base import RepoFactory
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex, hg_url,
33 archival, bin, clone, config as hgconfig, diffopts, hex, hg_url,
34 httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepository,
34 httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepository,
35 match, memctx, exchange, memfilectx, nullrev, patch, peer, revrange, ui,
35 match, memctx, exchange, memfilectx, nullrev, patch, peer, revrange, ui,
36 Abort, LookupError, RepoError, RepoLookupError, InterventionRequired,
36 Abort, LookupError, RepoError, RepoLookupError, InterventionRequired,
37 RequirementError)
37 RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 # force mercurial to only use 1 thread, otherwise it may try to set a
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
57 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
58 baseui.setconfig('worker', 'numcpus', 1)
59
59
60 # If there is no config for the largefiles extension, we explicitly disable
60 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from repositories hgrc file. Recent
61 # it here. This overrides settings from repositories hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
63 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
65 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
66 baseui.setconfig('extensions', 'largefiles', '!')
67
67
68 return baseui
68 return baseui
69
69
70
70
71 def reraise_safe_exceptions(func):
71 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
72 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
73 def wrapper(*args, **kwargs):
74 try:
74 try:
75 return func(*args, **kwargs)
75 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
76 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
77 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
78 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
79 raise_from_original(exceptions.LookupException)
80 except RequirementError:
80 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
81 raise_from_original(exceptions.RequirementException)
82 except RepoError:
82 except RepoError:
83 raise_from_original(exceptions.VcsException)
83 raise_from_original(exceptions.VcsException)
84 except LookupError:
84 except LookupError:
85 raise_from_original(exceptions.LookupException)
85 raise_from_original(exceptions.LookupException)
86 except Exception as e:
86 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
87 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
88 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
89 raise_from_original(exceptions.UnhandledException)
90 raise
90 raise
91 return wrapper
91 return wrapper
92
92
93
93
94 def raise_from_original(new_type):
94 def raise_from_original(new_type):
95 """
95 """
96 Raise a new exception type with original args and traceback.
96 Raise a new exception type with original args and traceback.
97 """
97 """
98 _, original, traceback = sys.exc_info()
98 _, original, traceback = sys.exc_info()
99 try:
99 try:
100 raise new_type(*original.args), None, traceback
100 raise new_type(*original.args), None, traceback
101 finally:
101 finally:
102 del traceback
102 del traceback
103
103
104
104
105 class MercurialFactory(RepoFactory):
105 class MercurialFactory(RepoFactory):
106
106
107 def _create_config(self, config, hooks=True):
107 def _create_config(self, config, hooks=True):
108 if not hooks:
108 if not hooks:
109 hooks_to_clean = frozenset((
109 hooks_to_clean = frozenset((
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 new_config = []
112 new_config = []
113 for section, option, value in config:
113 for section, option, value in config:
114 if section == 'hooks' and option in hooks_to_clean:
114 if section == 'hooks' and option in hooks_to_clean:
115 continue
115 continue
116 new_config.append((section, option, value))
116 new_config.append((section, option, value))
117 config = new_config
117 config = new_config
118
118
119 baseui = make_ui_from_config(config)
119 baseui = make_ui_from_config(config)
120 return baseui
120 return baseui
121
121
122 def _create_repo(self, wire, create):
122 def _create_repo(self, wire, create):
123 baseui = self._create_config(wire["config"])
123 baseui = self._create_config(wire["config"])
124 return localrepository(baseui, wire["path"], create)
124 return localrepository(baseui, wire["path"], create)
125
125
126
126
127 class HgRemote(object):
127 class HgRemote(object):
128
128
129 def __init__(self, factory):
129 def __init__(self, factory):
130 self._factory = factory
130 self._factory = factory
131
131
132 self._bulk_methods = {
132 self._bulk_methods = {
133 "affected_files": self.ctx_files,
133 "affected_files": self.ctx_files,
134 "author": self.ctx_user,
134 "author": self.ctx_user,
135 "branch": self.ctx_branch,
135 "branch": self.ctx_branch,
136 "children": self.ctx_children,
136 "children": self.ctx_children,
137 "date": self.ctx_date,
137 "date": self.ctx_date,
138 "message": self.ctx_description,
138 "message": self.ctx_description,
139 "parents": self.ctx_parents,
139 "parents": self.ctx_parents,
140 "status": self.ctx_status,
140 "status": self.ctx_status,
141 "_file_paths": self.ctx_list,
141 "_file_paths": self.ctx_list,
142 }
142 }
143
143
144 @reraise_safe_exceptions
144 @reraise_safe_exceptions
145 def discover_hg_version(self):
146 from mercurial import util
147 return util.version()
148
149 @reraise_safe_exceptions
145 def archive_repo(self, archive_path, mtime, file_info, kind):
150 def archive_repo(self, archive_path, mtime, file_info, kind):
146 if kind == "tgz":
151 if kind == "tgz":
147 archiver = archival.tarit(archive_path, mtime, "gz")
152 archiver = archival.tarit(archive_path, mtime, "gz")
148 elif kind == "tbz2":
153 elif kind == "tbz2":
149 archiver = archival.tarit(archive_path, mtime, "bz2")
154 archiver = archival.tarit(archive_path, mtime, "bz2")
150 elif kind == 'zip':
155 elif kind == 'zip':
151 archiver = archival.zipit(archive_path, mtime)
156 archiver = archival.zipit(archive_path, mtime)
152 else:
157 else:
153 raise exceptions.ArchiveException(
158 raise exceptions.ArchiveException(
154 'Remote does not support: "%s".' % kind)
159 'Remote does not support: "%s".' % kind)
155
160
156 for f_path, f_mode, f_is_link, f_content in file_info:
161 for f_path, f_mode, f_is_link, f_content in file_info:
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 archiver.done()
163 archiver.done()
159
164
160 @reraise_safe_exceptions
165 @reraise_safe_exceptions
161 def bookmarks(self, wire):
166 def bookmarks(self, wire):
162 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
163 return dict(repo._bookmarks)
168 return dict(repo._bookmarks)
164
169
165 @reraise_safe_exceptions
170 @reraise_safe_exceptions
166 def branches(self, wire, normal, closed):
171 def branches(self, wire, normal, closed):
167 repo = self._factory.repo(wire)
172 repo = self._factory.repo(wire)
168 iter_branches = repo.branchmap().iterbranches()
173 iter_branches = repo.branchmap().iterbranches()
169 bt = {}
174 bt = {}
170 for branch_name, _heads, tip, is_closed in iter_branches:
175 for branch_name, _heads, tip, is_closed in iter_branches:
171 if normal and not is_closed:
176 if normal and not is_closed:
172 bt[branch_name] = tip
177 bt[branch_name] = tip
173 if closed and is_closed:
178 if closed and is_closed:
174 bt[branch_name] = tip
179 bt[branch_name] = tip
175
180
176 return bt
181 return bt
177
182
178 @reraise_safe_exceptions
183 @reraise_safe_exceptions
179 def bulk_request(self, wire, rev, pre_load):
184 def bulk_request(self, wire, rev, pre_load):
180 result = {}
185 result = {}
181 for attr in pre_load:
186 for attr in pre_load:
182 try:
187 try:
183 method = self._bulk_methods[attr]
188 method = self._bulk_methods[attr]
184 result[attr] = method(wire, rev)
189 result[attr] = method(wire, rev)
185 except KeyError:
190 except KeyError:
186 raise exceptions.VcsException(
191 raise exceptions.VcsException(
187 'Unknown bulk attribute: "%s"' % attr)
192 'Unknown bulk attribute: "%s"' % attr)
188 return result
193 return result
189
194
190 @reraise_safe_exceptions
195 @reraise_safe_exceptions
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
198 clone(baseui, source, dest, noupdate=not update_after_clone)
194
199
195 @reraise_safe_exceptions
200 @reraise_safe_exceptions
196 def commitctx(
201 def commitctx(
197 self, wire, message, parents, commit_time, commit_timezone,
202 self, wire, message, parents, commit_time, commit_timezone,
198 user, files, extra, removed, updated):
203 user, files, extra, removed, updated):
199
204
200 def _filectxfn(_repo, memctx, path):
205 def _filectxfn(_repo, memctx, path):
201 """
206 """
202 Marks given path as added/changed/removed in a given _repo. This is
207 Marks given path as added/changed/removed in a given _repo. This is
203 for internal mercurial commit function.
208 for internal mercurial commit function.
204 """
209 """
205
210
206 # check if this path is removed
211 # check if this path is removed
207 if path in removed:
212 if path in removed:
208 # returning None is a way to mark node for removal
213 # returning None is a way to mark node for removal
209 return None
214 return None
210
215
211 # check if this path is added
216 # check if this path is added
212 for node in updated:
217 for node in updated:
213 if node['path'] == path:
218 if node['path'] == path:
214 return memfilectx(
219 return memfilectx(
215 _repo,
220 _repo,
216 path=node['path'],
221 path=node['path'],
217 data=node['content'],
222 data=node['content'],
218 islink=False,
223 islink=False,
219 isexec=bool(node['mode'] & stat.S_IXUSR),
224 isexec=bool(node['mode'] & stat.S_IXUSR),
220 copied=False,
225 copied=False,
221 memctx=memctx)
226 memctx=memctx)
222
227
223 raise exceptions.AbortException(
228 raise exceptions.AbortException(
224 "Given path haven't been marked as added, "
229 "Given path haven't been marked as added, "
225 "changed or removed (%s)" % path)
230 "changed or removed (%s)" % path)
226
231
227 repo = self._factory.repo(wire)
232 repo = self._factory.repo(wire)
228
233
229 commit_ctx = memctx(
234 commit_ctx = memctx(
230 repo=repo,
235 repo=repo,
231 parents=parents,
236 parents=parents,
232 text=message,
237 text=message,
233 files=files,
238 files=files,
234 filectxfn=_filectxfn,
239 filectxfn=_filectxfn,
235 user=user,
240 user=user,
236 date=(commit_time, commit_timezone),
241 date=(commit_time, commit_timezone),
237 extra=extra)
242 extra=extra)
238
243
239 n = repo.commitctx(commit_ctx)
244 n = repo.commitctx(commit_ctx)
240 new_id = hex(n)
245 new_id = hex(n)
241
246
242 return new_id
247 return new_id
243
248
244 @reraise_safe_exceptions
249 @reraise_safe_exceptions
245 def ctx_branch(self, wire, revision):
250 def ctx_branch(self, wire, revision):
246 repo = self._factory.repo(wire)
251 repo = self._factory.repo(wire)
247 ctx = repo[revision]
252 ctx = repo[revision]
248 return ctx.branch()
253 return ctx.branch()
249
254
250 @reraise_safe_exceptions
255 @reraise_safe_exceptions
251 def ctx_children(self, wire, revision):
256 def ctx_children(self, wire, revision):
252 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
253 ctx = repo[revision]
258 ctx = repo[revision]
254 return [child.rev() for child in ctx.children()]
259 return [child.rev() for child in ctx.children()]
255
260
256 @reraise_safe_exceptions
261 @reraise_safe_exceptions
257 def ctx_date(self, wire, revision):
262 def ctx_date(self, wire, revision):
258 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
259 ctx = repo[revision]
264 ctx = repo[revision]
260 return ctx.date()
265 return ctx.date()
261
266
262 @reraise_safe_exceptions
267 @reraise_safe_exceptions
263 def ctx_description(self, wire, revision):
268 def ctx_description(self, wire, revision):
264 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
265 ctx = repo[revision]
270 ctx = repo[revision]
266 return ctx.description()
271 return ctx.description()
267
272
268 @reraise_safe_exceptions
273 @reraise_safe_exceptions
269 def ctx_diff(
274 def ctx_diff(
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
272 ctx = repo[revision]
277 ctx = repo[revision]
273 result = ctx.diff(
278 result = ctx.diff(
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
279 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 return list(result)
280 return list(result)
276
281
277 @reraise_safe_exceptions
282 @reraise_safe_exceptions
278 def ctx_files(self, wire, revision):
283 def ctx_files(self, wire, revision):
279 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
280 ctx = repo[revision]
285 ctx = repo[revision]
281 return ctx.files()
286 return ctx.files()
282
287
283 @reraise_safe_exceptions
288 @reraise_safe_exceptions
284 def ctx_list(self, path, revision):
289 def ctx_list(self, path, revision):
285 repo = self._factory.repo(path)
290 repo = self._factory.repo(path)
286 ctx = repo[revision]
291 ctx = repo[revision]
287 return list(ctx)
292 return list(ctx)
288
293
289 @reraise_safe_exceptions
294 @reraise_safe_exceptions
290 def ctx_parents(self, wire, revision):
295 def ctx_parents(self, wire, revision):
291 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
292 ctx = repo[revision]
297 ctx = repo[revision]
293 return [parent.rev() for parent in ctx.parents()]
298 return [parent.rev() for parent in ctx.parents()]
294
299
295 @reraise_safe_exceptions
300 @reraise_safe_exceptions
296 def ctx_substate(self, wire, revision):
301 def ctx_substate(self, wire, revision):
297 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
298 ctx = repo[revision]
303 ctx = repo[revision]
299 return ctx.substate
304 return ctx.substate
300
305
301 @reraise_safe_exceptions
306 @reraise_safe_exceptions
302 def ctx_status(self, wire, revision):
307 def ctx_status(self, wire, revision):
303 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
304 ctx = repo[revision]
309 ctx = repo[revision]
305 status = repo[ctx.p1().node()].status(other=ctx.node())
310 status = repo[ctx.p1().node()].status(other=ctx.node())
306 # object of status (odd, custom named tuple in mercurial) is not
311 # object of status (odd, custom named tuple in mercurial) is not
307 # correctly serializable via Pyro, we make it a list, as the underling
312 # correctly serializable via Pyro, we make it a list, as the underling
308 # API expects this to be a list
313 # API expects this to be a list
309 return list(status)
314 return list(status)
310
315
311 @reraise_safe_exceptions
316 @reraise_safe_exceptions
312 def ctx_user(self, wire, revision):
317 def ctx_user(self, wire, revision):
313 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
314 ctx = repo[revision]
319 ctx = repo[revision]
315 return ctx.user()
320 return ctx.user()
316
321
317 @reraise_safe_exceptions
322 @reraise_safe_exceptions
318 def check_url(self, url, config):
323 def check_url(self, url, config):
319 log.info("Checking URL for remote cloning/import: %s", url)
324 log.info("Checking URL for remote cloning/import: %s", url)
320 _proto = None
325 _proto = None
321 if '+' in url[:url.find('://')]:
326 if '+' in url[:url.find('://')]:
322 _proto = url[0:url.find('+')]
327 _proto = url[0:url.find('+')]
323 url = url[url.find('+') + 1:]
328 url = url[url.find('+') + 1:]
324 handlers = []
329 handlers = []
325 url_obj = hg_url(url)
330 url_obj = hg_url(url)
326 test_uri, authinfo = url_obj.authinfo()
331 test_uri, authinfo = url_obj.authinfo()
327 url_obj.passwd = '*****'
332 url_obj.passwd = '*****'
328 cleaned_uri = str(url_obj)
333 cleaned_uri = str(url_obj)
329
334
330 if authinfo:
335 if authinfo:
331 # create a password manager
336 # create a password manager
332 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
337 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
333 passmgr.add_password(*authinfo)
338 passmgr.add_password(*authinfo)
334
339
335 handlers.extend((httpbasicauthhandler(passmgr),
340 handlers.extend((httpbasicauthhandler(passmgr),
336 httpdigestauthhandler(passmgr)))
341 httpdigestauthhandler(passmgr)))
337
342
338 o = urllib2.build_opener(*handlers)
343 o = urllib2.build_opener(*handlers)
339 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
344 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
340 ('Accept', 'application/mercurial-0.1')]
345 ('Accept', 'application/mercurial-0.1')]
341
346
342 q = {"cmd": 'between'}
347 q = {"cmd": 'between'}
343 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
348 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
344 qs = '?%s' % urllib.urlencode(q)
349 qs = '?%s' % urllib.urlencode(q)
345 cu = "%s%s" % (test_uri, qs)
350 cu = "%s%s" % (test_uri, qs)
346 req = urllib2.Request(cu, None, {})
351 req = urllib2.Request(cu, None, {})
347
352
348 try:
353 try:
349 log.debug("Trying to open URL %s", url)
354 log.debug("Trying to open URL %s", url)
350 resp = o.open(req)
355 resp = o.open(req)
351 if resp.code != 200:
356 if resp.code != 200:
352 raise exceptions.URLError('Return Code is not 200')
357 raise exceptions.URLError('Return Code is not 200')
353 except Exception as e:
358 except Exception as e:
354 log.warning("URL cannot be opened: %s", url, exc_info=True)
359 log.warning("URL cannot be opened: %s", url, exc_info=True)
355 # means it cannot be cloned
360 # means it cannot be cloned
356 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
361 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
357
362
358 # now check if it's a proper hg repo, but don't do it for svn
363 # now check if it's a proper hg repo, but don't do it for svn
359 try:
364 try:
360 if _proto == 'svn':
365 if _proto == 'svn':
361 pass
366 pass
362 else:
367 else:
363 # check for pure hg repos
368 # check for pure hg repos
364 log.debug(
369 log.debug(
365 "Verifying if URL is a Mercurial repository: %s", url)
370 "Verifying if URL is a Mercurial repository: %s", url)
366 httppeer(make_ui_from_config(config), url).lookup('tip')
371 httppeer(make_ui_from_config(config), url).lookup('tip')
367 except Exception as e:
372 except Exception as e:
368 log.warning("URL is not a valid Mercurial repository: %s", url)
373 log.warning("URL is not a valid Mercurial repository: %s", url)
369 raise exceptions.URLError(
374 raise exceptions.URLError(
370 "url [%s] does not look like an hg repo org_exc: %s"
375 "url [%s] does not look like an hg repo org_exc: %s"
371 % (cleaned_uri, e))
376 % (cleaned_uri, e))
372
377
373 log.info("URL is a valid Mercurial repository: %s", url)
378 log.info("URL is a valid Mercurial repository: %s", url)
374 return True
379 return True
375
380
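For reference, the probe assembled in check_url above is a minimal Mercurial wire-protocol request; a rough sketch of the final URL it opens (hypothetical host, Python 2 as in this module):

    import urllib

    # illustrative only -- the host below is made up
    pairs = '%s-%s' % ('0' * 40, '0' * 40)
    probe = 'https://example.com/repo' + '?' + urllib.urlencode(
        [('cmd', 'between'), ('pairs', pairs)])
    # -> https://example.com/repo?cmd=between&pairs=000...000-000...000

The `between` command over null node pairs is a cheap request that Mercurial servers answer, which is why it doubles as a reachability and authentication check before the repository type itself is verified.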
376 @reraise_safe_exceptions
381 @reraise_safe_exceptions
377 def diff(
382 def diff(
378 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
383 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
379 context):
384 context):
380 repo = self._factory.repo(wire)
385 repo = self._factory.repo(wire)
381
386
382 if file_filter:
387 if file_filter:
383 filter = match(file_filter[0], '', [file_filter[1]])
388 filter = match(file_filter[0], '', [file_filter[1]])
384 else:
389 else:
385 filter = file_filter
390 filter = file_filter
386 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
391 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
387
392
388 try:
393 try:
389 return "".join(patch.diff(
394 return "".join(patch.diff(
390 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
395 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
391 except RepoLookupError:
396 except RepoLookupError:
392 raise exceptions.LookupException()
397 raise exceptions.LookupException()
393
398
394 @reraise_safe_exceptions
399 @reraise_safe_exceptions
395 def file_history(self, wire, revision, path, limit):
400 def file_history(self, wire, revision, path, limit):
396 repo = self._factory.repo(wire)
401 repo = self._factory.repo(wire)
397
402
398 ctx = repo[revision]
403 ctx = repo[revision]
399 fctx = ctx.filectx(path)
404 fctx = ctx.filectx(path)
400
405
401 def history_iter():
406 def history_iter():
402 limit_rev = fctx.rev()
407 limit_rev = fctx.rev()
403 for obj in reversed(list(fctx.filelog())):
408 for obj in reversed(list(fctx.filelog())):
404 obj = fctx.filectx(obj)
409 obj = fctx.filectx(obj)
405 if limit_rev >= obj.rev():
410 if limit_rev >= obj.rev():
406 yield obj
411 yield obj
407
412
408 history = []
413 history = []
409 for cnt, obj in enumerate(history_iter()):
414 for cnt, obj in enumerate(history_iter()):
410 if limit and cnt >= limit:
415 if limit and cnt >= limit:
411 break
416 break
412 history.append(hex(obj.node()))
417 history.append(hex(obj.node()))
413
418
414         return history
419         return history
415
420
416 @reraise_safe_exceptions
421 @reraise_safe_exceptions
417 def file_history_untill(self, wire, revision, path, limit):
422 def file_history_untill(self, wire, revision, path, limit):
418 repo = self._factory.repo(wire)
423 repo = self._factory.repo(wire)
419 ctx = repo[revision]
424 ctx = repo[revision]
420 fctx = ctx.filectx(path)
425 fctx = ctx.filectx(path)
421
426
422 file_log = list(fctx.filelog())
427 file_log = list(fctx.filelog())
423 if limit:
428 if limit:
424 # Limit to the last n items
429 # Limit to the last n items
425 file_log = file_log[-limit:]
430 file_log = file_log[-limit:]
426
431
427 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
432 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
428
433
429 @reraise_safe_exceptions
434 @reraise_safe_exceptions
430 def fctx_annotate(self, wire, revision, path):
435 def fctx_annotate(self, wire, revision, path):
431 repo = self._factory.repo(wire)
436 repo = self._factory.repo(wire)
432 ctx = repo[revision]
437 ctx = repo[revision]
433 fctx = ctx.filectx(path)
438 fctx = ctx.filectx(path)
434
439
435 result = []
440 result = []
436 for i, annotate_data in enumerate(fctx.annotate()):
441 for i, annotate_data in enumerate(fctx.annotate()):
437 ln_no = i + 1
442 ln_no = i + 1
438 sha = hex(annotate_data[0].node())
443 sha = hex(annotate_data[0].node())
439 result.append((ln_no, sha, annotate_data[1]))
444 result.append((ln_no, sha, annotate_data[1]))
440 return result
445 return result
441
446
442 @reraise_safe_exceptions
447 @reraise_safe_exceptions
443 def fctx_data(self, wire, revision, path):
448 def fctx_data(self, wire, revision, path):
444 repo = self._factory.repo(wire)
449 repo = self._factory.repo(wire)
445 ctx = repo[revision]
450 ctx = repo[revision]
446 fctx = ctx.filectx(path)
451 fctx = ctx.filectx(path)
447 return fctx.data()
452 return fctx.data()
448
453
449 @reraise_safe_exceptions
454 @reraise_safe_exceptions
450 def fctx_flags(self, wire, revision, path):
455 def fctx_flags(self, wire, revision, path):
451 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
452 ctx = repo[revision]
457 ctx = repo[revision]
453 fctx = ctx.filectx(path)
458 fctx = ctx.filectx(path)
454 return fctx.flags()
459 return fctx.flags()
455
460
456 @reraise_safe_exceptions
461 @reraise_safe_exceptions
457 def fctx_size(self, wire, revision, path):
462 def fctx_size(self, wire, revision, path):
458 repo = self._factory.repo(wire)
463 repo = self._factory.repo(wire)
459 ctx = repo[revision]
464 ctx = repo[revision]
460 fctx = ctx.filectx(path)
465 fctx = ctx.filectx(path)
461 return fctx.size()
466 return fctx.size()
462
467
463 @reraise_safe_exceptions
468 @reraise_safe_exceptions
464 def get_all_commit_ids(self, wire, name):
469 def get_all_commit_ids(self, wire, name):
465 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
466 revs = repo.filtered(name).changelog.index
471 revs = repo.filtered(name).changelog.index
467 return map(lambda x: hex(x[7]), revs)[:-1]
472 return map(lambda x: hex(x[7]), revs)[:-1]
468
473
469 @reraise_safe_exceptions
474 @reraise_safe_exceptions
470 def get_config_value(self, wire, section, name, untrusted=False):
475 def get_config_value(self, wire, section, name, untrusted=False):
471 repo = self._factory.repo(wire)
476 repo = self._factory.repo(wire)
472 return repo.ui.config(section, name, untrusted=untrusted)
477 return repo.ui.config(section, name, untrusted=untrusted)
473
478
474 @reraise_safe_exceptions
479 @reraise_safe_exceptions
475 def get_config_bool(self, wire, section, name, untrusted=False):
480 def get_config_bool(self, wire, section, name, untrusted=False):
476 repo = self._factory.repo(wire)
481 repo = self._factory.repo(wire)
477 return repo.ui.configbool(section, name, untrusted=untrusted)
482 return repo.ui.configbool(section, name, untrusted=untrusted)
478
483
479 @reraise_safe_exceptions
484 @reraise_safe_exceptions
480 def get_config_list(self, wire, section, name, untrusted=False):
485 def get_config_list(self, wire, section, name, untrusted=False):
481 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
482 return repo.ui.configlist(section, name, untrusted=untrusted)
487 return repo.ui.configlist(section, name, untrusted=untrusted)
483
488
484 @reraise_safe_exceptions
489 @reraise_safe_exceptions
485 def is_large_file(self, wire, path):
490 def is_large_file(self, wire, path):
486 return largefiles.lfutil.isstandin(path)
491 return largefiles.lfutil.isstandin(path)
487
492
488 @reraise_safe_exceptions
493 @reraise_safe_exceptions
489 def in_store(self, wire, sha):
494 def in_store(self, wire, sha):
490 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
491 return largefiles.lfutil.instore(repo, sha)
496 return largefiles.lfutil.instore(repo, sha)
492
497
493 @reraise_safe_exceptions
498 @reraise_safe_exceptions
494 def in_user_cache(self, wire, sha):
499 def in_user_cache(self, wire, sha):
495 repo = self._factory.repo(wire)
500 repo = self._factory.repo(wire)
496 return largefiles.lfutil.inusercache(repo.ui, sha)
501 return largefiles.lfutil.inusercache(repo.ui, sha)
497
502
498 @reraise_safe_exceptions
503 @reraise_safe_exceptions
499 def store_path(self, wire, sha):
504 def store_path(self, wire, sha):
500 repo = self._factory.repo(wire)
505 repo = self._factory.repo(wire)
501 return largefiles.lfutil.storepath(repo, sha)
506 return largefiles.lfutil.storepath(repo, sha)
502
507
503 @reraise_safe_exceptions
508 @reraise_safe_exceptions
504 def link(self, wire, sha, path):
509 def link(self, wire, sha, path):
505 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
506 largefiles.lfutil.link(
511 largefiles.lfutil.link(
507 largefiles.lfutil.usercachepath(repo.ui, sha), path)
512 largefiles.lfutil.usercachepath(repo.ui, sha), path)
508
513
509 @reraise_safe_exceptions
514 @reraise_safe_exceptions
510 def localrepository(self, wire, create=False):
515 def localrepository(self, wire, create=False):
511 self._factory.repo(wire, create=create)
516 self._factory.repo(wire, create=create)
512
517
513 @reraise_safe_exceptions
518 @reraise_safe_exceptions
514 def lookup(self, wire, revision, both):
519 def lookup(self, wire, revision, both):
515 # TODO Paris: Ugly hack to "deserialize" long for msgpack
520 # TODO Paris: Ugly hack to "deserialize" long for msgpack
516 if isinstance(revision, float):
521 if isinstance(revision, float):
517 revision = long(revision)
522 revision = long(revision)
518 repo = self._factory.repo(wire)
523 repo = self._factory.repo(wire)
519 try:
524 try:
520 ctx = repo[revision]
525 ctx = repo[revision]
521 except RepoLookupError:
526 except RepoLookupError:
522 raise exceptions.LookupException(revision)
527 raise exceptions.LookupException(revision)
523 except LookupError as e:
528 except LookupError as e:
524 raise exceptions.LookupException(e.name)
529 raise exceptions.LookupException(e.name)
525
530
526 if not both:
531 if not both:
527 return ctx.hex()
532 return ctx.hex()
528
533
529 ctx = repo[ctx.hex()]
534 ctx = repo[ctx.hex()]
530 return ctx.hex(), ctx.rev()
535 return ctx.hex(), ctx.rev()
531
536
532 @reraise_safe_exceptions
537 @reraise_safe_exceptions
533 def pull(self, wire, url, commit_ids=None):
538 def pull(self, wire, url, commit_ids=None):
534 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
535 remote = peer(repo, {}, url)
540 remote = peer(repo, {}, url)
536 if commit_ids:
541 if commit_ids:
537 commit_ids = [bin(commit_id) for commit_id in commit_ids]
542 commit_ids = [bin(commit_id) for commit_id in commit_ids]
538
543
539 return exchange.pull(
544 return exchange.pull(
540 repo, remote, heads=commit_ids, force=None).cgresult
545 repo, remote, heads=commit_ids, force=None).cgresult
541
546
542 @reraise_safe_exceptions
547 @reraise_safe_exceptions
543 def revision(self, wire, rev):
548 def revision(self, wire, rev):
544 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
545 ctx = repo[rev]
550 ctx = repo[rev]
546 return ctx.rev()
551 return ctx.rev()
547
552
548 @reraise_safe_exceptions
553 @reraise_safe_exceptions
549 def rev_range(self, wire, filter):
554 def rev_range(self, wire, filter):
550 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
551 revisions = [rev for rev in revrange(repo, filter)]
556 revisions = [rev for rev in revrange(repo, filter)]
552 return revisions
557 return revisions
553
558
554 @reraise_safe_exceptions
559 @reraise_safe_exceptions
555 def rev_range_hash(self, wire, node):
560 def rev_range_hash(self, wire, node):
556 repo = self._factory.repo(wire)
561 repo = self._factory.repo(wire)
557
562
558 def get_revs(repo, rev_opt):
563 def get_revs(repo, rev_opt):
559 if rev_opt:
564 if rev_opt:
560 revs = revrange(repo, rev_opt)
565 revs = revrange(repo, rev_opt)
561 if len(revs) == 0:
566 if len(revs) == 0:
562 return (nullrev, nullrev)
567 return (nullrev, nullrev)
563 return max(revs), min(revs)
568 return max(revs), min(revs)
564 else:
569 else:
565 return len(repo) - 1, 0
570 return len(repo) - 1, 0
566
571
567 stop, start = get_revs(repo, [node + ':'])
572 stop, start = get_revs(repo, [node + ':'])
568 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
573 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
569 return revs
574 return revs
570
575
571 @reraise_safe_exceptions
576 @reraise_safe_exceptions
572 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
577 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
573 other_path = kwargs.pop('other_path', None)
578 other_path = kwargs.pop('other_path', None)
574
579
575 # case when we want to compare two independent repositories
580 # case when we want to compare two independent repositories
576 if other_path and other_path != wire["path"]:
581 if other_path and other_path != wire["path"]:
577 baseui = self._factory._create_config(wire["config"])
582 baseui = self._factory._create_config(wire["config"])
578 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
583 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
579 else:
584 else:
580 repo = self._factory.repo(wire)
585 repo = self._factory.repo(wire)
581 return list(repo.revs(rev_spec, *args))
586 return list(repo.revs(rev_spec, *args))
582
587
583 @reraise_safe_exceptions
588 @reraise_safe_exceptions
584 def strip(self, wire, revision, update, backup):
589 def strip(self, wire, revision, update, backup):
585 repo = self._factory.repo(wire)
590 repo = self._factory.repo(wire)
586 ctx = repo[revision]
591 ctx = repo[revision]
587 hgext_strip(
592 hgext_strip(
588 repo.baseui, repo, ctx.node(), update=update, backup=backup)
593 repo.baseui, repo, ctx.node(), update=update, backup=backup)
589
594
590 @reraise_safe_exceptions
595 @reraise_safe_exceptions
591 def tag(self, wire, name, revision, message, local, user,
596 def tag(self, wire, name, revision, message, local, user,
592 tag_time, tag_timezone):
597 tag_time, tag_timezone):
593 repo = self._factory.repo(wire)
598 repo = self._factory.repo(wire)
594 ctx = repo[revision]
599 ctx = repo[revision]
595 node = ctx.node()
600 node = ctx.node()
596
601
597 date = (tag_time, tag_timezone)
602 date = (tag_time, tag_timezone)
598 try:
603 try:
599 repo.tag(name, node, message, local, user, date)
604 repo.tag(name, node, message, local, user, date)
600 except Abort:
605 except Abort:
601 log.exception("Tag operation aborted")
606 log.exception("Tag operation aborted")
602 raise exceptions.AbortException()
607 raise exceptions.AbortException()
603
608
604 @reraise_safe_exceptions
609 @reraise_safe_exceptions
605 def tags(self, wire):
610 def tags(self, wire):
606 repo = self._factory.repo(wire)
611 repo = self._factory.repo(wire)
607 return repo.tags()
612 return repo.tags()
608
613
609 @reraise_safe_exceptions
614 @reraise_safe_exceptions
610 def update(self, wire, node=None, clean=False):
615 def update(self, wire, node=None, clean=False):
611 repo = self._factory.repo(wire)
616 repo = self._factory.repo(wire)
612 baseui = self._factory._create_config(wire['config'])
617 baseui = self._factory._create_config(wire['config'])
613 commands.update(baseui, repo, node=node, clean=clean)
618 commands.update(baseui, repo, node=node, clean=clean)
614
619
615 @reraise_safe_exceptions
620 @reraise_safe_exceptions
616 def identify(self, wire):
621 def identify(self, wire):
617 repo = self._factory.repo(wire)
622 repo = self._factory.repo(wire)
618 baseui = self._factory._create_config(wire['config'])
623 baseui = self._factory._create_config(wire['config'])
619 output = io.BytesIO()
624 output = io.BytesIO()
620 baseui.write = output.write
625 baseui.write = output.write
621 # This is required to get a full node id
626 # This is required to get a full node id
622 baseui.debugflag = True
627 baseui.debugflag = True
623 commands.identify(baseui, repo, id=True)
628 commands.identify(baseui, repo, id=True)
624
629
625 return output.getvalue()
630 return output.getvalue()
626
631
627 @reraise_safe_exceptions
632 @reraise_safe_exceptions
628 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
633 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
629 hooks=True):
634 hooks=True):
630 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
631 baseui = self._factory._create_config(wire['config'], hooks=hooks)
636 baseui = self._factory._create_config(wire['config'], hooks=hooks)
632
637
633         # Mercurial internally has a lot of logic that checks ONLY whether
638         # Mercurial internally has a lot of logic that checks ONLY whether
634         # an option is defined, so we pass only the options that are set
639         # an option is defined, so we pass only the options that are set
635 opts = {}
640 opts = {}
636 if bookmark:
641 if bookmark:
637 opts['bookmark'] = bookmark
642 opts['bookmark'] = bookmark
638 if branch:
643 if branch:
639 opts['branch'] = branch
644 opts['branch'] = branch
640 if revision:
645 if revision:
641 opts['rev'] = revision
646 opts['rev'] = revision
642
647
643 commands.pull(baseui, repo, source, **opts)
648 commands.pull(baseui, repo, source, **opts)
644
649
645 @reraise_safe_exceptions
650 @reraise_safe_exceptions
646 def heads(self, wire, branch=None):
651 def heads(self, wire, branch=None):
647 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
648 baseui = self._factory._create_config(wire['config'])
653 baseui = self._factory._create_config(wire['config'])
649 output = io.BytesIO()
654 output = io.BytesIO()
650
655
651 def write(data, **unused_kwargs):
656 def write(data, **unused_kwargs):
652 output.write(data)
657 output.write(data)
653
658
654 baseui.write = write
659 baseui.write = write
655 if branch:
660 if branch:
656 args = [branch]
661 args = [branch]
657 else:
662 else:
658 args = []
663 args = []
659 commands.heads(baseui, repo, template='{node} ', *args)
664 commands.heads(baseui, repo, template='{node} ', *args)
660
665
661 return output.getvalue()
666 return output.getvalue()
662
667
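Because each head is rendered through the template '{node} ', the buffer returned above is simply whitespace-separated full commit hashes. A hypothetical consumer (names `hg_remote` and `wire` are assumed, matching the conventions of this module) might do:

    raw = hg_remote.heads(wire)     # e.g. 'd4c1...<40 hex>  9e2f...<40 hex> ' (made-up values)
    head_hashes = raw.split()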
663 @reraise_safe_exceptions
668 @reraise_safe_exceptions
664 def ancestor(self, wire, revision1, revision2):
669 def ancestor(self, wire, revision1, revision2):
665 repo = self._factory.repo(wire)
670 repo = self._factory.repo(wire)
666 baseui = self._factory._create_config(wire['config'])
671 baseui = self._factory._create_config(wire['config'])
667 output = io.BytesIO()
672 output = io.BytesIO()
668 baseui.write = output.write
673 baseui.write = output.write
669 commands.debugancestor(baseui, repo, revision1, revision2)
674 commands.debugancestor(baseui, repo, revision1, revision2)
670
675
671 return output.getvalue()
676 return output.getvalue()
672
677
673 @reraise_safe_exceptions
678 @reraise_safe_exceptions
674 def push(self, wire, revisions, dest_path, hooks=True,
679 def push(self, wire, revisions, dest_path, hooks=True,
675 push_branches=False):
680 push_branches=False):
676 repo = self._factory.repo(wire)
681 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'], hooks=hooks)
682 baseui = self._factory._create_config(wire['config'], hooks=hooks)
678 commands.push(baseui, repo, dest=dest_path, rev=revisions,
683 commands.push(baseui, repo, dest=dest_path, rev=revisions,
679 new_branch=push_branches)
684 new_branch=push_branches)
680
685
681 @reraise_safe_exceptions
686 @reraise_safe_exceptions
682 def merge(self, wire, revision):
687 def merge(self, wire, revision):
683 repo = self._factory.repo(wire)
688 repo = self._factory.repo(wire)
684 baseui = self._factory._create_config(wire['config'])
689 baseui = self._factory._create_config(wire['config'])
685 repo.ui.setconfig('ui', 'merge', 'internal:dump')
690 repo.ui.setconfig('ui', 'merge', 'internal:dump')
686
691
687         # When subrepositories are in use, Mercurial prompts the user in case
692         # When subrepositories are in use, Mercurial prompts the user in case
688         # of merge conflicts or differing subrepository sources. By setting
693         # of merge conflicts or differing subrepository sources. By setting
689         # the interactive flag to `False`, Mercurial doesn't prompt the user
694         # the interactive flag to `False`, Mercurial doesn't prompt the user
690         # but instead uses a default value.
695         # but instead uses a default value.
691 repo.ui.setconfig('ui', 'interactive', False)
696 repo.ui.setconfig('ui', 'interactive', False)
692
697
693 commands.merge(baseui, repo, rev=revision)
698 commands.merge(baseui, repo, rev=revision)
694
699
695 @reraise_safe_exceptions
700 @reraise_safe_exceptions
696 def commit(self, wire, message, username):
701 def commit(self, wire, message, username):
697 repo = self._factory.repo(wire)
702 repo = self._factory.repo(wire)
698 baseui = self._factory._create_config(wire['config'])
703 baseui = self._factory._create_config(wire['config'])
699 repo.ui.setconfig('ui', 'username', username)
704 repo.ui.setconfig('ui', 'username', username)
700 commands.commit(baseui, repo, message=message)
705 commands.commit(baseui, repo, message=message)
701
706
702 @reraise_safe_exceptions
707 @reraise_safe_exceptions
703 def rebase(self, wire, source=None, dest=None, abort=False):
708 def rebase(self, wire, source=None, dest=None, abort=False):
704 repo = self._factory.repo(wire)
709 repo = self._factory.repo(wire)
705 baseui = self._factory._create_config(wire['config'])
710 baseui = self._factory._create_config(wire['config'])
706 repo.ui.setconfig('ui', 'merge', 'internal:dump')
711 repo.ui.setconfig('ui', 'merge', 'internal:dump')
707 rebase.rebase(
712 rebase.rebase(
708 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
713 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
709
714
710 @reraise_safe_exceptions
715 @reraise_safe_exceptions
711 def bookmark(self, wire, bookmark, revision=None):
716 def bookmark(self, wire, bookmark, revision=None):
712 repo = self._factory.repo(wire)
717 repo = self._factory.repo(wire)
713 baseui = self._factory._create_config(wire['config'])
718 baseui = self._factory._create_config(wire['config'])
714 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
719 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,591 +1,625 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2016 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26
26
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.delta
29 import svn.delta
30 import svn.diff
30 import svn.diff
31 import svn.fs
31 import svn.fs
32 import svn.repos
32 import svn.repos
33
33
34 from vcsserver import svn_diff
34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory
36 from vcsserver.base import RepoFactory
36
37
37
38
38 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
39
40
40
41
41 # Set of svn compatible version flags.
42 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
43 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
44 svn_compatible_versions = set([
44 'pre-1.4-compatible',
45 'pre-1.4-compatible',
45 'pre-1.5-compatible',
46 'pre-1.5-compatible',
46 'pre-1.6-compatible',
47 'pre-1.6-compatible',
47 'pre-1.8-compatible',
48 'pre-1.8-compatible',
48 ])
49 ])
49
50
50
51
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
55 try:
56 return func(*args, **kwargs)
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
59                 log.exception("Unhandled exception in svn remote call")
60 raise_from_original(exceptions.UnhandledException)
61 raise
62 return wrapper
63
64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
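Two notes on the helpers above: raise_from_original calls sys.exc_info(), which requires an `import sys` that does not appear in the import block at the top of this file, and the three-argument `raise new_type(...), None, traceback` form is Python 2 only. A minimal sketch of how the decorator behaves (hypothetical class and method, not part of this module):

    class _Demo(object):
        @reraise_safe_exceptions
        def boom(self):
            raise ValueError('low-level svn failure')

    # _Demo().boom() now surfaces as exceptions.UnhandledException carrying the
    # original args and traceback, so the RPC layer only ever sees exception
    # types it knows how to serialize; exceptions already tagged with a
    # _vcs_kind attribute pass through unchanged.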
51 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
52
77
53 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
54 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
55 if create:
80 if create:
56 fs_config = {}
81 fs_config = {}
57 if compatible_version:
82 if compatible_version:
58 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
59 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
60 .format(compatible_version))
85 .format(compatible_version))
61 log.debug('Create SVN repo with compatible version "%s"',
86 log.debug('Create SVN repo with compatible version "%s"',
62 compatible_version)
87 compatible_version)
63 fs_config[compatible_version] = '1'
88 fs_config[compatible_version] = '1'
64 repo = svn.repos.create(path, "", "", None, fs_config)
89 repo = svn.repos.create(path, "", "", None, fs_config)
65 else:
90 else:
66 repo = svn.repos.open(path)
91 repo = svn.repos.open(path)
67 return repo
92 return repo
68
93
69 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
70 def create_new_repo():
95 def create_new_repo():
71 return self._create_repo(wire, create, compatible_version)
96 return self._create_repo(wire, create, compatible_version)
72
97
73 return self._repo(wire, create_new_repo)
98 return self._repo(wire, create_new_repo)
74
99
75
100
76
101
77 NODE_TYPE_MAPPING = {
102 NODE_TYPE_MAPPING = {
78 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_file: 'file',
79 svn.core.svn_node_dir: 'dir',
104 svn.core.svn_node_dir: 'dir',
80 }
105 }
81
106
82
107
83 class SvnRemote(object):
108 class SvnRemote(object):
84
109
85 def __init__(self, factory, hg_factory=None):
110 def __init__(self, factory, hg_factory=None):
86 self._factory = factory
111 self._factory = factory
87 # TODO: Remove once we do not use internal Mercurial objects anymore
112 # TODO: Remove once we do not use internal Mercurial objects anymore
88 # for subversion
113 # for subversion
89 self._hg_factory = hg_factory
114 self._hg_factory = hg_factory
90
115
116 @reraise_safe_exceptions
117 def discover_svn_version(self):
118 try:
119 import svn.core
120 svn_ver = svn.core.SVN_VERSION
121 except ImportError:
122 svn_ver = None
123 return svn_ver
124
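The method above simply reports the version of the Subversion bindings the server runs against; a hypothetical call site (name `svn_remote` assumed):

    version = svn_remote.discover_svn_version()   # e.g. '1.9.7' (made-up value), or None
    # None signals that the svn bindings could not be imported at all.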
91 def check_url(self, url, config_items):
125 def check_url(self, url, config_items):
92         # this import can raise an exception if hgsubversion is not installed, but we detect this
126         # this import can raise an exception if hgsubversion is not installed, but we detect this
93 from hgsubversion import svnrepo
127 from hgsubversion import svnrepo
94
128
95 baseui = self._hg_factory._create_config(config_items)
129 baseui = self._hg_factory._create_config(config_items)
96         # the uuid function returns a valid UUID only from a proper repo,
130         # the uuid function returns a valid UUID only from a proper repo,
97         # otherwise it raises an exception
131         # otherwise it raises an exception
98 try:
132 try:
99 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 svnrepo.svnremoterepo(baseui, url).svn.uuid
100         except Exception:
134         except Exception:
101 log.debug("Invalid svn url: %s", url)
135 log.debug("Invalid svn url: %s", url)
102 raise URLError(
136 raise URLError(
103 '"%s" is not a valid Subversion source url.' % (url, ))
137 '"%s" is not a valid Subversion source url.' % (url, ))
104 return True
138 return True
105
139
106 def is_path_valid_repository(self, wire, path):
140 def is_path_valid_repository(self, wire, path):
107 try:
141 try:
108 svn.repos.open(path)
142 svn.repos.open(path)
109 except svn.core.SubversionException:
143 except svn.core.SubversionException:
110 log.debug("Invalid Subversion path %s", path)
144 log.debug("Invalid Subversion path %s", path)
111 return False
145 return False
112 return True
146 return True
113
147
114 def lookup(self, wire, revision):
148 def lookup(self, wire, revision):
115 if revision not in [-1, None, 'HEAD']:
149 if revision not in [-1, None, 'HEAD']:
116 raise NotImplementedError
150 raise NotImplementedError
117 repo = self._factory.repo(wire)
151 repo = self._factory.repo(wire)
118 fs_ptr = svn.repos.fs(repo)
152 fs_ptr = svn.repos.fs(repo)
119 head = svn.fs.youngest_rev(fs_ptr)
153 head = svn.fs.youngest_rev(fs_ptr)
120 return head
154 return head
121
155
122 def lookup_interval(self, wire, start_ts, end_ts):
156 def lookup_interval(self, wire, start_ts, end_ts):
123 repo = self._factory.repo(wire)
157 repo = self._factory.repo(wire)
124 fsobj = svn.repos.fs(repo)
158 fsobj = svn.repos.fs(repo)
125 start_rev = None
159 start_rev = None
126 end_rev = None
160 end_rev = None
127 if start_ts:
161 if start_ts:
128 start_ts_svn = apr_time_t(start_ts)
162 start_ts_svn = apr_time_t(start_ts)
129 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
130 else:
164 else:
131 start_rev = 1
165 start_rev = 1
132 if end_ts:
166 if end_ts:
133 end_ts_svn = apr_time_t(end_ts)
167 end_ts_svn = apr_time_t(end_ts)
134 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
135 else:
169 else:
136 end_rev = svn.fs.youngest_rev(fsobj)
170 end_rev = svn.fs.youngest_rev(fsobj)
137 return start_rev, end_rev
171 return start_rev, end_rev
138
172
139 def revision_properties(self, wire, revision):
173 def revision_properties(self, wire, revision):
140 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
141 fs_ptr = svn.repos.fs(repo)
175 fs_ptr = svn.repos.fs(repo)
142 return svn.fs.revision_proplist(fs_ptr, revision)
176 return svn.fs.revision_proplist(fs_ptr, revision)
143
177
144 def revision_changes(self, wire, revision):
178 def revision_changes(self, wire, revision):
145
179
146 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
147 fsobj = svn.repos.fs(repo)
181 fsobj = svn.repos.fs(repo)
148 rev_root = svn.fs.revision_root(fsobj, revision)
182 rev_root = svn.fs.revision_root(fsobj, revision)
149
183
150 editor = svn.repos.ChangeCollector(fsobj, rev_root)
184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
151 editor_ptr, editor_baton = svn.delta.make_editor(editor)
185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
152 base_dir = ""
186 base_dir = ""
153 send_deltas = False
187 send_deltas = False
154 svn.repos.replay2(
188 svn.repos.replay2(
155 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
156 editor_ptr, editor_baton, None)
190 editor_ptr, editor_baton, None)
157
191
158 added = []
192 added = []
159 changed = []
193 changed = []
160 removed = []
194 removed = []
161
195
162 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
163 for path, change in editor.changes.iteritems():
197 for path, change in editor.changes.iteritems():
164 # TODO: Decide what to do with directory nodes. Subversion can add
198 # TODO: Decide what to do with directory nodes. Subversion can add
165 # empty directories.
199 # empty directories.
166 if change.item_kind == svn.core.svn_node_dir:
200 if change.item_kind == svn.core.svn_node_dir:
167 continue
201 continue
168 if change.action == svn.repos.CHANGE_ACTION_ADD:
202 if change.action == svn.repos.CHANGE_ACTION_ADD:
169 added.append(path)
203 added.append(path)
170 elif change.action == svn.repos.CHANGE_ACTION_MODIFY:
204 elif change.action == svn.repos.CHANGE_ACTION_MODIFY:
171 changed.append(path)
205 changed.append(path)
172 elif change.action == svn.repos.CHANGE_ACTION_DELETE:
206 elif change.action == svn.repos.CHANGE_ACTION_DELETE:
173 removed.append(path)
207 removed.append(path)
174 else:
208 else:
175 raise NotImplementedError(
209 raise NotImplementedError(
176 "Action %s not supported on path %s" % (
210 "Action %s not supported on path %s" % (
177 change.action, path))
211 change.action, path))
178
212
179 changes = {
213 changes = {
180 'added': added,
214 'added': added,
181 'changed': changed,
215 'changed': changed,
182 'removed': removed,
216 'removed': removed,
183 }
217 }
184 return changes
218 return changes
185
219
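Illustrative shape of the dict returned above, for a hypothetical revision that adds, edits and deletes one file each (directory nodes are skipped):

    # {
    #     'added':   ['trunk/docs/new.txt'],
    #     'changed': ['trunk/src/app.py'],
    #     'removed': ['trunk/old/legacy.cfg'],
    # }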
186 def node_history(self, wire, path, revision, limit):
220 def node_history(self, wire, path, revision, limit):
187 cross_copies = False
221 cross_copies = False
188 repo = self._factory.repo(wire)
222 repo = self._factory.repo(wire)
189 fsobj = svn.repos.fs(repo)
223 fsobj = svn.repos.fs(repo)
190 rev_root = svn.fs.revision_root(fsobj, revision)
224 rev_root = svn.fs.revision_root(fsobj, revision)
191
225
192 history_revisions = []
226 history_revisions = []
193 history = svn.fs.node_history(rev_root, path)
227 history = svn.fs.node_history(rev_root, path)
194 history = svn.fs.history_prev(history, cross_copies)
228 history = svn.fs.history_prev(history, cross_copies)
195 while history:
229 while history:
196 __, node_revision = svn.fs.history_location(history)
230 __, node_revision = svn.fs.history_location(history)
197 history_revisions.append(node_revision)
231 history_revisions.append(node_revision)
198 if limit and len(history_revisions) >= limit:
232 if limit and len(history_revisions) >= limit:
199 break
233 break
200 history = svn.fs.history_prev(history, cross_copies)
234 history = svn.fs.history_prev(history, cross_copies)
201 return history_revisions
235 return history_revisions
202
236
203 def node_properties(self, wire, path, revision):
237 def node_properties(self, wire, path, revision):
204 repo = self._factory.repo(wire)
238 repo = self._factory.repo(wire)
205 fsobj = svn.repos.fs(repo)
239 fsobj = svn.repos.fs(repo)
206 rev_root = svn.fs.revision_root(fsobj, revision)
240 rev_root = svn.fs.revision_root(fsobj, revision)
207 return svn.fs.node_proplist(rev_root, path)
241 return svn.fs.node_proplist(rev_root, path)
208
242
209 def file_annotate(self, wire, path, revision):
243 def file_annotate(self, wire, path, revision):
210 abs_path = 'file://' + urllib.pathname2url(
244 abs_path = 'file://' + urllib.pathname2url(
211 vcspath.join(wire['path'], path))
245 vcspath.join(wire['path'], path))
212 file_uri = svn.core.svn_path_canonicalize(abs_path)
246 file_uri = svn.core.svn_path_canonicalize(abs_path)
213
247
214 start_rev = svn_opt_revision_value_t(0)
248 start_rev = svn_opt_revision_value_t(0)
215 peg_rev = svn_opt_revision_value_t(revision)
249 peg_rev = svn_opt_revision_value_t(revision)
216 end_rev = peg_rev
250 end_rev = peg_rev
217
251
218 annotations = []
252 annotations = []
219
253
220 def receiver(line_no, revision, author, date, line, pool):
254 def receiver(line_no, revision, author, date, line, pool):
221 annotations.append((line_no, revision, line))
255 annotations.append((line_no, revision, line))
222
256
223 # TODO: Cannot use blame5, missing typemap function in the swig code
257 # TODO: Cannot use blame5, missing typemap function in the swig code
224 try:
258 try:
225 svn.client.blame2(
259 svn.client.blame2(
226 file_uri, peg_rev, start_rev, end_rev,
260 file_uri, peg_rev, start_rev, end_rev,
227 receiver, svn.client.create_context())
261 receiver, svn.client.create_context())
228 except svn.core.SubversionException as exc:
262 except svn.core.SubversionException as exc:
229 log.exception("Error during blame operation.")
263 log.exception("Error during blame operation.")
230 raise Exception(
264 raise Exception(
231 "Blame not supported or file does not exist at path %s. "
265 "Blame not supported or file does not exist at path %s. "
232 "Error %s." % (path, exc))
266 "Error %s." % (path, exc))
233
267
234 return annotations
268 return annotations
235
269
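Each entry collected by the receiver above is a (line_no, revision, line) tuple, so a hypothetical result for a two-line file looks like:

    # [
    #     (0, 3, 'first line of the file\n'),
    #     (1, 7, 'second line, last touched in r7\n'),
    # ]
    # (all values made up for illustration)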
236 def get_node_type(self, wire, path, rev=None):
270 def get_node_type(self, wire, path, rev=None):
237 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
238 fs_ptr = svn.repos.fs(repo)
272 fs_ptr = svn.repos.fs(repo)
239 if rev is None:
273 if rev is None:
240 rev = svn.fs.youngest_rev(fs_ptr)
274 rev = svn.fs.youngest_rev(fs_ptr)
241 root = svn.fs.revision_root(fs_ptr, rev)
275 root = svn.fs.revision_root(fs_ptr, rev)
242 node = svn.fs.check_path(root, path)
276 node = svn.fs.check_path(root, path)
243 return NODE_TYPE_MAPPING.get(node, None)
277 return NODE_TYPE_MAPPING.get(node, None)
244
278
245 def get_nodes(self, wire, path, revision=None):
279 def get_nodes(self, wire, path, revision=None):
246 repo = self._factory.repo(wire)
280 repo = self._factory.repo(wire)
247 fsobj = svn.repos.fs(repo)
281 fsobj = svn.repos.fs(repo)
248 if revision is None:
282 if revision is None:
249 revision = svn.fs.youngest_rev(fsobj)
283 revision = svn.fs.youngest_rev(fsobj)
250 root = svn.fs.revision_root(fsobj, revision)
284 root = svn.fs.revision_root(fsobj, revision)
251 entries = svn.fs.dir_entries(root, path)
285 entries = svn.fs.dir_entries(root, path)
252 result = []
286 result = []
253 for entry_path, entry_info in entries.iteritems():
287 for entry_path, entry_info in entries.iteritems():
254 result.append(
288 result.append(
255 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
289 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
256 return result
290 return result
257
291
258 def get_file_content(self, wire, path, rev=None):
292 def get_file_content(self, wire, path, rev=None):
259 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
260 fsobj = svn.repos.fs(repo)
294 fsobj = svn.repos.fs(repo)
261 if rev is None:
295 if rev is None:
262 rev = svn.fs.youngest_revision(fsobj)
296 rev = svn.fs.youngest_revision(fsobj)
263 root = svn.fs.revision_root(fsobj, rev)
297 root = svn.fs.revision_root(fsobj, rev)
264 content = svn.core.Stream(svn.fs.file_contents(root, path))
298 content = svn.core.Stream(svn.fs.file_contents(root, path))
265 return content.read()
299 return content.read()
266
300
267 def get_file_size(self, wire, path, revision=None):
301 def get_file_size(self, wire, path, revision=None):
268 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
269 fsobj = svn.repos.fs(repo)
303 fsobj = svn.repos.fs(repo)
270 if revision is None:
304 if revision is None:
271 revision = svn.fs.youngest_revision(fsobj)
305 revision = svn.fs.youngest_revision(fsobj)
272 root = svn.fs.revision_root(fsobj, revision)
306 root = svn.fs.revision_root(fsobj, revision)
273 size = svn.fs.file_length(root, path)
307 size = svn.fs.file_length(root, path)
274 return size
308 return size
275
309
276 def create_repository(self, wire, compatible_version=None):
310 def create_repository(self, wire, compatible_version=None):
277 log.info('Creating Subversion repository in path "%s"', wire['path'])
311 log.info('Creating Subversion repository in path "%s"', wire['path'])
278 self._factory.repo(wire, create=True,
312 self._factory.repo(wire, create=True,
279 compatible_version=compatible_version)
313 compatible_version=compatible_version)
280
314
281 def import_remote_repository(self, wire, src_url):
315 def import_remote_repository(self, wire, src_url):
282 repo_path = wire['path']
316 repo_path = wire['path']
283 if not self.is_path_valid_repository(wire, repo_path):
317 if not self.is_path_valid_repository(wire, repo_path):
284 raise Exception(
318 raise Exception(
285 "Path %s is not a valid Subversion repository." % repo_path)
319 "Path %s is not a valid Subversion repository." % repo_path)
286 # TODO: johbo: URL checks ?
320 # TODO: johbo: URL checks ?
287 rdump = subprocess.Popen(
321 rdump = subprocess.Popen(
288 ['svnrdump', 'dump', '--non-interactive', src_url],
322 ['svnrdump', 'dump', '--non-interactive', src_url],
289 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
323 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
290 load = subprocess.Popen(
324 load = subprocess.Popen(
291 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
325 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
292
326
293 # TODO: johbo: This can be a very long operation, might be better
327 # TODO: johbo: This can be a very long operation, might be better
294 # to track some kind of status and provide an api to check if the
328 # to track some kind of status and provide an api to check if the
295 # import is done.
329 # import is done.
296 rdump.wait()
330 rdump.wait()
297 load.wait()
331 load.wait()
298
332
299 if rdump.returncode != 0:
333 if rdump.returncode != 0:
300 errors = rdump.stderr.read()
334 errors = rdump.stderr.read()
301 log.error('svnrdump dump failed: statuscode %s: message: %s',
335 log.error('svnrdump dump failed: statuscode %s: message: %s',
302 rdump.returncode, errors)
336 rdump.returncode, errors)
303 reason = 'UNKNOWN'
337 reason = 'UNKNOWN'
304 if 'svnrdump: E230001:' in errors:
338 if 'svnrdump: E230001:' in errors:
305 reason = 'INVALID_CERTIFICATE'
339 reason = 'INVALID_CERTIFICATE'
306 raise Exception(
340 raise Exception(
307 'Failed to dump the remote repository from %s.' % src_url,
341 'Failed to dump the remote repository from %s.' % src_url,
308 reason)
342 reason)
309 if load.returncode != 0:
343 if load.returncode != 0:
310 raise Exception(
344 raise Exception(
311 'Failed to load the dump of remote repository from %s.' %
345 'Failed to load the dump of remote repository from %s.' %
312 (src_url, ))
346 (src_url, ))
313
347
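The two subprocesses above amount to the following shell pipeline (URL and path are placeholders), with svnrdump's stderr captured so that certificate failures (E230001) can be reported distinctly:

    svnrdump dump --non-interactive https://svn.example.com/project | svnadmin load /path/to/repo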
314 def commit(self, wire, message, author, timestamp, updated, removed):
348 def commit(self, wire, message, author, timestamp, updated, removed):
315 assert isinstance(message, str)
349 assert isinstance(message, str)
316 assert isinstance(author, str)
350 assert isinstance(author, str)
317
351
318 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
319 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
320
354
321 rev = svn.fs.youngest_rev(fsobj)
355 rev = svn.fs.youngest_rev(fsobj)
322 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
356 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
323 txn_root = svn.fs.txn_root(txn)
357 txn_root = svn.fs.txn_root(txn)
324
358
325 for node in updated:
359 for node in updated:
326 TxnNodeProcessor(node, txn_root).update()
360 TxnNodeProcessor(node, txn_root).update()
327 for node in removed:
361 for node in removed:
328 TxnNodeProcessor(node, txn_root).remove()
362 TxnNodeProcessor(node, txn_root).remove()
329
363
330 commit_id = svn.repos.fs_commit_txn(repo, txn)
364 commit_id = svn.repos.fs_commit_txn(repo, txn)
331
365
332 if timestamp:
366 if timestamp:
333 apr_time = apr_time_t(timestamp)
367 apr_time = apr_time_t(timestamp)
334 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
368 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
335 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
369 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
336
370
337 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
371 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
338 return commit_id
372 return commit_id
339
373
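For reference, each entry of `updated`/`removed` is a plain dict consumed by TxnNodeProcessor further down; a minimal hypothetical node:

    node = {
        'path': 'trunk/README.txt',                 # must be a str (asserted by TxnNodeProcessor)
        'content': 'new file body\n',               # must be a str; needed for updated nodes
        'properties': {'svn:eol-style': 'native'},  # optional
    }
    # commit(wire, 'add readme', 'jdoe', timestamp=None, updated=[node], removed=[])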
340 def diff(self, wire, rev1, rev2, path1=None, path2=None,
374 def diff(self, wire, rev1, rev2, path1=None, path2=None,
341 ignore_whitespace=False, context=3):
375 ignore_whitespace=False, context=3):
342 wire.update(cache=False)
376 wire.update(cache=False)
343 repo = self._factory.repo(wire)
377 repo = self._factory.repo(wire)
344 diff_creator = SvnDiffer(
378 diff_creator = SvnDiffer(
345 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
379 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
346 return diff_creator.generate_diff()
380 return diff_creator.generate_diff()
347
381
348
382
349 class SvnDiffer(object):
383 class SvnDiffer(object):
350 """
384 """
351 Utility to create diffs based on difflib and the Subversion api
385 Utility to create diffs based on difflib and the Subversion api
352 """
386 """
353
387
354 binary_content = False
388 binary_content = False
355
389
356 def __init__(
390 def __init__(
357 self, repo, src_rev, src_path, tgt_rev, tgt_path,
391 self, repo, src_rev, src_path, tgt_rev, tgt_path,
358 ignore_whitespace, context):
392 ignore_whitespace, context):
359 self.repo = repo
393 self.repo = repo
360 self.ignore_whitespace = ignore_whitespace
394 self.ignore_whitespace = ignore_whitespace
361 self.context = context
395 self.context = context
362
396
363 fsobj = svn.repos.fs(repo)
397 fsobj = svn.repos.fs(repo)
364
398
365 self.tgt_rev = tgt_rev
399 self.tgt_rev = tgt_rev
366 self.tgt_path = tgt_path or ''
400 self.tgt_path = tgt_path or ''
367 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
401 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
368 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
402 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
369
403
370 self.src_rev = src_rev
404 self.src_rev = src_rev
371 self.src_path = src_path or self.tgt_path
405 self.src_path = src_path or self.tgt_path
372 self.src_root = svn.fs.revision_root(fsobj, src_rev)
406 self.src_root = svn.fs.revision_root(fsobj, src_rev)
373 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
407 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
374
408
375 self._validate()
409 self._validate()
376
410
377 def _validate(self):
411 def _validate(self):
378 if (self.tgt_kind != svn.core.svn_node_none and
412 if (self.tgt_kind != svn.core.svn_node_none and
379 self.src_kind != svn.core.svn_node_none and
413 self.src_kind != svn.core.svn_node_none and
380 self.src_kind != self.tgt_kind):
414 self.src_kind != self.tgt_kind):
381 # TODO: johbo: proper error handling
415 # TODO: johbo: proper error handling
382 raise Exception(
416 raise Exception(
383 "Source and target are not compatible for diff generation. "
417 "Source and target are not compatible for diff generation. "
384 "Source type: %s, target type: %s" %
418 "Source type: %s, target type: %s" %
385 (self.src_kind, self.tgt_kind))
419 (self.src_kind, self.tgt_kind))
386
420
387 def generate_diff(self):
421 def generate_diff(self):
388 buf = StringIO.StringIO()
422 buf = StringIO.StringIO()
389 if self.tgt_kind == svn.core.svn_node_dir:
423 if self.tgt_kind == svn.core.svn_node_dir:
390 self._generate_dir_diff(buf)
424 self._generate_dir_diff(buf)
391 else:
425 else:
392 self._generate_file_diff(buf)
426 self._generate_file_diff(buf)
393 return buf.getvalue()
427 return buf.getvalue()
394
428
395 def _generate_dir_diff(self, buf):
429 def _generate_dir_diff(self, buf):
396 editor = DiffChangeEditor()
430 editor = DiffChangeEditor()
397 editor_ptr, editor_baton = svn.delta.make_editor(editor)
431 editor_ptr, editor_baton = svn.delta.make_editor(editor)
398 svn.repos.dir_delta2(
432 svn.repos.dir_delta2(
399 self.src_root,
433 self.src_root,
400 self.src_path,
434 self.src_path,
401 '', # src_entry
435 '', # src_entry
402 self.tgt_root,
436 self.tgt_root,
403 self.tgt_path,
437 self.tgt_path,
404 editor_ptr, editor_baton,
438 editor_ptr, editor_baton,
405 authorization_callback_allow_all,
439 authorization_callback_allow_all,
406 False, # text_deltas
440 False, # text_deltas
407 svn.core.svn_depth_infinity, # depth
441 svn.core.svn_depth_infinity, # depth
408 False, # entry_props
442 False, # entry_props
409 False, # ignore_ancestry
443 False, # ignore_ancestry
410 )
444 )
411
445
412 for path, __, change in sorted(editor.changes):
446 for path, __, change in sorted(editor.changes):
413 self._generate_node_diff(
447 self._generate_node_diff(
414 buf, change, path, self.tgt_path, path, self.src_path)
448 buf, change, path, self.tgt_path, path, self.src_path)
415
449
416 def _generate_file_diff(self, buf):
450 def _generate_file_diff(self, buf):
417 change = None
451 change = None
418 if self.src_kind == svn.core.svn_node_none:
452 if self.src_kind == svn.core.svn_node_none:
419 change = "add"
453 change = "add"
420 elif self.tgt_kind == svn.core.svn_node_none:
454 elif self.tgt_kind == svn.core.svn_node_none:
421 change = "delete"
455 change = "delete"
422 tgt_base, tgt_path = vcspath.split(self.tgt_path)
456 tgt_base, tgt_path = vcspath.split(self.tgt_path)
423 src_base, src_path = vcspath.split(self.src_path)
457 src_base, src_path = vcspath.split(self.src_path)
424 self._generate_node_diff(
458 self._generate_node_diff(
425 buf, change, tgt_path, tgt_base, src_path, src_base)
459 buf, change, tgt_path, tgt_base, src_path, src_base)
426
460
427 def _generate_node_diff(
461 def _generate_node_diff(
428 self, buf, change, tgt_path, tgt_base, src_path, src_base):
462 self, buf, change, tgt_path, tgt_base, src_path, src_base):
429 tgt_full_path = vcspath.join(tgt_base, tgt_path)
463 tgt_full_path = vcspath.join(tgt_base, tgt_path)
430 src_full_path = vcspath.join(src_base, src_path)
464 src_full_path = vcspath.join(src_base, src_path)
431
465
432 self.binary_content = False
466 self.binary_content = False
433 mime_type = self._get_mime_type(tgt_full_path)
467 mime_type = self._get_mime_type(tgt_full_path)
434 if mime_type and not mime_type.startswith('text'):
468 if mime_type and not mime_type.startswith('text'):
435 self.binary_content = True
469 self.binary_content = True
436 buf.write("=" * 67 + '\n')
470 buf.write("=" * 67 + '\n')
437 buf.write("Cannot display: file marked as a binary type.\n")
471 buf.write("Cannot display: file marked as a binary type.\n")
438 buf.write("svn:mime-type = %s\n" % mime_type)
472 buf.write("svn:mime-type = %s\n" % mime_type)
439 buf.write("Index: %s\n" % (tgt_path, ))
473 buf.write("Index: %s\n" % (tgt_path, ))
440 buf.write("=" * 67 + '\n')
474 buf.write("=" * 67 + '\n')
441 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
475 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
442 'tgt_path': tgt_path})
476 'tgt_path': tgt_path})
443
477
444 if change == 'add':
478 if change == 'add':
445 # TODO: johbo: SVN is missing a zero here compared to git
479 # TODO: johbo: SVN is missing a zero here compared to git
446 buf.write("new file mode 10644\n")
480 buf.write("new file mode 10644\n")
447 buf.write("--- /dev/null\t(revision 0)\n")
481 buf.write("--- /dev/null\t(revision 0)\n")
448 src_lines = []
482 src_lines = []
449 else:
483 else:
450 if change == 'delete':
484 if change == 'delete':
451 buf.write("deleted file mode 10644\n")
485 buf.write("deleted file mode 10644\n")
452 buf.write("--- a/%s\t(revision %s)\n" % (
486 buf.write("--- a/%s\t(revision %s)\n" % (
453 src_path, self.src_rev))
487 src_path, self.src_rev))
454 src_lines = self._svn_readlines(self.src_root, src_full_path)
488 src_lines = self._svn_readlines(self.src_root, src_full_path)
455
489
456 if change == 'delete':
490 if change == 'delete':
457 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
491 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
458 tgt_lines = []
492 tgt_lines = []
459 else:
493 else:
460 buf.write("+++ b/%s\t(revision %s)\n" % (
494 buf.write("+++ b/%s\t(revision %s)\n" % (
461 tgt_path, self.tgt_rev))
495 tgt_path, self.tgt_rev))
462 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
496 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
463
497
464 if not self.binary_content:
498 if not self.binary_content:
465 udiff = svn_diff.unified_diff(
499 udiff = svn_diff.unified_diff(
466 src_lines, tgt_lines, context=self.context,
500 src_lines, tgt_lines, context=self.context,
467 ignore_blank_lines=self.ignore_whitespace,
501 ignore_blank_lines=self.ignore_whitespace,
468 ignore_case=False,
502 ignore_case=False,
469 ignore_space_changes=self.ignore_whitespace)
503 ignore_space_changes=self.ignore_whitespace)
470 buf.writelines(udiff)
504 buf.writelines(udiff)
471
505
472 def _get_mime_type(self, path):
506 def _get_mime_type(self, path):
473 try:
507 try:
474 mime_type = svn.fs.node_prop(
508 mime_type = svn.fs.node_prop(
475 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
509 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
476 except svn.core.SubversionException:
510 except svn.core.SubversionException:
477 mime_type = svn.fs.node_prop(
511 mime_type = svn.fs.node_prop(
478 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
512 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
479 return mime_type
513 return mime_type
480
514
481 def _svn_readlines(self, fs_root, node_path):
515 def _svn_readlines(self, fs_root, node_path):
482 if self.binary_content:
516 if self.binary_content:
483 return []
517 return []
484 node_kind = svn.fs.check_path(fs_root, node_path)
518 node_kind = svn.fs.check_path(fs_root, node_path)
485 if node_kind not in (
519 if node_kind not in (
486 svn.core.svn_node_file, svn.core.svn_node_symlink):
520 svn.core.svn_node_file, svn.core.svn_node_symlink):
487 return []
521 return []
488 content = svn.core.Stream(
522 content = svn.core.Stream(
489 svn.fs.file_contents(fs_root, node_path)).read()
523 svn.fs.file_contents(fs_root, node_path)).read()
490 return content.splitlines(True)
524 return content.splitlines(True)
491
525
492
526
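Pieced together from the writes in _generate_node_diff, the header emitted for a newly added text file looks roughly like this (path and revision are made up), followed by the unified hunks produced by svn_diff.unified_diff:

    diff --git a/docs/new.txt b/docs/new.txt
    new file mode 10644
    --- /dev/null	(revision 0)
    +++ b/docs/new.txt	(revision 42)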
493 class DiffChangeEditor(svn.delta.Editor):
527 class DiffChangeEditor(svn.delta.Editor):
494 """
528 """
495 Records changes between two given revisions
529 Records changes between two given revisions
496 """
530 """
497
531
498 def __init__(self):
532 def __init__(self):
499 self.changes = []
533 self.changes = []
500
534
501 def delete_entry(self, path, revision, parent_baton, pool=None):
535 def delete_entry(self, path, revision, parent_baton, pool=None):
502 self.changes.append((path, None, 'delete'))
536 self.changes.append((path, None, 'delete'))
503
537
504 def add_file(
538 def add_file(
505 self, path, parent_baton, copyfrom_path, copyfrom_revision,
539 self, path, parent_baton, copyfrom_path, copyfrom_revision,
506 file_pool=None):
540 file_pool=None):
507 self.changes.append((path, 'file', 'add'))
541 self.changes.append((path, 'file', 'add'))
508
542
509 def open_file(self, path, parent_baton, base_revision, file_pool=None):
543 def open_file(self, path, parent_baton, base_revision, file_pool=None):
510 self.changes.append((path, 'file', 'change'))
544 self.changes.append((path, 'file', 'change'))
511
545
512
546
513 def authorization_callback_allow_all(root, path, pool):
547 def authorization_callback_allow_all(root, path, pool):
514 return True
548 return True
515
549
516
550
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)


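# A minimal sketch (not part of the reviewed change) of how a commit
# implementation such as `SvnRemote.commit` is expected to drive the helper
# above. `txn_root` is assumed to be an open transaction root obtained
# elsewhere; the helper name and the node dict layout
# ({'path': ..., 'content': ..., 'properties': {...}}) follow the asserts
# and lookups in TxnNodeProcessor.
def _example_apply_nodes(txn_root, updated_nodes, removed_nodes=()):
    for node in updated_nodes:
        # Creates missing parent dirs, adds the file if needed, then writes
        # content and properties.
        TxnNodeProcessor(node, txn_root).update()
    for node in removed_nodes:
        # Deletes the node from the transaction root.
        TxnNodeProcessor(node, txn_root).remove()

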
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    return timestamp * 1E6

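# Worked example of the conversion above (values are illustrative):
# 1451606400 seconds (2016-01-01 00:00:00 UTC) becomes
# apr_time_t(1451606400) == 1451606400000000.0 microseconds.
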

def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
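

# A minimal usage sketch (not part of the reviewed change); the helper name
# is hypothetical. It pins an svn_opt_revision_t to a concrete revision
# number, the shape expected wherever the bindings take an "opt revision"
# argument.
def _example_pin_revision(num=42):
    revision = svn_opt_revision_value_t(num)
    assert revision.kind == svn.core.svn_opt_revision_number
    assert revision.value.number == num
    return revision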