vcsserver: implement set head ref and optimize deletion of filenodes
super-admin
r966:1ce1ca3f default
@@ -1,1257 +1,1281 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import collections
import logging
import os
import posixpath as vcspath
import re
import stat
import traceback
import urllib
import urllib2
from functools import wraps

import more_itertools
import pygit2
from pygit2 import Repository as LibGit2Repo
from pygit2 import index as LibGit2Index
from dulwich import index, objects
from dulwich.client import HttpGitClient, LocalGitClient
from dulwich.errors import (
    NotGitRepository, ChecksumMismatch, WrongObjectException,
    MissingCommitError, ObjectMissing, HangupException,
    UnexpectedCommandError)
from dulwich.repo import Repo as DulwichRepo
from dulwich.server import update_server_info

from vcsserver import exceptions, settings, subprocessio
from vcsserver.utils import safe_str, safe_int, safe_unicode
from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
from vcsserver.hgcompat import (
    hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
from vcsserver.git_lfs.lib import LFSOidStore
from vcsserver.vcs_base import RemoteBase

DIR_STAT = stat.S_IFDIR
FILE_MODE = stat.S_IFMT
GIT_LINK = objects.S_IFGITLINK
PEELED_REF_MARKER = '^{}'
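# PEELED_REF_MARKER is the suffix git shows in ref listings (e.g. ls-remote)
# for a peeled/dereferenced annotated tag, e.g. 'refs/tags/v1.0^{}'; such
# refs are skipped when syncing below.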


log = logging.getLogger(__name__)


def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    return value.decode(settings.WIRE_ENCODING)


def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
            exc = exceptions.LookupException(org_exc=e)
            raise exc(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            exc = exceptions.VcsException(org_exc=e)
            raise exc(safe_str(e))
        except Exception as e:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions; these are exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper


class Repo(DulwichRepo):
    """
    A wrapper for the dulwich Repo class.

    Since dulwich sometimes keeps .idx file descriptors open, this leads to
    "Too many open files" errors. We need to close all opened file descriptors
    once the repo object is destroyed.
    """
    def __del__(self):
        if hasattr(self, 'object_store'):
            self.close()


class Repository(LibGit2Repo):

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()


class GitFactory(RepoFactory):
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        if use_libgit2:
            return Repository(wire['path'])
        else:
            repo_path = str_to_dulwich(wire['path'])
            return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        return self.repo(wire, use_libgit2=True)


class GitRemote(RemoteBase):

    def __init__(self, factory):
        self._factory = factory
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }

    def _wire_to_config(self, wire):
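        # wire['config'] is a list of (section, option, value) tuples; flatten
        # them into a plain dict keyed by 'section_option', e.g.
        # [('section', 'option', 'value')] -> {'section_option': 'value'}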
        if 'config' in wire:
            return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
        return {}

    def _remote_conf(self, config):
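        # Extra '-c' options passed to git for remote operations, e.g.
        # ['-c', 'core.askpass=""', '-c', 'http.sslCAinfo=/path/to/ca-bundle']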
        params = [
            '-c', 'core.askpass=""',
        ]
        ssl_cert_dir = config.get('vcs_ssl_dir')
        if ssl_cert_dir:
            params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
        return params

    @reraise_safe_exceptions
    def discover_git_version(self):
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        prefix = 'git version'
        if stdout.startswith(prefix):
            stdout = stdout[len(prefix):]
        return stdout.strip()

    @reraise_safe_exceptions
    def is_empty(self, wire):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            try:
                has_head = repo.head.name
                if has_head:
                    return False

                # NOTE(marcink): check again using a more expensive method
                return repo.is_empty
            except Exception:
                pass

            return True

    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id):
            try:
                repo_init = self._factory.repo_libgit2(wire)
                with repo_init as repo:
                    pass
            except pygit2.GitError:
                path = wire.get('path')
                tb = traceback.format_exc()
                log.debug("Invalid Git path `%s`, tb: %s", path, tb)
                return False

            return True
        return _assert_correct_path(context_uid, repo_id)

    @reraise_safe_exceptions
    def bare(self, wire):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            return repo.is_bare

    @reraise_safe_exceptions
    def blob_as_pretty_string(self, wire, sha):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob_obj = repo[sha]
            blob = blob_obj.data
            return blob

    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _blob_raw_length(_repo_id, _sha):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[sha]
                return blob.size

        return _blob_raw_length(repo_id, sha)

    def _parse_lfs_pointer(self, raw_content):
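        # A git-lfs pointer file is a small text blob of roughly this form:
        #
        #   version https://git-lfs.github.com/spec/v1
        #   oid sha256:<64 hex chars>
        #   size <bytes>
        #
        # On a match this returns the regex groups as a dict
        # ({'spec_ver': ..., 'oid_hash': ..., 'oid_size': ...}), otherwise {}.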

        spec_string = 'version https://git-lfs.github.com/spec'
        if raw_content and raw_content.startswith(spec_string):
            pattern = re.compile(r"""
                                 (?:\n)?
                                 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
                                 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
                                 ^size[ ](?P<oid_size>[0-9]+)\n
                                 (?:\n)?
                                 """, re.VERBOSE | re.MULTILINE)
            match = pattern.match(raw_content)
            if match:
                return match.groupdict()

        return {}

    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[commit_id]
                if blob.is_binary:
                    return {}

                return self._parse_lfs_pointer(blob.data)

        return _is_large_file(repo_id, commit_id)

    @reraise_safe_exceptions
    def is_binary(self, wire, tree_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _tree_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob_obj = repo[tree_id]
                return blob_obj.is_binary

        return _is_binary(repo_id, tree_id)

    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, oid):
        conf = self._wire_to_config(wire)
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_name = repo.path

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:

            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.has_oid()

        return False

    @reraise_safe_exceptions
    def store_path(self, wire, oid):
        conf = self._wire_to_config(wire)
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_name = repo.path

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:
            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.oid_path
        raise ValueError('Unable to fetch oid with path {}'.format(oid))

    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _rev, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    args = [wire, rev]
                    result[attr] = method(*args)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        "Unknown bulk attribute: %s" % attr)
            return result

        return _bulk_request(repo_id, rev, sorted(pre_load))

    def _build_opener(self, url):
        handlers = []
        url_obj = url_parser(url)
        _, authinfo = url_obj.authinfo()

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        return urllib2.build_opener(*handlers)

    def _type_id_to_name(self, type_id):
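        # libgit2/pygit2 expose object types as small integers; this mirrors
        # that numbering (1=commit, 2=tree, 3=blob, 4=tag) as byte strings.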
        return {
            1: b'commit',
            2: b'tree',
            3: b'blob',
            4: b'tag'
        }[type_id]

    @reraise_safe_exceptions
    def check_url(self, url, config):
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
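        # the probe URL ends up as the smart-HTTP discovery endpoint, e.g.
        # https://example.com/repo.git/info/refs?service=git-upload-pack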
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's a proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old-style git can return some other format!
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like a git repository" % (cleaned_uri,))

        return True

    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so-called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)

    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = filter(filter_with, self.get_refs(wire).items())
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def commit_branches(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _commit_branches(_context_uid, _repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                branches = [x for x in repo.branches.with_commit(_commit_id)]
                return branches

        return _commit_branches(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def add_object(self, wire, content):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob = objects.Blob()
            blob.set_raw_string(content)
            repo.object_store.add_object(blob)
            return blob.id

    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
+        # Defines the root tree
+        class _Root(object):
+            def __repr__(self):
+                return 'ROOT TREE'
+        ROOT = _Root()
+
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populate it with blobs
-        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
+
+        if commit_tree and repo[commit_tree]:
+            git_commit = repo[commit_data['parents'][0]]
+            commit_tree = repo[git_commit.tree]  # root tree
+        else:
+            commit_tree = objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is the deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
-            tree = commit_tree
-            trees = [tree]
+            tree = commit_tree  # start with top-level
+            trees = [{'tree': tree, 'path': ROOT}]
            # Traverse deep into the forest...
+            # resolve the final tree by iterating the path.
+            # e.g. a/b/c.txt will get
+            # - root as tree then
+            # - 'a' as tree,
+            # - 'b' as tree,
+            # - stop at c as blob.
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
-                        trees.append(obj)
+                        trees.append({'tree': obj, 'path': path})
                        tree = obj
                except KeyError:
                    break
+            #PROBLEM:
+            """
+            We're not editing the same reference tree object
+            """
            # Cut down the blob and all rotten trees on the way back...
-            for path, tree in reversed(zip(paths, trees)):
-                del tree[path]
-                if tree:
+            for path, tree_data in reversed(zip(paths, trees)):
+                tree = tree_data['tree']
+                tree.__delitem__(path)
+                # This operation edits the tree, we need to mark new commit back
+
+                if len(tree) > 0:
                    # This tree still has elements - don't remove it or any
                    # of its parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
-        for k, v in commit_data.iteritems():
+        for k, v in commit_data.items():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id

    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo["HEAD"] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs

    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
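        # List the remote's refs with `git ls-remote` first, then fetch them in
        # chunks of refspecs of the form 'refs/heads/x:refs/heads/x'.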
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs

    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

    @reraise_safe_exceptions
    def get_remote_refs(self, wire, url):
        repo = Repo(url)
        return repo.get_refs()

    @reraise_safe_exceptions
    def get_description(self, wire):
        repo = self._factory.repo(wire)
        return repo.get_description()

    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs

    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed, which means we don't have a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)

    @reraise_safe_exceptions
    def get_refs(self, wire):
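        # Returns a mapping of fully qualified branch/tag refs to commit hashes,
        # e.g. {'refs/heads/master': '<40-char sha>', 'refs/tags/v1.0': '...'}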
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_refs(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                regex = re.compile('^refs/(heads|tags)/')
                return {x.name: x.target.hex for x in
                        filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}

        return _get_refs(context_uid, repo_id)

    @reraise_safe_exceptions
    def get_branch_pointers(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_branch_pointers(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            regex = re.compile('^refs/heads')
            with repo_init as repo:
                branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
                return {x.target.hex: x.shorthand for x in branches}

        return _get_branch_pointers(context_uid, repo_id)

    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)

    @reraise_safe_exceptions
    def init(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)

    @reraise_safe_exceptions
    def revision(self, wire, rev):

        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects themselves don't have a tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)

    @reraise_safe_exceptions
    def date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _date(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'commit_time'):
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
                else:
                    commit = commit.get_object()
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset

                # TODO(marcink): check dulwich difference of offset vs timezone
                return [commit_time, commit_time_offset]
        return _date(repo_id, commit_id)

    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    author = commit.get_object().author

                if author.email:
                    return u"{} <{}>".format(author.name, author.email)

                try:
                    return u"{}".format(author.name)
                except Exception:
                    return u"{}".format(safe_unicode(author.raw_name))

        return _author(repo_id, commit_id)

    @reraise_safe_exceptions
    def message(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _message(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                return commit.message
        return _message(repo_id, commit_id)

    @reraise_safe_exceptions
    def parents(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _parents(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                if hasattr(commit, 'parent_ids'):
                    parent_ids = commit.parent_ids
                else:
                    parent_ids = commit.get_object().parent_ids

                return [x.hex for x in parent_ids]
        return _parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):
            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children'])
913
937
914 child_ids = []
938 child_ids = []
915 pat = re.compile(r'^%s' % commit_id)
939 pat = re.compile(r'^%s' % commit_id)
916 for l in output.splitlines():
940 for l in output.splitlines():
917 if pat.match(l):
941 if pat.match(l):
918 found_ids = l.split(' ')[1:]
942 found_ids = l.split(' ')[1:]
919 child_ids.extend(found_ids)
943 child_ids.extend(found_ids)
920
944
921 return child_ids
945 return child_ids
922 return _children(repo_id, commit_id)
946 return _children(repo_id, commit_id)
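# NOTE: `git rev-list --all --children` prints every commit followed by its
# children, roughly one line per commit:
#   <commit-sha> <child-sha-1> <child-sha-2> ...
# so finding children means scanning the whole history, which is why the result
# is cached per (repo_id, commit_id) above.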
923
947
924 @reraise_safe_exceptions
948 @reraise_safe_exceptions
925 def set_refs(self, wire, key, value):
949 def set_refs(self, wire, key, value):
926 repo_init = self._factory.repo_libgit2(wire)
950 repo_init = self._factory.repo_libgit2(wire)
927 with repo_init as repo:
951 with repo_init as repo:
928 repo.references.create(key, value, force=True)
952 repo.references.create(key, value, force=True)
929
953
930 @reraise_safe_exceptions
954 @reraise_safe_exceptions
931 def create_branch(self, wire, branch_name, commit_id, force=False):
955 def create_branch(self, wire, branch_name, commit_id, force=False):
932 repo_init = self._factory.repo_libgit2(wire)
956 repo_init = self._factory.repo_libgit2(wire)
933 with repo_init as repo:
957 with repo_init as repo:
934 commit = repo[commit_id]
958 commit = repo[commit_id]
935
959
936 if force:
960 if force:
937 repo.branches.local.create(branch_name, commit, force=force)
961 repo.branches.local.create(branch_name, commit, force=force)
938 elif not repo.branches.get(branch_name):
962 elif not repo.branches.get(branch_name):
939 # create only if the branch doesn't already exist
963 # create only if the branch doesn't already exist
940 repo.branches.local.create(branch_name, commit, force=force)
964 repo.branches.local.create(branch_name, commit, force=force)
941
965
942 @reraise_safe_exceptions
966 @reraise_safe_exceptions
943 def remove_ref(self, wire, key):
967 def remove_ref(self, wire, key):
944 repo_init = self._factory.repo_libgit2(wire)
968 repo_init = self._factory.repo_libgit2(wire)
945 with repo_init as repo:
969 with repo_init as repo:
946 repo.references.delete(key)
970 repo.references.delete(key)
947
971
948 @reraise_safe_exceptions
972 @reraise_safe_exceptions
949 def tag_remove(self, wire, tag_name):
973 def tag_remove(self, wire, tag_name):
950 repo_init = self._factory.repo_libgit2(wire)
974 repo_init = self._factory.repo_libgit2(wire)
951 with repo_init as repo:
975 with repo_init as repo:
952 key = 'refs/tags/{}'.format(tag_name)
976 key = 'refs/tags/{}'.format(tag_name)
953 repo.references.delete(key)
977 repo.references.delete(key)
954
978
955 @reraise_safe_exceptions
979 @reraise_safe_exceptions
956 def tree_changes(self, wire, source_id, target_id):
980 def tree_changes(self, wire, source_id, target_id):
957 # TODO(marcink): remove this, it seems to be only used by tests
981 # TODO(marcink): remove this, it seems to be only used by tests
958 repo = self._factory.repo(wire)
982 repo = self._factory.repo(wire)
959 source = repo[source_id].tree if source_id else None
983 source = repo[source_id].tree if source_id else None
960 target = repo[target_id].tree
984 target = repo[target_id].tree
961 result = repo.object_store.tree_changes(source, target)
985 result = repo.object_store.tree_changes(source, target)
962 return list(result)
986 return list(result)
963
987
964 @reraise_safe_exceptions
988 @reraise_safe_exceptions
965 def tree_and_type_for_path(self, wire, commit_id, path):
989 def tree_and_type_for_path(self, wire, commit_id, path):
966
990
967 cache_on, context_uid, repo_id = self._cache_on(wire)
991 cache_on, context_uid, repo_id = self._cache_on(wire)
968 region = self._region(wire)
992 region = self._region(wire)
969 @region.conditional_cache_on_arguments(condition=cache_on)
993 @region.conditional_cache_on_arguments(condition=cache_on)
970 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
994 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
971 repo_init = self._factory.repo_libgit2(wire)
995 repo_init = self._factory.repo_libgit2(wire)
972
996
973 with repo_init as repo:
997 with repo_init as repo:
974 commit = repo[commit_id]
998 commit = repo[commit_id]
975 try:
999 try:
976 tree = commit.tree[path]
1000 tree = commit.tree[path]
977 except KeyError:
1001 except KeyError:
978 return None, None, None
1002 return None, None, None
979
1003
980 return tree.id.hex, tree.type, tree.filemode
1004 return tree.id.hex, tree.type, tree.filemode
981 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1005 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
982
1006
983 @reraise_safe_exceptions
1007 @reraise_safe_exceptions
984 def tree_items(self, wire, tree_id):
1008 def tree_items(self, wire, tree_id):
985 cache_on, context_uid, repo_id = self._cache_on(wire)
1009 cache_on, context_uid, repo_id = self._cache_on(wire)
986 region = self._region(wire)
1010 region = self._region(wire)
987 @region.conditional_cache_on_arguments(condition=cache_on)
1011 @region.conditional_cache_on_arguments(condition=cache_on)
988 def _tree_items(_repo_id, _tree_id):
1012 def _tree_items(_repo_id, _tree_id):
989
1013
990 repo_init = self._factory.repo_libgit2(wire)
1014 repo_init = self._factory.repo_libgit2(wire)
991 with repo_init as repo:
1015 with repo_init as repo:
992 try:
1016 try:
993 tree = repo[tree_id]
1017 tree = repo[tree_id]
994 except KeyError:
1018 except KeyError:
995 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1019 raise ObjectMissing('No tree with id: {}'.format(tree_id))
996
1020
997 result = []
1021 result = []
998 for item in tree:
1022 for item in tree:
999 item_sha = item.hex
1023 item_sha = item.hex
1000 item_mode = item.filemode
1024 item_mode = item.filemode
1001 item_type = item.type
1025 item_type = item.type
1002
1026
1003 if item_type == 'commit':
1027 if item_type == 'commit':
1004 # NOTE(marcink): we translate submodules to 'link' for backward compat
1028 # NOTE(marcink): we translate submodules to 'link' for backward compat
1005 item_type = 'link'
1029 item_type = 'link'
1006
1030
1007 result.append((item.name, item_mode, item_sha, item_type))
1031 result.append((item.name, item_mode, item_sha, item_type))
1008 return result
1032 return result
1009 return _tree_items(repo_id, tree_id)
1033 return _tree_items(repo_id, tree_id)
1010
1034
1011 @reraise_safe_exceptions
1035 @reraise_safe_exceptions
1012 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1036 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1013 """
1037 """
1014 Old version that uses subprocess to call diff
1038 Old version that uses subprocess to call diff
1015 """
1039 """
1016
1040
1017 flags = [
1041 flags = [
1018 '-U%s' % context, '--patch',
1042 '-U%s' % context, '--patch',
1019 '--binary',
1043 '--binary',
1020 '--find-renames',
1044 '--find-renames',
1021 '--no-indent-heuristic',
1045 '--no-indent-heuristic',
1022 # '--indent-heuristic',
1046 # '--indent-heuristic',
1023 #'--full-index',
1047 #'--full-index',
1024 #'--abbrev=40'
1048 #'--abbrev=40'
1025 ]
1049 ]
1026
1050
1027 if opt_ignorews:
1051 if opt_ignorews:
1028 flags.append('--ignore-all-space')
1052 flags.append('--ignore-all-space')
1029
1053
1030 if commit_id_1 == self.EMPTY_COMMIT:
1054 if commit_id_1 == self.EMPTY_COMMIT:
1031 cmd = ['show'] + flags + [commit_id_2]
1055 cmd = ['show'] + flags + [commit_id_2]
1032 else:
1056 else:
1033 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1057 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1034
1058
1035 if file_filter:
1059 if file_filter:
1036 cmd.extend(['--', file_filter])
1060 cmd.extend(['--', file_filter])
1037
1061
1038 diff, __ = self.run_git_command(wire, cmd)
1062 diff, __ = self.run_git_command(wire, cmd)
1039 # If we used the 'show' command, strip the first few lines (until the
1063 # If we used the 'show' command, strip the first few lines (until the
1040 # actual diff starts)
1064 # actual diff starts)
1041 if commit_id_1 == self.EMPTY_COMMIT:
1065 if commit_id_1 == self.EMPTY_COMMIT:
1042 lines = diff.splitlines()
1066 lines = diff.splitlines()
1043 x = 0
1067 x = 0
1044 for line in lines:
1068 for line in lines:
1045 if line.startswith('diff'):
1069 if line.startswith('diff'):
1046 break
1070 break
1047 x += 1
1071 x += 1
1048 # Append a newline just like the 'diff' command does
1072 # Append a newline just like the 'diff' command does
1049 diff = '\n'.join(lines[x:]) + '\n'
1073 diff = '\n'.join(lines[x:]) + '\n'
1050 return diff
1074 return diff
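# A minimal sketch of the command this builds (placeholder ids/paths), assuming
# context=3, no whitespace flag and the default config options added by
# run_git_command() below:
#   git -c core.quotepath=false diff -U3 --patch --binary --find-renames \
#       --no-indent-heuristic <commit_id_1> <commit_id_2> -- <file_filter>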
1051
1075
1052 @reraise_safe_exceptions
1076 @reraise_safe_exceptions
1053 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1077 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1054 repo_init = self._factory.repo_libgit2(wire)
1078 repo_init = self._factory.repo_libgit2(wire)
1055 with repo_init as repo:
1079 with repo_init as repo:
1056 swap = True
1080 swap = True
1057 flags = 0
1081 flags = 0
1058 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1082 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1059
1083
1060 if opt_ignorews:
1084 if opt_ignorews:
1061 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1085 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1062
1086
1063 if commit_id_1 == self.EMPTY_COMMIT:
1087 if commit_id_1 == self.EMPTY_COMMIT:
1064 comm1 = repo[commit_id_2]
1088 comm1 = repo[commit_id_2]
1065 diff_obj = comm1.tree.diff_to_tree(
1089 diff_obj = comm1.tree.diff_to_tree(
1066 flags=flags, context_lines=context, swap=swap)
1090 flags=flags, context_lines=context, swap=swap)
1067
1091
1068 else:
1092 else:
1069 comm1 = repo[commit_id_2]
1093 comm1 = repo[commit_id_2]
1070 comm2 = repo[commit_id_1]
1094 comm2 = repo[commit_id_1]
1071 diff_obj = comm1.tree.diff_to_tree(
1095 diff_obj = comm1.tree.diff_to_tree(
1072 comm2.tree, flags=flags, context_lines=context, swap=swap)
1096 comm2.tree, flags=flags, context_lines=context, swap=swap)
1073 similar_flags = 0
1097 similar_flags = 0
1074 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1098 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1075 diff_obj.find_similar(flags=similar_flags)
1099 diff_obj.find_similar(flags=similar_flags)
1076
1100
1077 if file_filter:
1101 if file_filter:
1078 for p in diff_obj:
1102 for p in diff_obj:
1079 if p.delta.old_file.path == file_filter:
1103 if p.delta.old_file.path == file_filter:
1080 return p.patch or ''
1104 return p.patch or ''
1081 # no matching path == no diff
1105 # no matching path == no diff
1082 return ''
1106 return ''
1083 return diff_obj.patch or ''
1107 return diff_obj.patch or ''
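# NOTE: the diff is taken from the commit_id_2 tree with swap=True so that the
# resulting patch reads as changes going from commit_id_1 to commit_id_2; in the
# EMPTY_COMMIT case the tree is diffed against the implicit empty tree, which
# makes the whole tree show up as additions.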
1084
1108
1085 @reraise_safe_exceptions
1109 @reraise_safe_exceptions
1086 def node_history(self, wire, commit_id, path, limit):
1110 def node_history(self, wire, commit_id, path, limit):
1087 cache_on, context_uid, repo_id = self._cache_on(wire)
1111 cache_on, context_uid, repo_id = self._cache_on(wire)
1088 region = self._region(wire)
1112 region = self._region(wire)
1089 @region.conditional_cache_on_arguments(condition=cache_on)
1113 @region.conditional_cache_on_arguments(condition=cache_on)
1090 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1114 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1091 # optimize for n==1, rev-list is much faster for that use-case
1115 # optimize for n==1, rev-list is much faster for that use-case
1092 if limit == 1:
1116 if limit == 1:
1093 cmd = ['rev-list', '-1', commit_id, '--', path]
1117 cmd = ['rev-list', '-1', commit_id, '--', path]
1094 else:
1118 else:
1095 cmd = ['log']
1119 cmd = ['log']
1096 if limit:
1120 if limit:
1097 cmd.extend(['-n', str(safe_int(limit, 0))])
1121 cmd.extend(['-n', str(safe_int(limit, 0))])
1098 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1122 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1099
1123
1100 output, __ = self.run_git_command(wire, cmd)
1124 output, __ = self.run_git_command(wire, cmd)
1101 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1125 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1102
1126
1103 return [x for x in commit_ids]
1127 return [x for x in commit_ids]
1104 return _node_history(context_uid, repo_id, commit_id, path, limit)
1128 return _node_history(context_uid, repo_id, commit_id, path, limit)
1105
1129
1106 @reraise_safe_exceptions
1130 @reraise_safe_exceptions
1107 def node_annotate(self, wire, commit_id, path):
1131 def node_annotate(self, wire, commit_id, path):
1108
1132
1109 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1133 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1110 # -l ==> outputs long shas (and we need all 40 characters)
1134 # -l ==> outputs long shas (and we need all 40 characters)
1111 # --root ==> doesn't put '^' character for boundaries
1135 # --root ==> doesn't put '^' character for boundaries
1112 # -r commit_id ==> blames for the given commit
1136 # -r commit_id ==> blames for the given commit
1113 output, __ = self.run_git_command(wire, cmd)
1137 output, __ = self.run_git_command(wire, cmd)
1114
1138
1115 result = []
1139 result = []
1116 for i, blame_line in enumerate(output.split('\n')[:-1]):
1140 for i, blame_line in enumerate(output.split('\n')[:-1]):
1117 line_no = i + 1
1141 line_no = i + 1
1118 commit_id, line = re.split(r' ', blame_line, 1)
1142 commit_id, line = re.split(r' ', blame_line, 1)
1119 result.append((line_no, commit_id, line))
1143 result.append((line_no, commit_id, line))
1120 return result
1144 return result
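# NOTE: each blame line is expected to look roughly like
#   <40-char sha> (<author> <date> <line no>) <line content>
# only the leading sha is split off here; the remainder is returned verbatim.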
1121
1145
1122 @reraise_safe_exceptions
1146 @reraise_safe_exceptions
1123 def update_server_info(self, wire):
1147 def update_server_info(self, wire):
1124 repo = self._factory.repo(wire)
1148 repo = self._factory.repo(wire)
1125 update_server_info(repo)
1149 update_server_info(repo)
1126
1150
1127 @reraise_safe_exceptions
1151 @reraise_safe_exceptions
1128 def get_all_commit_ids(self, wire):
1152 def get_all_commit_ids(self, wire):
1129
1153
1130 cache_on, context_uid, repo_id = self._cache_on(wire)
1154 cache_on, context_uid, repo_id = self._cache_on(wire)
1131 region = self._region(wire)
1155 region = self._region(wire)
1132 @region.conditional_cache_on_arguments(condition=cache_on)
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1133 def _get_all_commit_ids(_context_uid, _repo_id):
1157 def _get_all_commit_ids(_context_uid, _repo_id):
1134
1158
1135 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1159 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1136 try:
1160 try:
1137 output, __ = self.run_git_command(wire, cmd)
1161 output, __ = self.run_git_command(wire, cmd)
1138 return output.splitlines()
1162 return output.splitlines()
1139 except Exception:
1163 except Exception:
1140 # Can be raised for empty repositories
1164 # Can be raised for empty repositories
1141 return []
1165 return []
1142 return _get_all_commit_ids(context_uid, repo_id)
1166 return _get_all_commit_ids(context_uid, repo_id)
1143
1167
1144 @reraise_safe_exceptions
1168 @reraise_safe_exceptions
1145 def run_git_command(self, wire, cmd, **opts):
1169 def run_git_command(self, wire, cmd, **opts):
1146 path = wire.get('path', None)
1170 path = wire.get('path', None)
1147
1171
1148 if path and os.path.isdir(path):
1172 if path and os.path.isdir(path):
1149 opts['cwd'] = path
1173 opts['cwd'] = path
1150
1174
1151 if '_bare' in opts:
1175 if '_bare' in opts:
1152 _copts = []
1176 _copts = []
1153 del opts['_bare']
1177 del opts['_bare']
1154 else:
1178 else:
1155 _copts = ['-c', 'core.quotepath=false', ]
1179 _copts = ['-c', 'core.quotepath=false', ]
1156 safe_call = False
1180 safe_call = False
1157 if '_safe' in opts:
1181 if '_safe' in opts:
1158 # no exc on failure
1182 # no exc on failure
1159 del opts['_safe']
1183 del opts['_safe']
1160 safe_call = True
1184 safe_call = True
1161
1185
1162 if '_copts' in opts:
1186 if '_copts' in opts:
1163 _copts.extend(opts['_copts'] or [])
1187 _copts.extend(opts['_copts'] or [])
1164 del opts['_copts']
1188 del opts['_copts']
1165
1189
1166 gitenv = os.environ.copy()
1190 gitenv = os.environ.copy()
1167 gitenv.update(opts.pop('extra_env', {}))
1191 gitenv.update(opts.pop('extra_env', {}))
1168 # GIT_DIR needs to be cleaned out of the environment!
1192 # GIT_DIR needs to be cleaned out of the environment!
1169 if 'GIT_DIR' in gitenv:
1193 if 'GIT_DIR' in gitenv:
1170 del gitenv['GIT_DIR']
1194 del gitenv['GIT_DIR']
1171 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1195 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1172 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1196 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1173
1197
1174 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1198 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1175 _opts = {'env': gitenv, 'shell': False}
1199 _opts = {'env': gitenv, 'shell': False}
1176
1200
1177 proc = None
1201 proc = None
1178 try:
1202 try:
1179 _opts.update(opts)
1203 _opts.update(opts)
1180 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1204 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1181
1205
1182 return ''.join(proc), ''.join(proc.error)
1206 return ''.join(proc), ''.join(proc.error)
1183 except (EnvironmentError, OSError) as err:
1207 except (EnvironmentError, OSError) as err:
1184 cmd = ' '.join(cmd) # human friendly CMD
1208 cmd = ' '.join(cmd) # human friendly CMD
1185 tb_err = ("Couldn't run git command (%s).\n"
1209 tb_err = ("Couldn't run git command (%s).\n"
1186 "Original error was:%s\n"
1210 "Original error was:%s\n"
1187 "Call options:%s\n"
1211 "Call options:%s\n"
1188 % (cmd, err, _opts))
1212 % (cmd, err, _opts))
1189 log.exception(tb_err)
1213 log.exception(tb_err)
1190 if safe_call:
1214 if safe_call:
1191 return '', err
1215 return '', err
1192 else:
1216 else:
1193 raise exceptions.VcsException()(tb_err)
1217 raise exceptions.VcsException()(tb_err)
1194 finally:
1218 finally:
1195 if proc:
1219 if proc:
1196 proc.close()
1220 proc.close()
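# A minimal usage sketch (illustrative command only):
#   output, err = self.run_git_command(wire, ['rev-parse', 'HEAD'], _safe=True)
# `_safe=True` returns ('', err) instead of raising, `_bare` drops the default
# `-c core.quotepath=false` option, and `_copts` appends extra config options.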
1197
1221
1198 @reraise_safe_exceptions
1222 @reraise_safe_exceptions
1199 def install_hooks(self, wire, force=False):
1223 def install_hooks(self, wire, force=False):
1200 from vcsserver.hook_utils import install_git_hooks
1224 from vcsserver.hook_utils import install_git_hooks
1201 bare = self.bare(wire)
1225 bare = self.bare(wire)
1202 path = wire['path']
1226 path = wire['path']
1203 return install_git_hooks(path, bare, force_create=force)
1227 return install_git_hooks(path, bare, force_create=force)
1204
1228
1205 @reraise_safe_exceptions
1229 @reraise_safe_exceptions
1206 def set_head_ref(self, wire, head_name):
1207 log.debug('Setting refs/head to `%s`', head_name)
1208 cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
1209 output, __ = self.run_git_command(wire, cmd)
1210 return [head_name] + output.splitlines()
1211
1212 @reraise_safe_exceptions
1213 def get_hooks_info(self, wire):
1230 def get_hooks_info(self, wire):
1214 from vcsserver.hook_utils import (
1231 from vcsserver.hook_utils import (
1215 get_git_pre_hook_version, get_git_post_hook_version)
1232 get_git_pre_hook_version, get_git_post_hook_version)
1216 bare = self.bare(wire)
1233 bare = self.bare(wire)
1217 path = wire['path']
1234 path = wire['path']
1218 return {
1235 return {
1219 'pre_version': get_git_pre_hook_version(path, bare),
1236 'pre_version': get_git_pre_hook_version(path, bare),
1220 'post_version': get_git_post_hook_version(path, bare),
1237 'post_version': get_git_post_hook_version(path, bare),
1221 }
1238 }
1222
1239
1223 @reraise_safe_exceptions
1240 @reraise_safe_exceptions
1241 def set_head_ref(self, wire, head_name):
1242 log.debug('Setting HEAD to `refs/heads/%s`', head_name)
1243 cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
1244 output, __ = self.run_git_command(wire, cmd)
1245 return [head_name] + output.splitlines()
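# A minimal usage sketch (example branch name only):
#   self.set_head_ref(wire, 'stable')
#   # runs `git symbolic-ref HEAD refs/heads/stable` and returns ['stable']
#   # plus any output lines produced by the command.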
1246
1247 @reraise_safe_exceptions
1224 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1248 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1225 archive_dir_name, commit_id):
1249 archive_dir_name, commit_id):
1226
1250
1227 def file_walker(_commit_id, path):
1251 def file_walker(_commit_id, path):
1228 repo_init = self._factory.repo_libgit2(wire)
1252 repo_init = self._factory.repo_libgit2(wire)
1229
1253
1230 with repo_init as repo:
1254 with repo_init as repo:
1231 commit = repo[commit_id]
1255 commit = repo[commit_id]
1232
1256
1233 if path in ['', '/']:
1257 if path in ['', '/']:
1234 tree = commit.tree
1258 tree = commit.tree
1235 else:
1259 else:
1236 tree = commit.tree[path.rstrip('/')]
1260 tree = commit.tree[path.rstrip('/')]
1237 tree_id = tree.id.hex
1261 tree_id = tree.id.hex
1238 try:
1262 try:
1239 tree = repo[tree_id]
1263 tree = repo[tree_id]
1240 except KeyError:
1264 except KeyError:
1241 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1265 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1242
1266
1243 index = LibGit2Index.Index()
1267 index = LibGit2Index.Index()
1244 index.read_tree(tree)
1268 index.read_tree(tree)
1245 file_iter = index
1269 file_iter = index
1246
1270
1247 for fn in file_iter:
1271 for fn in file_iter:
1248 file_path = fn.path
1272 file_path = fn.path
1249 mode = fn.mode
1273 mode = fn.mode
1250 is_link = stat.S_ISLNK(mode)
1274 is_link = stat.S_ISLNK(mode)
1251 if mode == pygit2.GIT_FILEMODE_COMMIT:
1275 if mode == pygit2.GIT_FILEMODE_COMMIT:
1252 log.debug('Skipping path %s as a commit node', file_path)
1276 log.debug('Skipping path %s as a commit node', file_path)
1253 continue
1277 continue
1254 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1278 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1255
1279
1256 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1280 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1257 archive_dir_name, commit_id)
1281 archive_dir_name, commit_id)
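# NOTE: file_walker() reads the requested (sub)tree into an in-memory pygit2
# index so every blob can be yielded as an ArchiveNode with a lazy read_raw
# callable; submodule entries (GIT_FILEMODE_COMMIT) are skipped since they have
# no blob content to archive.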
@@ -1,1043 +1,1047 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import functools
17 import functools
18 import io
18 import io
19 import logging
19 import logging
20 import os
20 import os
21 import stat
21 import stat
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24 import traceback
24 import traceback
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27 from hgext.strip import strip as hgext_strip
27 from hgext.strip import strip as hgext_strip
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
42 alwaysmatcher, patternmatcher, hgutil)
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 def make_ui_from_config(repo_config):
48 def make_ui_from_config(repo_config):
49
49
50 class LoggingUI(ui.ui):
50 class LoggingUI(ui.ui):
51 def status(self, *msg, **opts):
51 def status(self, *msg, **opts):
52 log.info(' '.join(msg).rstrip('\n'))
52 log.info(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).status(*msg, **opts)
53 super(LoggingUI, self).status(*msg, **opts)
54
54
55 def warn(self, *msg, **opts):
55 def warn(self, *msg, **opts):
56 log.warn(' '.join(msg).rstrip('\n'))
56 log.warn(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).warn(*msg, **opts)
57 super(LoggingUI, self).warn(*msg, **opts)
58
58
59 def error(self, *msg, **opts):
59 def error(self, *msg, **opts):
60 log.error(' '.join(msg).rstrip('\n'))
60 log.error(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).error(*msg, **opts)
61 super(LoggingUI, self).error(*msg, **opts)
62
62
63 def note(self, *msg, **opts):
63 def note(self, *msg, **opts):
64 log.info(' '.join(msg).rstrip('\n'))
64 log.info(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).note(*msg, **opts)
65 super(LoggingUI, self).note(*msg, **opts)
66
66
67 def debug(self, *msg, **opts):
67 def debug(self, *msg, **opts):
68 log.debug(' '.join(msg).rstrip('\n'))
68 log.debug(' '.join(msg).rstrip('\n'))
69 super(LoggingUI, self).debug(*msg, **opts)
69 super(LoggingUI, self).debug(*msg, **opts)
70
70
71 baseui = LoggingUI()
71 baseui = LoggingUI()
72
72
73 # clean the baseui object
73 # clean the baseui object
74 baseui._ocfg = hgconfig.config()
74 baseui._ocfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
77
77
78 for section, option, value in repo_config:
78 for section, option, value in repo_config:
79 baseui.setconfig(section, option, value)
79 baseui.setconfig(section, option, value)
80
80
81 # make our hgweb quiet so it doesn't print output
81 # make our hgweb quiet so it doesn't print output
82 baseui.setconfig('ui', 'quiet', 'true')
82 baseui.setconfig('ui', 'quiet', 'true')
83
83
84 baseui.setconfig('ui', 'paginate', 'never')
84 baseui.setconfig('ui', 'paginate', 'never')
85 # for better Error reporting of Mercurial
85 # for better Error reporting of Mercurial
86 baseui.setconfig('ui', 'message-output', 'stderr')
86 baseui.setconfig('ui', 'message-output', 'stderr')
87
87
88 # force mercurial to only use 1 thread, otherwise it may try to set a
88 # force mercurial to only use 1 thread, otherwise it may try to set a
89 # signal in a non-main thread, thus generating a ValueError.
89 # signal in a non-main thread, thus generating a ValueError.
90 baseui.setconfig('worker', 'numcpus', 1)
90 baseui.setconfig('worker', 'numcpus', 1)
91
91
92 # If there is no config for the largefiles extension, we explicitly disable
92 # If there is no config for the largefiles extension, we explicitly disable
93 # it here. This overrides settings from repositories hgrc file. Recent
93 # it here. This overrides settings from repositories hgrc file. Recent
94 # mercurial versions enable largefiles in hgrc on clone from largefile
94 # mercurial versions enable largefiles in hgrc on clone from largefile
95 # repo.
95 # repo.
96 if not baseui.hasconfig('extensions', 'largefiles'):
96 if not baseui.hasconfig('extensions', 'largefiles'):
97 log.debug('Explicitly disable largefiles extension for repo.')
97 log.debug('Explicitly disable largefiles extension for repo.')
98 baseui.setconfig('extensions', 'largefiles', '!')
98 baseui.setconfig('extensions', 'largefiles', '!')
99
99
100 return baseui
100 return baseui
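# A minimal sketch of the expected repo_config shape (hypothetical values):
#   [('ui', 'username', 'RhodeCode'), ('extensions', 'largefiles', '!')]
# i.e. an iterable of (section, option, value) triples applied via setconfig().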
101
101
102
102
103 def reraise_safe_exceptions(func):
103 def reraise_safe_exceptions(func):
104 """Decorator for converting mercurial exceptions to something neutral."""
104 """Decorator for converting mercurial exceptions to something neutral."""
105
105
106 def wrapper(*args, **kwargs):
106 def wrapper(*args, **kwargs):
107 try:
107 try:
108 return func(*args, **kwargs)
108 return func(*args, **kwargs)
109 except (Abort, InterventionRequired) as e:
109 except (Abort, InterventionRequired) as e:
110 raise_from_original(exceptions.AbortException(e))
110 raise_from_original(exceptions.AbortException(e))
111 except RepoLookupError as e:
111 except RepoLookupError as e:
112 raise_from_original(exceptions.LookupException(e))
112 raise_from_original(exceptions.LookupException(e))
113 except RequirementError as e:
113 except RequirementError as e:
114 raise_from_original(exceptions.RequirementException(e))
114 raise_from_original(exceptions.RequirementException(e))
115 except RepoError as e:
115 except RepoError as e:
116 raise_from_original(exceptions.VcsException(e))
116 raise_from_original(exceptions.VcsException(e))
117 except LookupError as e:
117 except LookupError as e:
118 raise_from_original(exceptions.LookupException(e))
118 raise_from_original(exceptions.LookupException(e))
119 except Exception as e:
119 except Exception as e:
120 if not hasattr(e, '_vcs_kind'):
120 if not hasattr(e, '_vcs_kind'):
121 log.exception("Unhandled exception in hg remote call")
121 log.exception("Unhandled exception in hg remote call")
122 raise_from_original(exceptions.UnhandledException(e))
122 raise_from_original(exceptions.UnhandledException(e))
123
123
124 raise
124 raise
125 return wrapper
125 return wrapper
126
126
127
127
128 class MercurialFactory(RepoFactory):
128 class MercurialFactory(RepoFactory):
129 repo_type = 'hg'
129 repo_type = 'hg'
130
130
131 def _create_config(self, config, hooks=True):
131 def _create_config(self, config, hooks=True):
132 if not hooks:
132 if not hooks:
133 hooks_to_clean = frozenset((
133 hooks_to_clean = frozenset((
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
136 new_config = []
136 new_config = []
137 for section, option, value in config:
137 for section, option, value in config:
138 if section == 'hooks' and option in hooks_to_clean:
138 if section == 'hooks' and option in hooks_to_clean:
139 continue
139 continue
140 new_config.append((section, option, value))
140 new_config.append((section, option, value))
141 config = new_config
141 config = new_config
142
142
143 baseui = make_ui_from_config(config)
143 baseui = make_ui_from_config(config)
144 return baseui
144 return baseui
145
145
146 def _create_repo(self, wire, create):
146 def _create_repo(self, wire, create):
147 baseui = self._create_config(wire["config"])
147 baseui = self._create_config(wire["config"])
148 return instance(baseui, wire["path"], create)
148 return instance(baseui, wire["path"], create)
149
149
150 def repo(self, wire, create=False):
150 def repo(self, wire, create=False):
151 """
151 """
152 Get a repository instance for the given path.
152 Get a repository instance for the given path.
153 """
153 """
154 return self._create_repo(wire, create)
154 return self._create_repo(wire, create)
155
155
156
156
157 def patch_ui_message_output(baseui):
157 def patch_ui_message_output(baseui):
158 baseui.setconfig('ui', 'quiet', 'false')
158 baseui.setconfig('ui', 'quiet', 'false')
159 output = io.BytesIO()
159 output = io.BytesIO()
160
160
161 def write(data, **unused_kwargs):
161 def write(data, **unused_kwargs):
162 output.write(data)
162 output.write(data)
163
163
164 baseui.status = write
164 baseui.status = write
165 baseui.write = write
165 baseui.write = write
166 baseui.warn = write
166 baseui.warn = write
167 baseui.debug = write
167 baseui.debug = write
168
168
169 return baseui, output
169 return baseui, output
170
170
171
171
172 class HgRemote(RemoteBase):
172 class HgRemote(RemoteBase):
173
173
174 def __init__(self, factory):
174 def __init__(self, factory):
175 self._factory = factory
175 self._factory = factory
176 self._bulk_methods = {
176 self._bulk_methods = {
177 "affected_files": self.ctx_files,
177 "affected_files": self.ctx_files,
178 "author": self.ctx_user,
178 "author": self.ctx_user,
179 "branch": self.ctx_branch,
179 "branch": self.ctx_branch,
180 "children": self.ctx_children,
180 "children": self.ctx_children,
181 "date": self.ctx_date,
181 "date": self.ctx_date,
182 "message": self.ctx_description,
182 "message": self.ctx_description,
183 "parents": self.ctx_parents,
183 "parents": self.ctx_parents,
184 "status": self.ctx_status,
184 "status": self.ctx_status,
185 "obsolete": self.ctx_obsolete,
185 "obsolete": self.ctx_obsolete,
186 "phase": self.ctx_phase,
186 "phase": self.ctx_phase,
187 "hidden": self.ctx_hidden,
187 "hidden": self.ctx_hidden,
188 "_file_paths": self.ctx_list,
188 "_file_paths": self.ctx_list,
189 }
189 }
190
190
191 def _get_ctx(self, repo, ref):
191 def _get_ctx(self, repo, ref):
192 return get_ctx(repo, ref)
192 return get_ctx(repo, ref)
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def discover_hg_version(self):
195 def discover_hg_version(self):
196 from mercurial import util
196 from mercurial import util
197 return util.version()
197 return util.version()
198
198
199 @reraise_safe_exceptions
199 @reraise_safe_exceptions
200 def is_empty(self, wire):
200 def is_empty(self, wire):
201 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
202
202
203 try:
203 try:
204 return len(repo) == 0
204 return len(repo) == 0
205 except Exception:
205 except Exception:
206 log.exception("failed to read object_store")
206 log.exception("failed to read object_store")
207 return False
207 return False
208
208
209 @reraise_safe_exceptions
209 @reraise_safe_exceptions
210 def bookmarks(self, wire):
210 def bookmarks(self, wire):
211 cache_on, context_uid, repo_id = self._cache_on(wire)
211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 region = self._region(wire)
212 region = self._region(wire)
213 @region.conditional_cache_on_arguments(condition=cache_on)
213 @region.conditional_cache_on_arguments(condition=cache_on)
214 def _bookmarks(_context_uid, _repo_id):
214 def _bookmarks(_context_uid, _repo_id):
215 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
216 return dict(repo._bookmarks)
216 return dict(repo._bookmarks)
217
217
218 return _bookmarks(context_uid, repo_id)
218 return _bookmarks(context_uid, repo_id)
219
219
220 @reraise_safe_exceptions
220 @reraise_safe_exceptions
221 def branches(self, wire, normal, closed):
221 def branches(self, wire, normal, closed):
222 cache_on, context_uid, repo_id = self._cache_on(wire)
222 cache_on, context_uid, repo_id = self._cache_on(wire)
223 region = self._region(wire)
223 region = self._region(wire)
224 @region.conditional_cache_on_arguments(condition=cache_on)
224 @region.conditional_cache_on_arguments(condition=cache_on)
225 def _branches(_context_uid, _repo_id, _normal, _closed):
225 def _branches(_context_uid, _repo_id, _normal, _closed):
226 repo = self._factory.repo(wire)
226 repo = self._factory.repo(wire)
227 iter_branches = repo.branchmap().iterbranches()
227 iter_branches = repo.branchmap().iterbranches()
228 bt = {}
228 bt = {}
229 for branch_name, _heads, tip, is_closed in iter_branches:
229 for branch_name, _heads, tip, is_closed in iter_branches:
230 if normal and not is_closed:
230 if normal and not is_closed:
231 bt[branch_name] = tip
231 bt[branch_name] = tip
232 if closed and is_closed:
232 if closed and is_closed:
233 bt[branch_name] = tip
233 bt[branch_name] = tip
234
234
235 return bt
235 return bt
236
236
237 return _branches(context_uid, repo_id, normal, closed)
237 return _branches(context_uid, repo_id, normal, closed)
238
238
239 @reraise_safe_exceptions
239 @reraise_safe_exceptions
240 def bulk_request(self, wire, commit_id, pre_load):
240 def bulk_request(self, wire, commit_id, pre_load):
241 cache_on, context_uid, repo_id = self._cache_on(wire)
241 cache_on, context_uid, repo_id = self._cache_on(wire)
242 region = self._region(wire)
242 region = self._region(wire)
243 @region.conditional_cache_on_arguments(condition=cache_on)
243 @region.conditional_cache_on_arguments(condition=cache_on)
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
245 result = {}
245 result = {}
246 for attr in pre_load:
246 for attr in pre_load:
247 try:
247 try:
248 method = self._bulk_methods[attr]
248 method = self._bulk_methods[attr]
249 result[attr] = method(wire, commit_id)
249 result[attr] = method(wire, commit_id)
250 except KeyError as e:
250 except KeyError as e:
251 raise exceptions.VcsException(e)(
251 raise exceptions.VcsException(e)(
252 'Unknown bulk attribute: "%s"' % attr)
252 'Unknown bulk attribute: "%s"' % attr)
253 return result
253 return result
254
254
255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
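# A minimal usage sketch (attribute names taken from self._bulk_methods,
# `wire`/`commit_id` are placeholders):
#   remote.bulk_request(wire, commit_id, pre_load=['author', 'branch', 'message'])
#   # -> {'author': ..., 'branch': ..., 'message': ...}
# unknown attributes raise a VcsException.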
256
256
257 @reraise_safe_exceptions
257 @reraise_safe_exceptions
258 def ctx_branch(self, wire, commit_id):
258 def ctx_branch(self, wire, commit_id):
259 cache_on, context_uid, repo_id = self._cache_on(wire)
259 cache_on, context_uid, repo_id = self._cache_on(wire)
260 region = self._region(wire)
260 region = self._region(wire)
261 @region.conditional_cache_on_arguments(condition=cache_on)
261 @region.conditional_cache_on_arguments(condition=cache_on)
262 def _ctx_branch(_repo_id, _commit_id):
262 def _ctx_branch(_repo_id, _commit_id):
263 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
264 ctx = self._get_ctx(repo, commit_id)
264 ctx = self._get_ctx(repo, commit_id)
265 return ctx.branch()
265 return ctx.branch()
266 return _ctx_branch(repo_id, commit_id)
266 return _ctx_branch(repo_id, commit_id)
267
267
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_date(self, wire, commit_id):
269 def ctx_date(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 region = self._region(wire)
271 region = self._region(wire)
272 @region.conditional_cache_on_arguments(condition=cache_on)
272 @region.conditional_cache_on_arguments(condition=cache_on)
273 def _ctx_date(_repo_id, _commit_id):
273 def _ctx_date(_repo_id, _commit_id):
274 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
275 ctx = self._get_ctx(repo, commit_id)
275 ctx = self._get_ctx(repo, commit_id)
276 return ctx.date()
276 return ctx.date()
277 return _ctx_date(repo_id, commit_id)
277 return _ctx_date(repo_id, commit_id)
278
278
279 @reraise_safe_exceptions
279 @reraise_safe_exceptions
280 def ctx_description(self, wire, revision):
280 def ctx_description(self, wire, revision):
281 repo = self._factory.repo(wire)
281 repo = self._factory.repo(wire)
282 ctx = self._get_ctx(repo, revision)
282 ctx = self._get_ctx(repo, revision)
283 return ctx.description()
283 return ctx.description()
284
284
285 @reraise_safe_exceptions
285 @reraise_safe_exceptions
286 def ctx_files(self, wire, commit_id):
286 def ctx_files(self, wire, commit_id):
287 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
288 region = self._region(wire)
288 region = self._region(wire)
289 @region.conditional_cache_on_arguments(condition=cache_on)
289 @region.conditional_cache_on_arguments(condition=cache_on)
290 def _ctx_files(_repo_id, _commit_id):
290 def _ctx_files(_repo_id, _commit_id):
291 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
292 ctx = self._get_ctx(repo, commit_id)
292 ctx = self._get_ctx(repo, commit_id)
293 return ctx.files()
293 return ctx.files()
294
294
295 return _ctx_files(repo_id, commit_id)
295 return _ctx_files(repo_id, commit_id)
296
296
297 @reraise_safe_exceptions
297 @reraise_safe_exceptions
298 def ctx_list(self, path, revision):
298 def ctx_list(self, path, revision):
299 repo = self._factory.repo(path)
299 repo = self._factory.repo(path)
300 ctx = self._get_ctx(repo, revision)
300 ctx = self._get_ctx(repo, revision)
301 return list(ctx)
301 return list(ctx)
302
302
303 @reraise_safe_exceptions
303 @reraise_safe_exceptions
304 def ctx_parents(self, wire, commit_id):
304 def ctx_parents(self, wire, commit_id):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
306 region = self._region(wire)
306 region = self._region(wire)
307 @region.conditional_cache_on_arguments(condition=cache_on)
307 @region.conditional_cache_on_arguments(condition=cache_on)
308 def _ctx_parents(_repo_id, _commit_id):
308 def _ctx_parents(_repo_id, _commit_id):
309 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
310 ctx = self._get_ctx(repo, commit_id)
310 ctx = self._get_ctx(repo, commit_id)
311 return [parent.hex() for parent in ctx.parents()
311 return [parent.hex() for parent in ctx.parents()
312 if not (parent.hidden() or parent.obsolete())]
312 if not (parent.hidden() or parent.obsolete())]
313
313
314 return _ctx_parents(repo_id, commit_id)
314 return _ctx_parents(repo_id, commit_id)
315
315
316 @reraise_safe_exceptions
316 @reraise_safe_exceptions
317 def ctx_children(self, wire, commit_id):
317 def ctx_children(self, wire, commit_id):
318 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
319 region = self._region(wire)
319 region = self._region(wire)
320 @region.conditional_cache_on_arguments(condition=cache_on)
320 @region.conditional_cache_on_arguments(condition=cache_on)
321 def _ctx_children(_repo_id, _commit_id):
321 def _ctx_children(_repo_id, _commit_id):
322 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
323 ctx = self._get_ctx(repo, commit_id)
323 ctx = self._get_ctx(repo, commit_id)
324 return [child.hex() for child in ctx.children()
324 return [child.hex() for child in ctx.children()
325 if not (child.hidden() or child.obsolete())]
325 if not (child.hidden() or child.obsolete())]
326
326
327 return _ctx_children(repo_id, commit_id)
327 return _ctx_children(repo_id, commit_id)
328
328
329 @reraise_safe_exceptions
329 @reraise_safe_exceptions
330 def ctx_phase(self, wire, commit_id):
330 def ctx_phase(self, wire, commit_id):
331 cache_on, context_uid, repo_id = self._cache_on(wire)
331 cache_on, context_uid, repo_id = self._cache_on(wire)
332 region = self._region(wire)
332 region = self._region(wire)
333 @region.conditional_cache_on_arguments(condition=cache_on)
333 @region.conditional_cache_on_arguments(condition=cache_on)
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
335 repo = self._factory.repo(wire)
335 repo = self._factory.repo(wire)
336 ctx = self._get_ctx(repo, commit_id)
336 ctx = self._get_ctx(repo, commit_id)
337 # public=0, draft=1, secret=2
337 # public=0, draft=1, secret=2
338 return ctx.phase()
338 return ctx.phase()
339 return _ctx_phase(context_uid, repo_id, commit_id)
339 return _ctx_phase(context_uid, repo_id, commit_id)
340
340
341 @reraise_safe_exceptions
341 @reraise_safe_exceptions
342 def ctx_obsolete(self, wire, commit_id):
342 def ctx_obsolete(self, wire, commit_id):
343 cache_on, context_uid, repo_id = self._cache_on(wire)
343 cache_on, context_uid, repo_id = self._cache_on(wire)
344 region = self._region(wire)
344 region = self._region(wire)
345 @region.conditional_cache_on_arguments(condition=cache_on)
345 @region.conditional_cache_on_arguments(condition=cache_on)
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
347 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
348 ctx = self._get_ctx(repo, commit_id)
348 ctx = self._get_ctx(repo, commit_id)
349 return ctx.obsolete()
349 return ctx.obsolete()
350 return _ctx_obsolete(context_uid, repo_id, commit_id)
350 return _ctx_obsolete(context_uid, repo_id, commit_id)
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def ctx_hidden(self, wire, commit_id):
353 def ctx_hidden(self, wire, commit_id):
354 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 region = self._region(wire)
355 region = self._region(wire)
356 @region.conditional_cache_on_arguments(condition=cache_on)
356 @region.conditional_cache_on_arguments(condition=cache_on)
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
358 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
359 ctx = self._get_ctx(repo, commit_id)
359 ctx = self._get_ctx(repo, commit_id)
360 return ctx.hidden()
360 return ctx.hidden()
361 return _ctx_hidden(context_uid, repo_id, commit_id)
361 return _ctx_hidden(context_uid, repo_id, commit_id)
362
362
363 @reraise_safe_exceptions
363 @reraise_safe_exceptions
364 def ctx_substate(self, wire, revision):
364 def ctx_substate(self, wire, revision):
365 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
366 ctx = self._get_ctx(repo, revision)
366 ctx = self._get_ctx(repo, revision)
367 return ctx.substate
367 return ctx.substate
368
368
369 @reraise_safe_exceptions
369 @reraise_safe_exceptions
370 def ctx_status(self, wire, revision):
370 def ctx_status(self, wire, revision):
371 repo = self._factory.repo(wire)
371 repo = self._factory.repo(wire)
372 ctx = self._get_ctx(repo, revision)
372 ctx = self._get_ctx(repo, revision)
373 status = repo[ctx.p1().node()].status(other=ctx.node())
373 status = repo[ctx.p1().node()].status(other=ctx.node())
374 # the status object (an odd, custom named tuple in mercurial) is not
374 # the status object (an odd, custom named tuple in mercurial) is not
375 # correctly serializable; we make it a list, as the underlying
375 # correctly serializable; we make it a list, as the underlying
376 # API expects this to be a list
376 # API expects this to be a list
377 return list(status)
377 return list(status)
378
378
379 @reraise_safe_exceptions
379 @reraise_safe_exceptions
380 def ctx_user(self, wire, revision):
380 def ctx_user(self, wire, revision):
381 repo = self._factory.repo(wire)
381 repo = self._factory.repo(wire)
382 ctx = self._get_ctx(repo, revision)
382 ctx = self._get_ctx(repo, revision)
383 return ctx.user()
383 return ctx.user()
384
384
385 @reraise_safe_exceptions
385 @reraise_safe_exceptions
386 def check_url(self, url, config):
386 def check_url(self, url, config):
387 _proto = None
387 _proto = None
388 if '+' in url[:url.find('://')]:
388 if '+' in url[:url.find('://')]:
389 _proto = url[0:url.find('+')]
389 _proto = url[0:url.find('+')]
390 url = url[url.find('+') + 1:]
390 url = url[url.find('+') + 1:]
391 handlers = []
391 handlers = []
392 url_obj = url_parser(url)
392 url_obj = url_parser(url)
393 test_uri, authinfo = url_obj.authinfo()
393 test_uri, authinfo = url_obj.authinfo()
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
395 url_obj.query = obfuscate_qs(url_obj.query)
395 url_obj.query = obfuscate_qs(url_obj.query)
396
396
397 cleaned_uri = str(url_obj)
397 cleaned_uri = str(url_obj)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
399
399
400 if authinfo:
400 if authinfo:
401 # create a password manager
401 # create a password manager
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
403 passmgr.add_password(*authinfo)
403 passmgr.add_password(*authinfo)
404
404
405 handlers.extend((httpbasicauthhandler(passmgr),
405 handlers.extend((httpbasicauthhandler(passmgr),
406 httpdigestauthhandler(passmgr)))
406 httpdigestauthhandler(passmgr)))
407
407
408 o = urllib2.build_opener(*handlers)
408 o = urllib2.build_opener(*handlers)
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
410 ('Accept', 'application/mercurial-0.1')]
410 ('Accept', 'application/mercurial-0.1')]
411
411
412 q = {"cmd": 'between'}
412 q = {"cmd": 'between'}
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
414 qs = '?%s' % urllib.urlencode(q)
414 qs = '?%s' % urllib.urlencode(q)
415 cu = "%s%s" % (test_uri, qs)
415 cu = "%s%s" % (test_uri, qs)
416 req = urllib2.Request(cu, None, {})
416 req = urllib2.Request(cu, None, {})
417
417
418 try:
418 try:
419 log.debug("Trying to open URL %s", cleaned_uri)
419 log.debug("Trying to open URL %s", cleaned_uri)
420 resp = o.open(req)
420 resp = o.open(req)
421 if resp.code != 200:
421 if resp.code != 200:
422 raise exceptions.URLError()('Return Code is not 200')
422 raise exceptions.URLError()('Return Code is not 200')
423 except Exception as e:
423 except Exception as e:
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
425 # means it cannot be cloned
425 # means it cannot be cloned
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
427
427
428 # now check if it's a proper hg repo, but don't do it for svn
428 # now check if it's a proper hg repo, but don't do it for svn
429 try:
429 try:
430 if _proto == 'svn':
430 if _proto == 'svn':
431 pass
431 pass
432 else:
432 else:
433 # check for pure hg repos
433 # check for pure hg repos
434 log.debug(
434 log.debug(
435 "Verifying if URL is a Mercurial repository: %s",
435 "Verifying if URL is a Mercurial repository: %s",
436 cleaned_uri)
436 cleaned_uri)
437 ui = make_ui_from_config(config)
437 ui = make_ui_from_config(config)
438 peer_checker = makepeer(ui, url)
438 peer_checker = makepeer(ui, url)
439 peer_checker.lookup('tip')
439 peer_checker.lookup('tip')
440 except Exception as e:
440 except Exception as e:
441 log.warning("URL is not a valid Mercurial repository: %s",
441 log.warning("URL is not a valid Mercurial repository: %s",
442 cleaned_uri)
442 cleaned_uri)
443 raise exceptions.URLError(e)(
443 raise exceptions.URLError(e)(
444 "url [%s] does not look like an hg repo org_exc: %s"
444 "url [%s] does not look like an hg repo org_exc: %s"
445 % (cleaned_uri, e))
445 % (cleaned_uri, e))
446
446
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
448 return True
448 return True
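# NOTE: check_url() first probes the URL with the hgweb 'between' command
# (cmd=between&pairs=<40 zeros>-<40 zeros>) and expects HTTP 200; then, unless
# the URL was prefixed with 'svn+', it opens a Mercurial peer and looks up 'tip'
# to confirm the remote really speaks the hg wire protocol.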
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
452 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
453
453
454 if file_filter:
454 if file_filter:
455 match_filter = match(file_filter[0], '', [file_filter[1]])
455 match_filter = match(file_filter[0], '', [file_filter[1]])
456 else:
456 else:
457 match_filter = file_filter
457 match_filter = file_filter
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
459
459
460 try:
460 try:
461 return "".join(patch.diff(
461 return "".join(patch.diff(
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
463 except RepoLookupError as e:
463 except RepoLookupError as e:
464 raise exceptions.LookupException(e)()
464 raise exceptions.LookupException(e)()
465
465
466 @reraise_safe_exceptions
466 @reraise_safe_exceptions
467 def node_history(self, wire, revision, path, limit):
467 def node_history(self, wire, revision, path, limit):
468 cache_on, context_uid, repo_id = self._cache_on(wire)
468 cache_on, context_uid, repo_id = self._cache_on(wire)
469 region = self._region(wire)
469 region = self._region(wire)
470 @region.conditional_cache_on_arguments(condition=cache_on)
470 @region.conditional_cache_on_arguments(condition=cache_on)
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
472 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
473
473
474 ctx = self._get_ctx(repo, revision)
474 ctx = self._get_ctx(repo, revision)
475 fctx = ctx.filectx(path)
475 fctx = ctx.filectx(path)
476
476
477 def history_iter():
477 def history_iter():
478 limit_rev = fctx.rev()
478 limit_rev = fctx.rev()
479 for obj in reversed(list(fctx.filelog())):
479 for obj in reversed(list(fctx.filelog())):
480 obj = fctx.filectx(obj)
480 obj = fctx.filectx(obj)
481 ctx = obj.changectx()
481 ctx = obj.changectx()
482 if ctx.hidden() or ctx.obsolete():
482 if ctx.hidden() or ctx.obsolete():
483 continue
483 continue
484
484
485 if limit_rev >= obj.rev():
485 if limit_rev >= obj.rev():
486 yield obj
486 yield obj
487
487
488 history = []
488 history = []
489 for cnt, obj in enumerate(history_iter()):
489 for cnt, obj in enumerate(history_iter()):
490 if limit and cnt >= limit:
490 if limit and cnt >= limit:
491 break
491 break
492 history.append(hex(obj.node()))
492 history.append(hex(obj.node()))
493
493
494 return [x for x in history]
494 return [x for x in history]
495 return _node_history(context_uid, repo_id, revision, path, limit)
495 return _node_history(context_uid, repo_id, revision, path, limit)
496
496
497 @reraise_safe_exceptions
497 @reraise_safe_exceptions
498 def node_history_untill(self, wire, revision, path, limit):
498 def node_history_untill(self, wire, revision, path, limit):
499 cache_on, context_uid, repo_id = self._cache_on(wire)
499 cache_on, context_uid, repo_id = self._cache_on(wire)
500 region = self._region(wire)
500 region = self._region(wire)
501 @region.conditional_cache_on_arguments(condition=cache_on)
501 @region.conditional_cache_on_arguments(condition=cache_on)
502 def _node_history_until(_context_uid, _repo_id):
502 def _node_history_until(_context_uid, _repo_id):
503 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
504 ctx = self._get_ctx(repo, revision)
504 ctx = self._get_ctx(repo, revision)
505 fctx = ctx.filectx(path)
505 fctx = ctx.filectx(path)
506
506
507 file_log = list(fctx.filelog())
507 file_log = list(fctx.filelog())
508 if limit:
508 if limit:
509 # Limit to the last n items
509 # Limit to the last n items
510 file_log = file_log[-limit:]
510 file_log = file_log[-limit:]
511
511
512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
514
514
515 @reraise_safe_exceptions
515 @reraise_safe_exceptions
516 def fctx_annotate(self, wire, revision, path):
516 def fctx_annotate(self, wire, revision, path):
517 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
518 ctx = self._get_ctx(repo, revision)
518 ctx = self._get_ctx(repo, revision)
519 fctx = ctx.filectx(path)
519 fctx = ctx.filectx(path)
520
520
521 result = []
521 result = []
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 ln_no = i
523 ln_no = i
524 sha = hex(annotate_obj.fctx.node())
524 sha = hex(annotate_obj.fctx.node())
525 content = annotate_obj.text
525 content = annotate_obj.text
526 result.append((ln_no, sha, content))
526 result.append((ln_no, sha, content))
527 return result
527 return result
528
528
529 @reraise_safe_exceptions
529 @reraise_safe_exceptions
530 def fctx_node_data(self, wire, revision, path):
530 def fctx_node_data(self, wire, revision, path):
531 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
532 ctx = self._get_ctx(repo, revision)
532 ctx = self._get_ctx(repo, revision)
533 fctx = ctx.filectx(path)
533 fctx = ctx.filectx(path)
534 return fctx.data()
534 return fctx.data()
535
535
536 @reraise_safe_exceptions
536 @reraise_safe_exceptions
537 def fctx_flags(self, wire, commit_id, path):
537 def fctx_flags(self, wire, commit_id, path):
538 cache_on, context_uid, repo_id = self._cache_on(wire)
538 cache_on, context_uid, repo_id = self._cache_on(wire)
539 region = self._region(wire)
539 region = self._region(wire)
540 @region.conditional_cache_on_arguments(condition=cache_on)
540 @region.conditional_cache_on_arguments(condition=cache_on)
541 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
542 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
544 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
545 return fctx.flags()
545 return fctx.flags()
546
546
547 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
548
548
549 @reraise_safe_exceptions
549 @reraise_safe_exceptions
550 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
551 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 region = self._region(wire)
552 region = self._region(wire)
553 @region.conditional_cache_on_arguments(condition=cache_on)
553 @region.conditional_cache_on_arguments(condition=cache_on)
554 def _fctx_size(_repo_id, _revision, _path):
554 def _fctx_size(_repo_id, _revision, _path):
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556 ctx = self._get_ctx(repo, commit_id)
556 ctx = self._get_ctx(repo, commit_id)
557 fctx = ctx.filectx(path)
557 fctx = ctx.filectx(path)
558 return fctx.size()
558 return fctx.size()
559 return _fctx_size(repo_id, commit_id, path)
559 return _fctx_size(repo_id, commit_id, path)
560
560
561 @reraise_safe_exceptions
561 @reraise_safe_exceptions
562 def get_all_commit_ids(self, wire, name):
562 def get_all_commit_ids(self, wire, name):
563 cache_on, context_uid, repo_id = self._cache_on(wire)
563 cache_on, context_uid, repo_id = self._cache_on(wire)
564 region = self._region(wire)
564 region = self._region(wire)
565 @region.conditional_cache_on_arguments(condition=cache_on)
565 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
567 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
568 repo = repo.filtered(name)
568 repo = repo.filtered(name)
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
570 return revs
570 return revs
571 return _get_all_commit_ids(context_uid, repo_id, name)
571 return _get_all_commit_ids(context_uid, repo_id, name)
572
572
573 @reraise_safe_exceptions
573 @reraise_safe_exceptions
574 def get_config_value(self, wire, section, name, untrusted=False):
574 def get_config_value(self, wire, section, name, untrusted=False):
575 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
576 return repo.ui.config(section, name, untrusted=untrusted)
576 return repo.ui.config(section, name, untrusted=untrusted)
577
577
578 @reraise_safe_exceptions
578 @reraise_safe_exceptions
579 def is_large_file(self, wire, commit_id, path):
579 def is_large_file(self, wire, commit_id, path):
580 cache_on, context_uid, repo_id = self._cache_on(wire)
580 cache_on, context_uid, repo_id = self._cache_on(wire)
581 region = self._region(wire)
581 region = self._region(wire)
582 @region.conditional_cache_on_arguments(condition=cache_on)
582 @region.conditional_cache_on_arguments(condition=cache_on)
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
584 return largefiles.lfutil.isstandin(path)
584 return largefiles.lfutil.isstandin(path)
585
585
586 return _is_large_file(context_uid, repo_id, commit_id, path)
586 return _is_large_file(context_uid, repo_id, commit_id, path)
587
587
588 @reraise_safe_exceptions
588 @reraise_safe_exceptions
589 def is_binary(self, wire, revision, path):
589 def is_binary(self, wire, revision, path):
590 cache_on, context_uid, repo_id = self._cache_on(wire)
590 cache_on, context_uid, repo_id = self._cache_on(wire)
591
591
592 region = self._region(wire)
592 region = self._region(wire)
593 @region.conditional_cache_on_arguments(condition=cache_on)
593 @region.conditional_cache_on_arguments(condition=cache_on)
594 def _is_binary(_repo_id, _sha, _path):
594 def _is_binary(_repo_id, _sha, _path):
595 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
596 ctx = self._get_ctx(repo, revision)
596 ctx = self._get_ctx(repo, revision)
597 fctx = ctx.filectx(path)
597 fctx = ctx.filectx(path)
598 return fctx.isbinary()
598 return fctx.isbinary()
599
599
600 return _is_binary(repo_id, revision, path)
600 return _is_binary(repo_id, revision, path)
601
601
602 @reraise_safe_exceptions
602 @reraise_safe_exceptions
603 def in_largefiles_store(self, wire, sha):
603 def in_largefiles_store(self, wire, sha):
604 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
605 return largefiles.lfutil.instore(repo, sha)
605 return largefiles.lfutil.instore(repo, sha)
606
606
607 @reraise_safe_exceptions
607 @reraise_safe_exceptions
608 def in_user_cache(self, wire, sha):
608 def in_user_cache(self, wire, sha):
609 repo = self._factory.repo(wire)
609 repo = self._factory.repo(wire)
610 return largefiles.lfutil.inusercache(repo.ui, sha)
610 return largefiles.lfutil.inusercache(repo.ui, sha)
611
611
612 @reraise_safe_exceptions
612 @reraise_safe_exceptions
613 def store_path(self, wire, sha):
613 def store_path(self, wire, sha):
614 repo = self._factory.repo(wire)
614 repo = self._factory.repo(wire)
615 return largefiles.lfutil.storepath(repo, sha)
615 return largefiles.lfutil.storepath(repo, sha)
616
616
617 @reraise_safe_exceptions
617 @reraise_safe_exceptions
618 def link(self, wire, sha, path):
618 def link(self, wire, sha, path):
619 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
620 largefiles.lfutil.link(
620 largefiles.lfutil.link(
621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
622
622
623 @reraise_safe_exceptions
623 @reraise_safe_exceptions
624 def localrepository(self, wire, create=False):
624 def localrepository(self, wire, create=False):
625 self._factory.repo(wire, create=create)
625 self._factory.repo(wire, create=create)
626
626
627 @reraise_safe_exceptions
627 @reraise_safe_exceptions
628 def lookup(self, wire, revision, both):
628 def lookup(self, wire, revision, both):
629 cache_on, context_uid, repo_id = self._cache_on(wire)
629 cache_on, context_uid, repo_id = self._cache_on(wire)
630
630
631 region = self._region(wire)
631 region = self._region(wire)
632 @region.conditional_cache_on_arguments(condition=cache_on)
632 @region.conditional_cache_on_arguments(condition=cache_on)
633 def _lookup(_context_uid, _repo_id, _revision, _both):
633 def _lookup(_context_uid, _repo_id, _revision, _both):
634
634
635 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
636 rev = _revision
636 rev = _revision
637 if isinstance(rev, int):
637 if isinstance(rev, int):
638 # NOTE(marcink):
638 # NOTE(marcink):
639 # since Mercurial doesn't support negative indexes properly
639 # since Mercurial doesn't support negative indexes properly
640 # we need to shift it down by one to get the proper index, e.g.
640 # we need to shift it down by one to get the proper index, e.g.
641 # repo[-1] => repo[-2]
641 # repo[-1] => repo[-2]
642 # repo[0] => repo[-1]
642 # repo[0] => repo[-1]
643 if rev <= 0:
643 if rev <= 0:
644 rev = rev + -1
644 rev = rev + -1
645 try:
645 try:
646 ctx = self._get_ctx(repo, rev)
646 ctx = self._get_ctx(repo, rev)
647 except (TypeError, RepoLookupError) as e:
647 except (TypeError, RepoLookupError) as e:
648 e._org_exc_tb = traceback.format_exc()
648 e._org_exc_tb = traceback.format_exc()
649 raise exceptions.LookupException(e)(rev)
649 raise exceptions.LookupException(e)(rev)
650 except LookupError as e:
650 except LookupError as e:
651 e._org_exc_tb = traceback.format_exc()
651 e._org_exc_tb = traceback.format_exc()
652 raise exceptions.LookupException(e)(e.name)
652 raise exceptions.LookupException(e)(e.name)
653
653
654 if not both:
654 if not both:
655 return ctx.hex()
655 return ctx.hex()
656
656
657 ctx = repo[ctx.hex()]
657 ctx = repo[ctx.hex()]
658 return ctx.hex(), ctx.rev()
658 return ctx.hex(), ctx.rev()
659
659
660 return _lookup(context_uid, repo_id, revision, both)
660 return _lookup(context_uid, repo_id, revision, both)
661
661
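# --- illustrative sketch, not part of this diff -------------------------------
# The integer-revision shift applied in _lookup above, shown in isolation. The
# helper name is hypothetical; it mirrors the `if rev <= 0: rev = rev + -1`
# branch, i.e. repo[0] => repo[-1] and repo[-1] => repo[-2], while positive
# revisions pass through unchanged.
def _shift_negative_hg_index(rev):
    if isinstance(rev, int) and rev <= 0:
        return rev + -1
    return rev

assert _shift_negative_hg_index(0) == -1
assert _shift_negative_hg_index(-1) == -2
assert _shift_negative_hg_index(7) == 7
# -------------------------------------------------------------------------------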
662 @reraise_safe_exceptions
662 @reraise_safe_exceptions
663 def sync_push(self, wire, url):
663 def sync_push(self, wire, url):
664 if not self.check_url(url, wire['config']):
664 if not self.check_url(url, wire['config']):
665 return
665 return
666
666
667 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
668
668
669 # Disable any prompts for this repo
669 # Disable any prompts for this repo
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
671
671
672 bookmarks = dict(repo._bookmarks).keys()
672 bookmarks = dict(repo._bookmarks).keys()
673 remote = peer(repo, {}, url)
673 remote = peer(repo, {}, url)
674 # Disable any prompts for this remote
674 # Disable any prompts for this remote
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
676
676
677 return exchange.push(
677 return exchange.push(
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def revision(self, wire, rev):
681 def revision(self, wire, rev):
682 repo = self._factory.repo(wire)
682 repo = self._factory.repo(wire)
683 ctx = self._get_ctx(repo, rev)
683 ctx = self._get_ctx(repo, rev)
684 return ctx.rev()
684 return ctx.rev()
685
685
686 @reraise_safe_exceptions
686 @reraise_safe_exceptions
687 def rev_range(self, wire, commit_filter):
687 def rev_range(self, wire, commit_filter):
688 cache_on, context_uid, repo_id = self._cache_on(wire)
688 cache_on, context_uid, repo_id = self._cache_on(wire)
689
689
690 region = self._region(wire)
690 region = self._region(wire)
691 @region.conditional_cache_on_arguments(condition=cache_on)
691 @region.conditional_cache_on_arguments(condition=cache_on)
692 def _rev_range(_context_uid, _repo_id, _filter):
692 def _rev_range(_context_uid, _repo_id, _filter):
693 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
694 revisions = [rev for rev in revrange(repo, commit_filter)]
694 revisions = [rev for rev in revrange(repo, commit_filter)]
695 return revisions
695 return revisions
696
696
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
698
698
699 @reraise_safe_exceptions
699 @reraise_safe_exceptions
700 def rev_range_hash(self, wire, node):
700 def rev_range_hash(self, wire, node):
701 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
702
702
703 def get_revs(repo, rev_opt):
703 def get_revs(repo, rev_opt):
704 if rev_opt:
704 if rev_opt:
705 revs = revrange(repo, rev_opt)
705 revs = revrange(repo, rev_opt)
706 if len(revs) == 0:
706 if len(revs) == 0:
707 return (nullrev, nullrev)
707 return (nullrev, nullrev)
708 return max(revs), min(revs)
708 return max(revs), min(revs)
709 else:
709 else:
710 return len(repo) - 1, 0
710 return len(repo) - 1, 0
711
711
712 stop, start = get_revs(repo, [node + ':'])
712 stop, start = get_revs(repo, [node + ':'])
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
714 return revs
714 return revs
715
715
716 @reraise_safe_exceptions
716 @reraise_safe_exceptions
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
718 other_path = kwargs.pop('other_path', None)
718 other_path = kwargs.pop('other_path', None)
719
719
720 # case when we want to compare two independent repositories
720 # case when we want to compare two independent repositories
721 if other_path and other_path != wire["path"]:
721 if other_path and other_path != wire["path"]:
722 baseui = self._factory._create_config(wire["config"])
722 baseui = self._factory._create_config(wire["config"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
724 else:
724 else:
725 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
726 return list(repo.revs(rev_spec, *args))
726 return list(repo.revs(rev_spec, *args))
727
727
728 @reraise_safe_exceptions
728 @reraise_safe_exceptions
729 def verify(self, wire,):
729 def verify(self, wire,):
730 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
731 baseui = self._factory._create_config(wire['config'])
731 baseui = self._factory._create_config(wire['config'])
732
732
733 baseui, output = patch_ui_message_output(baseui)
733 baseui, output = patch_ui_message_output(baseui)
734
734
735 repo.ui = baseui
735 repo.ui = baseui
736 verify.verify(repo)
736 verify.verify(repo)
737 return output.getvalue()
737 return output.getvalue()
738
738
739 @reraise_safe_exceptions
739 @reraise_safe_exceptions
740 def hg_update_cache(self, wire,):
740 def hg_update_cache(self, wire,):
741 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
742 baseui = self._factory._create_config(wire['config'])
742 baseui = self._factory._create_config(wire['config'])
743 baseui, output = patch_ui_message_output(baseui)
743 baseui, output = patch_ui_message_output(baseui)
744
744
745 repo.ui = baseui
745 repo.ui = baseui
746 with repo.wlock(), repo.lock():
746 with repo.wlock(), repo.lock():
747 repo.updatecaches(full=True)
747 repo.updatecaches(full=True)
748
748
749 return output.getvalue()
749 return output.getvalue()
750
750
751 @reraise_safe_exceptions
751 @reraise_safe_exceptions
752 def hg_rebuild_fn_cache(self, wire,):
752 def hg_rebuild_fn_cache(self, wire,):
753 repo = self._factory.repo(wire)
753 repo = self._factory.repo(wire)
754 baseui = self._factory._create_config(wire['config'])
754 baseui = self._factory._create_config(wire['config'])
755 baseui, output = patch_ui_message_output(baseui)
755 baseui, output = patch_ui_message_output(baseui)
756
756
757 repo.ui = baseui
757 repo.ui = baseui
758
758
759 repair.rebuildfncache(baseui, repo)
759 repair.rebuildfncache(baseui, repo)
760
760
761 return output.getvalue()
761 return output.getvalue()
762
762
763 @reraise_safe_exceptions
763 @reraise_safe_exceptions
764 def tags(self, wire):
764 def tags(self, wire):
765 cache_on, context_uid, repo_id = self._cache_on(wire)
765 cache_on, context_uid, repo_id = self._cache_on(wire)
766 region = self._region(wire)
766 region = self._region(wire)
767 @region.conditional_cache_on_arguments(condition=cache_on)
767 @region.conditional_cache_on_arguments(condition=cache_on)
768 def _tags(_context_uid, _repo_id):
768 def _tags(_context_uid, _repo_id):
769 repo = self._factory.repo(wire)
769 repo = self._factory.repo(wire)
770 return repo.tags()
770 return repo.tags()
771
771
772 return _tags(context_uid, repo_id)
772 return _tags(context_uid, repo_id)
773
773
774 @reraise_safe_exceptions
774 @reraise_safe_exceptions
775 def update(self, wire, node=None, clean=False):
775 def update(self, wire, node=None, clean=False):
776 repo = self._factory.repo(wire)
776 repo = self._factory.repo(wire)
777 baseui = self._factory._create_config(wire['config'])
777 baseui = self._factory._create_config(wire['config'])
778 commands.update(baseui, repo, node=node, clean=clean)
778 commands.update(baseui, repo, node=node, clean=clean)
779
779
780 @reraise_safe_exceptions
780 @reraise_safe_exceptions
781 def identify(self, wire):
781 def identify(self, wire):
782 repo = self._factory.repo(wire)
782 repo = self._factory.repo(wire)
783 baseui = self._factory._create_config(wire['config'])
783 baseui = self._factory._create_config(wire['config'])
784 output = io.BytesIO()
784 output = io.BytesIO()
785 baseui.write = output.write
785 baseui.write = output.write
786 # This is required to get a full node id
786 # This is required to get a full node id
787 baseui.debugflag = True
787 baseui.debugflag = True
788 commands.identify(baseui, repo, id=True)
788 commands.identify(baseui, repo, id=True)
789
789
790 return output.getvalue()
790 return output.getvalue()
791
791
792 @reraise_safe_exceptions
792 @reraise_safe_exceptions
793 def heads(self, wire, branch=None):
793 def heads(self, wire, branch=None):
794 repo = self._factory.repo(wire)
794 repo = self._factory.repo(wire)
795 baseui = self._factory._create_config(wire['config'])
795 baseui = self._factory._create_config(wire['config'])
796 output = io.BytesIO()
796 output = io.BytesIO()
797
797
798 def write(data, **unused_kwargs):
798 def write(data, **unused_kwargs):
799 output.write(data)
799 output.write(data)
800
800
801 baseui.write = write
801 baseui.write = write
802 if branch:
802 if branch:
803 args = [branch]
803 args = [branch]
804 else:
804 else:
805 args = []
805 args = []
806 commands.heads(baseui, repo, template='{node} ', *args)
806 commands.heads(baseui, repo, template='{node} ', *args)
807
807
808 return output.getvalue()
808 return output.getvalue()
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def ancestor(self, wire, revision1, revision2):
811 def ancestor(self, wire, revision1, revision2):
812 repo = self._factory.repo(wire)
812 repo = self._factory.repo(wire)
813 changelog = repo.changelog
813 changelog = repo.changelog
814 lookup = repo.lookup
814 lookup = repo.lookup
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
816 return hex(a)
816 return hex(a)
817
817
818 @reraise_safe_exceptions
818 @reraise_safe_exceptions
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
822
822
823 @reraise_safe_exceptions
823 @reraise_safe_exceptions
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
825
825
826 repo = self._factory.repo(wire)
826 repo = self._factory.repo(wire)
827 baseui = self._factory._create_config(wire['config'])
827 baseui = self._factory._create_config(wire['config'])
828 publishing = baseui.configbool('phases', 'publish')
828 publishing = baseui.configbool('phases', 'publish')
829 if publishing:
829 if publishing:
830 new_commit = 'public'
830 new_commit = 'public'
831 else:
831 else:
832 new_commit = 'draft'
832 new_commit = 'draft'
833
833
834 def _filectxfn(_repo, ctx, path):
834 def _filectxfn(_repo, ctx, path):
835 """
835 """
836 Marks the given path as added/changed/removed in the given _repo. This is
836 Marks the given path as added/changed/removed in the given _repo. This is
837 used by Mercurial's internal commit function.
837 used by Mercurial's internal commit function.
838 """
838 """
839
839
840 # check if this path is removed
840 # check if this path is removed
841 if path in removed:
841 if path in removed:
842 # returning None is a way to mark node for removal
842 # returning None is a way to mark node for removal
843 return None
843 return None
844
844
845 # check if this path was added or changed
845 # check if this path was added or changed
846 for node in updated:
846 for node in updated:
847 if node['path'] == path:
847 if node['path'] == path:
848 return memfilectx(
848 return memfilectx(
849 _repo,
849 _repo,
850 changectx=ctx,
850 changectx=ctx,
851 path=node['path'],
851 path=node['path'],
852 data=node['content'],
852 data=node['content'],
853 islink=False,
853 islink=False,
854 isexec=bool(node['mode'] & stat.S_IXUSR),
854 isexec=bool(node['mode'] & stat.S_IXUSR),
855 copysource=False)
855 copysource=False)
856
856
857 raise exceptions.AbortException()(
857 raise exceptions.AbortException()(
858 "Given path haven't been marked as added, "
858 "Given path haven't been marked as added, "
859 "changed or removed (%s)" % path)
859 "changed or removed (%s)" % path)
860
860
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
862
862
863 commit_ctx = memctx(
863 commit_ctx = memctx(
864 repo=repo,
864 repo=repo,
865 parents=parents,
865 parents=parents,
866 text=message,
866 text=message,
867 files=files,
867 files=files,
868 filectxfn=_filectxfn,
868 filectxfn=_filectxfn,
869 user=user,
869 user=user,
870 date=(commit_time, commit_timezone),
870 date=(commit_time, commit_timezone),
871 extra=extra)
871 extra=extra)
872
872
873 n = repo.commitctx(commit_ctx)
873 n = repo.commitctx(commit_ctx)
874 new_id = hex(n)
874 new_id = hex(n)
875
875
876 return new_id
876 return new_id
877
877
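# --- illustrative sketch, not part of this diff -------------------------------
# Example payload shapes accepted by commitctx above, inferred from how
# _filectxfn reads them: `updated` is a list of dicts with 'path', 'content' and
# 'mode', while `removed` is a plain list of paths. All concrete values below
# are hypothetical.
import stat

updated = [
    {'path': 'docs/readme.rst', 'content': 'hello\n', 'mode': 0o100644},
    {'path': 'bin/run.sh', 'content': '#!/bin/sh\n', 'mode': 0o100755},
]
removed = ['old/unused.py']
files = [node['path'] for node in updated] + removed

# the executable bit is derived exactly as in _filectxfn:
assert not bool(updated[0]['mode'] & stat.S_IXUSR)
assert bool(updated[1]['mode'] & stat.S_IXUSR)
# -------------------------------------------------------------------------------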
878 @reraise_safe_exceptions
878 @reraise_safe_exceptions
879 def pull(self, wire, url, commit_ids=None):
879 def pull(self, wire, url, commit_ids=None):
880 repo = self._factory.repo(wire)
880 repo = self._factory.repo(wire)
881 # Disable any prompts for this repo
881 # Disable any prompts for this repo
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
883
883
884 remote = peer(repo, {}, url)
884 remote = peer(repo, {}, url)
885 # Disable any prompts for this remote
885 # Disable any prompts for this remote
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
887
887
888 if commit_ids:
888 if commit_ids:
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
890
890
891 return exchange.pull(
891 return exchange.pull(
892 repo, remote, heads=commit_ids, force=None).cgresult
892 repo, remote, heads=commit_ids, force=None).cgresult
893
893
894 @reraise_safe_exceptions
894 @reraise_safe_exceptions
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
896 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
898
898
899 # Mercurial internally has a lot of logic that checks ONLY if
899 # Mercurial internally has a lot of logic that checks ONLY if
900 # an option is defined; we only pass options that are actually set
900 # an option is defined; we only pass options that are actually set
901 opts = {}
901 opts = {}
902 if bookmark:
902 if bookmark:
903 opts['bookmark'] = bookmark
903 opts['bookmark'] = bookmark
904 if branch:
904 if branch:
905 opts['branch'] = branch
905 opts['branch'] = branch
906 if revision:
906 if revision:
907 opts['rev'] = revision
907 opts['rev'] = revision
908
908
909 commands.pull(baseui, repo, source, **opts)
909 commands.pull(baseui, repo, source, **opts)
910
910
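# --- illustrative sketch, not part of this diff -------------------------------
# The "only pass options that are set" pattern from pull_cmd above, in
# isolation: per the note there, Mercurial checks whether an option is defined
# at all, so unset options are omitted from the dict rather than passed as None.
def build_pull_opts(bookmark=None, branch=None, revision=None):
    opts = {}
    if bookmark:
        opts['bookmark'] = bookmark
    if branch:
        opts['branch'] = branch
    if revision:
        opts['rev'] = revision
    return opts

assert build_pull_opts(bookmark='deploy') == {'bookmark': 'deploy'}
assert build_pull_opts() == {}
# -------------------------------------------------------------------------------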
911 @reraise_safe_exceptions
911 @reraise_safe_exceptions
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
913 repo = self._factory.repo(wire)
913 repo = self._factory.repo(wire)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
916 new_branch=push_branches)
916 new_branch=push_branches)
917
917
918 @reraise_safe_exceptions
918 @reraise_safe_exceptions
919 def strip(self, wire, revision, update, backup):
919 def strip(self, wire, revision, update, backup):
920 repo = self._factory.repo(wire)
920 repo = self._factory.repo(wire)
921 ctx = self._get_ctx(repo, revision)
921 ctx = self._get_ctx(repo, revision)
922 hgext_strip(
922 hgext_strip(
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
924
924
925 @reraise_safe_exceptions
925 @reraise_safe_exceptions
926 def get_unresolved_files(self, wire):
926 def get_unresolved_files(self, wire):
927 repo = self._factory.repo(wire)
927 repo = self._factory.repo(wire)
928
928
929 log.debug('Calculating unresolved files for repo: %s', repo)
929 log.debug('Calculating unresolved files for repo: %s', repo)
930 output = io.BytesIO()
930 output = io.BytesIO()
931
931
932 def write(data, **unused_kwargs):
932 def write(data, **unused_kwargs):
933 output.write(data)
933 output.write(data)
934
934
935 baseui = self._factory._create_config(wire['config'])
935 baseui = self._factory._create_config(wire['config'])
936 baseui.write = write
936 baseui.write = write
937
937
938 commands.resolve(baseui, repo, list=True)
938 commands.resolve(baseui, repo, list=True)
939 unresolved = output.getvalue().splitlines(0)
939 unresolved = output.getvalue().splitlines(0)
940 return unresolved
940 return unresolved
941
941
942 @reraise_safe_exceptions
942 @reraise_safe_exceptions
943 def merge(self, wire, revision):
943 def merge(self, wire, revision):
944 repo = self._factory.repo(wire)
944 repo = self._factory.repo(wire)
945 baseui = self._factory._create_config(wire['config'])
945 baseui = self._factory._create_config(wire['config'])
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
947
947
948 # When sub repositories are used, Mercurial prompts the user in
948 # When sub repositories are used, Mercurial prompts the user in
949 # case of merge conflicts or different sub repository sources. By
949 # case of merge conflicts or different sub repository sources. By
950 # setting the interactive flag to `False`, Mercurial doesn't prompt the
950 # setting the interactive flag to `False`, Mercurial doesn't prompt the
951 # user but instead uses a default value.
951 # user but instead uses a default value.
952 repo.ui.setconfig('ui', 'interactive', False)
952 repo.ui.setconfig('ui', 'interactive', False)
953 commands.merge(baseui, repo, rev=revision)
953 commands.merge(baseui, repo, rev=revision)
954
954
955 @reraise_safe_exceptions
955 @reraise_safe_exceptions
956 def merge_state(self, wire):
956 def merge_state(self, wire):
957 repo = self._factory.repo(wire)
957 repo = self._factory.repo(wire)
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
959
959
960 # When sub repositories are used, Mercurial prompts the user in
960 # When sub repositories are used, Mercurial prompts the user in
961 # case of merge conflicts or different sub repository sources. By
961 # case of merge conflicts or different sub repository sources. By
962 # setting the interactive flag to `False`, Mercurial doesn't prompt the
962 # setting the interactive flag to `False`, Mercurial doesn't prompt the
963 # user but instead uses a default value.
963 # user but instead uses a default value.
964 repo.ui.setconfig('ui', 'interactive', False)
964 repo.ui.setconfig('ui', 'interactive', False)
965 ms = hg_merge.mergestate(repo)
965 ms = hg_merge.mergestate(repo)
966 return [x for x in ms.unresolved()]
966 return [x for x in ms.unresolved()]
967
967
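# --- illustrative usage sketch, not part of this diff --------------------------
# How a caller might combine merge() and merge_state() above: attempt the merge
# in non-interactive mode, then inspect the unresolved paths reported by the
# merge state. `hg_remote` and `wire` are hypothetical stand-ins for the remote
# instance and its wire dict.
def merge_and_report(hg_remote, wire, revision):
    hg_remote.merge(wire, revision)
    unresolved = hg_remote.merge_state(wire)
    if unresolved:
        return {'clean': False, 'conflicts': unresolved}
    return {'clean': True, 'conflicts': []}
# -------------------------------------------------------------------------------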
968 @reraise_safe_exceptions
968 @reraise_safe_exceptions
969 def commit(self, wire, message, username, close_branch=False):
969 def commit(self, wire, message, username, close_branch=False):
970 repo = self._factory.repo(wire)
970 repo = self._factory.repo(wire)
971 baseui = self._factory._create_config(wire['config'])
971 baseui = self._factory._create_config(wire['config'])
972 repo.ui.setconfig('ui', 'username', username)
972 repo.ui.setconfig('ui', 'username', username)
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
974
974
975 @reraise_safe_exceptions
975 @reraise_safe_exceptions
976 def rebase(self, wire, source=None, dest=None, abort=False):
976 def rebase(self, wire, source=None, dest=None, abort=False):
977 repo = self._factory.repo(wire)
977 repo = self._factory.repo(wire)
978 baseui = self._factory._create_config(wire['config'])
978 baseui = self._factory._create_config(wire['config'])
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
980 # When sub repositories are used, Mercurial prompts the user in
980 # When sub repositories are used, Mercurial prompts the user in
981 # case of merge conflicts or different sub repository sources. By
981 # case of merge conflicts or different sub repository sources. By
982 # setting the interactive flag to `False`, Mercurial doesn't prompt the
982 # setting the interactive flag to `False`, Mercurial doesn't prompt the
983 # user but instead uses a default value.
983 # user but instead uses a default value.
984 repo.ui.setconfig('ui', 'interactive', False)
984 repo.ui.setconfig('ui', 'interactive', False)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
986
986
987 @reraise_safe_exceptions
987 @reraise_safe_exceptions
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
989 repo = self._factory.repo(wire)
989 repo = self._factory.repo(wire)
990 ctx = self._get_ctx(repo, revision)
990 ctx = self._get_ctx(repo, revision)
991 node = ctx.node()
991 node = ctx.node()
992
992
993 date = (tag_time, tag_timezone)
993 date = (tag_time, tag_timezone)
994 try:
994 try:
995 hg_tag.tag(repo, name, node, message, local, user, date)
995 hg_tag.tag(repo, name, node, message, local, user, date)
996 except Abort as e:
996 except Abort as e:
997 log.exception("Tag operation aborted")
997 log.exception("Tag operation aborted")
998 # Exception can contain unicode which we convert
998 # Exception can contain unicode which we convert
999 raise exceptions.AbortException(e)(repr(e))
999 raise exceptions.AbortException(e)(repr(e))
1000
1000
1001 @reraise_safe_exceptions
1001 @reraise_safe_exceptions
1002 def bookmark(self, wire, bookmark, revision=None):
1002 def bookmark(self, wire, bookmark, revision=None):
1003 repo = self._factory.repo(wire)
1003 repo = self._factory.repo(wire)
1004 baseui = self._factory._create_config(wire['config'])
1004 baseui = self._factory._create_config(wire['config'])
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1006
1006
1007 @reraise_safe_exceptions
1007 @reraise_safe_exceptions
1008 def install_hooks(self, wire, force=False):
1008 def install_hooks(self, wire, force=False):
1009 # we don't need any special hooks for Mercurial
1009 # we don't need any special hooks for Mercurial
1010 pass
1010 pass
1011
1011
1012 @reraise_safe_exceptions
1012 @reraise_safe_exceptions
1013 def get_hooks_info(self, wire):
1013 def get_hooks_info(self, wire):
1014 return {
1014 return {
1015 'pre_version': vcsserver.__version__,
1015 'pre_version': vcsserver.__version__,
1016 'post_version': vcsserver.__version__,
1016 'post_version': vcsserver.__version__,
1017 }
1017 }
1018
1018
1019 @reraise_safe_exceptions
1019 @reraise_safe_exceptions
1020 def set_head_ref(self, wire, head_name):
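        # nothing to do for Mercurial; a symbolic HEAD/default-branch ref is a Git concept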
1021 pass
1022
1023 @reraise_safe_exceptions
1020 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1024 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1021 archive_dir_name, commit_id):
1025 archive_dir_name, commit_id):
1022
1026
1023 def file_walker(_commit_id, path):
1027 def file_walker(_commit_id, path):
1024 repo = self._factory.repo(wire)
1028 repo = self._factory.repo(wire)
1025 ctx = repo[_commit_id]
1029 ctx = repo[_commit_id]
1026 is_root = path in ['', '/']
1030 is_root = path in ['', '/']
1027 if is_root:
1031 if is_root:
1028 matcher = alwaysmatcher(badfn=None)
1032 matcher = alwaysmatcher(badfn=None)
1029 else:
1033 else:
1030 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1034 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1031 file_iter = ctx.manifest().walk(matcher)
1035 file_iter = ctx.manifest().walk(matcher)
1032
1036
1033 for fn in file_iter:
1037 for fn in file_iter:
1034 file_path = fn
1038 file_path = fn
1035 flags = ctx.flags(fn)
1039 flags = ctx.flags(fn)
1036 mode = b'x' in flags and 0o755 or 0o644
1040 mode = b'x' in flags and 0o755 or 0o644
1037 is_link = b'l' in flags
1041 is_link = b'l' in flags
1038
1042
1039 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1043 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1040
1044
1041 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1045 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1042 archive_dir_name, commit_id)
1046 archive_dir_name, commit_id)
1043
1047
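# --- illustrative sketch, not part of this diff -------------------------------
# The manifest-flag decoding used by file_walker above, in isolation: Mercurial
# stores 'x' for executable entries and 'l' for symlinks, which is mapped to a
# POSIX mode plus an is_link marker before building archive nodes.
def decode_manifest_flags(flags):
    mode = 0o755 if b'x' in flags else 0o644
    is_link = b'l' in flags
    return mode, is_link

assert decode_manifest_flags(b'') == (0o644, False)
assert decode_manifest_flags(b'x') == (0o755, False)
assert decode_manifest_flags(b'l') == (0o644, True)
# -------------------------------------------------------------------------------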
@@ -1,862 +1,866 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 import time
22 import time
23 from urllib2 import URLError
23 from urllib2 import URLError
24 import urlparse
24 import urlparse
25 import logging
25 import logging
26 import posixpath as vcspath
26 import posixpath as vcspath
27 import StringIO
27 import StringIO
28 import urllib
28 import urllib
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
41 from vcsserver.utils import safe_str
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 svn_compatible_versions_map = {
47 svn_compatible_versions_map = {
48 'pre-1.4-compatible': '1.3',
48 'pre-1.4-compatible': '1.3',
49 'pre-1.5-compatible': '1.4',
49 'pre-1.5-compatible': '1.4',
50 'pre-1.6-compatible': '1.5',
50 'pre-1.6-compatible': '1.5',
51 'pre-1.8-compatible': '1.7',
51 'pre-1.8-compatible': '1.7',
52 'pre-1.9-compatible': '1.8',
52 'pre-1.9-compatible': '1.8',
53 }
53 }
54
54
55 current_compatible_version = '1.12'
55 current_compatible_version = '1.12'
56
56
57
57
58 def reraise_safe_exceptions(func):
58 def reraise_safe_exceptions(func):
59 """Decorator for converting svn exceptions to something neutral."""
59 """Decorator for converting svn exceptions to something neutral."""
60 def wrapper(*args, **kwargs):
60 def wrapper(*args, **kwargs):
61 try:
61 try:
62 return func(*args, **kwargs)
62 return func(*args, **kwargs)
63 except Exception as e:
63 except Exception as e:
64 if not hasattr(e, '_vcs_kind'):
64 if not hasattr(e, '_vcs_kind'):
65 log.exception("Unhandled exception in svn remote call")
65 log.exception("Unhandled exception in svn remote call")
66 raise_from_original(exceptions.UnhandledException(e))
66 raise_from_original(exceptions.UnhandledException(e))
67 raise
67 raise
68 return wrapper
68 return wrapper
69
69
70
70
71 class SubversionFactory(RepoFactory):
71 class SubversionFactory(RepoFactory):
72 repo_type = 'svn'
72 repo_type = 'svn'
73
73
74 def _create_repo(self, wire, create, compatible_version):
74 def _create_repo(self, wire, create, compatible_version):
75 path = svn.core.svn_path_canonicalize(wire['path'])
75 path = svn.core.svn_path_canonicalize(wire['path'])
76 if create:
76 if create:
77 fs_config = {'compatible-version': current_compatible_version}
77 fs_config = {'compatible-version': current_compatible_version}
78 if compatible_version:
78 if compatible_version:
79
79
80 compatible_version_string = \
80 compatible_version_string = \
81 svn_compatible_versions_map.get(compatible_version) \
81 svn_compatible_versions_map.get(compatible_version) \
82 or compatible_version
82 or compatible_version
83 fs_config['compatible-version'] = compatible_version_string
83 fs_config['compatible-version'] = compatible_version_string
84
84
85 log.debug('Create SVN repo with config "%s"', fs_config)
85 log.debug('Create SVN repo with config "%s"', fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
87 else:
87 else:
88 repo = svn.repos.open(path)
88 repo = svn.repos.open(path)
89
89
90 log.debug('Got SVN object: %s', repo)
90 log.debug('Got SVN object: %s', repo)
91 return repo
91 return repo
92
92
93 def repo(self, wire, create=False, compatible_version=None):
93 def repo(self, wire, create=False, compatible_version=None):
94 """
94 """
95 Get a repository instance for the given path.
95 Get a repository instance for the given path.
96 """
96 """
97 return self._create_repo(wire, create, compatible_version)
97 return self._create_repo(wire, create, compatible_version)
98
98
99
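# --- illustrative sketch, not part of this diff -------------------------------
# The compatible-version resolution used in _create_repo above: legacy
# 'pre-X-compatible' keys are translated via svn_compatible_versions_map, any
# other value is passed through unchanged, and the default falls back to
# current_compatible_version. The helper name is hypothetical.
def resolve_compatible_version(compatible_version):
    if not compatible_version:
        return current_compatible_version
    return svn_compatible_versions_map.get(compatible_version) or compatible_version

assert resolve_compatible_version(None) == '1.12'
assert resolve_compatible_version('pre-1.8-compatible') == '1.7'
assert resolve_compatible_version('1.9') == '1.9'
# -------------------------------------------------------------------------------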
99
100 NODE_TYPE_MAPPING = {
100 NODE_TYPE_MAPPING = {
101 svn.core.svn_node_file: 'file',
101 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_dir: 'dir',
102 svn.core.svn_node_dir: 'dir',
103 }
103 }
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
108 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
109 self._factory = factory
110 # TODO: Remove once we do not use internal Mercurial objects anymore
110 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # for subversion
111 # for subversion
112 self._hg_factory = hg_factory
112 self._hg_factory = hg_factory
113
113
114 @reraise_safe_exceptions
114 @reraise_safe_exceptions
115 def discover_svn_version(self):
115 def discover_svn_version(self):
116 try:
116 try:
117 import svn.core
117 import svn.core
118 svn_ver = svn.core.SVN_VERSION
118 svn_ver = svn.core.SVN_VERSION
119 except ImportError:
119 except ImportError:
120 svn_ver = None
120 svn_ver = None
121 return svn_ver
121 return svn_ver
122
122
123 @reraise_safe_exceptions
123 @reraise_safe_exceptions
124 def is_empty(self, wire):
124 def is_empty(self, wire):
125
125
126 try:
126 try:
127 return self.lookup(wire, -1) == 0
127 return self.lookup(wire, -1) == 0
128 except Exception:
128 except Exception:
129 log.exception("failed to read object_store")
129 log.exception("failed to read object_store")
130 return False
130 return False
131
131
132 def check_url(self, url, config_items):
132 def check_url(self, url, config_items):
133 # this can throw exception if not installed, but we detect this
133 # this can throw exception if not installed, but we detect this
134 from hgsubversion import svnrepo
134 from hgsubversion import svnrepo
135
135
136 baseui = self._hg_factory._create_config(config_items)
136 baseui = self._hg_factory._create_config(config_items)
137 # uuid function get's only valid UUID from proper repo, else
137 # uuid function get's only valid UUID from proper repo, else
138 # throws exception
138 # throws exception
139 try:
139 try:
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
141 except Exception:
141 except Exception:
142 tb = traceback.format_exc()
142 tb = traceback.format_exc()
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
144 raise URLError(
144 raise URLError(
145 '"%s" is not a valid Subversion source url.' % (url, ))
145 '"%s" is not a valid Subversion source url.' % (url, ))
146 return True
146 return True
147
147
148 def is_path_valid_repository(self, wire, path):
148 def is_path_valid_repository(self, wire, path):
149
149
150 # NOTE(marcink): short circuit the check for SVN repo
150 # NOTE(marcink): short circuit the check for SVN repo
151 # the repos.open might be expensive to check, but we have one cheap
151 # the repos.open might be expensive to check, but we have one cheap
152 # pre condition that we can use, to check for 'format' file
152 # pre condition that we can use, to check for 'format' file
153
153
154 if not os.path.isfile(os.path.join(path, 'format')):
154 if not os.path.isfile(os.path.join(path, 'format')):
155 return False
155 return False
156
156
157 try:
157 try:
158 svn.repos.open(path)
158 svn.repos.open(path)
159 except svn.core.SubversionException:
159 except svn.core.SubversionException:
160 tb = traceback.format_exc()
160 tb = traceback.format_exc()
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
162 return False
162 return False
163 return True
163 return True
164
164
165 @reraise_safe_exceptions
165 @reraise_safe_exceptions
166 def verify(self, wire,):
166 def verify(self, wire,):
167 repo_path = wire['path']
167 repo_path = wire['path']
168 if not self.is_path_valid_repository(wire, repo_path):
168 if not self.is_path_valid_repository(wire, repo_path):
169 raise Exception(
169 raise Exception(
170 "Path %s is not a valid Subversion repository." % repo_path)
170 "Path %s is not a valid Subversion repository." % repo_path)
171
171
172 cmd = ['svnadmin', 'info', repo_path]
172 cmd = ['svnadmin', 'info', repo_path]
173 stdout, stderr = subprocessio.run_command(cmd)
173 stdout, stderr = subprocessio.run_command(cmd)
174 return stdout
174 return stdout
175
175
176 def lookup(self, wire, revision):
176 def lookup(self, wire, revision):
177 if revision not in [-1, None, 'HEAD']:
177 if revision not in [-1, None, 'HEAD']:
178 raise NotImplementedError
178 raise NotImplementedError
179 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
180 fs_ptr = svn.repos.fs(repo)
180 fs_ptr = svn.repos.fs(repo)
181 head = svn.fs.youngest_rev(fs_ptr)
181 head = svn.fs.youngest_rev(fs_ptr)
182 return head
182 return head
183
183
184 def lookup_interval(self, wire, start_ts, end_ts):
184 def lookup_interval(self, wire, start_ts, end_ts):
185 repo = self._factory.repo(wire)
185 repo = self._factory.repo(wire)
186 fsobj = svn.repos.fs(repo)
186 fsobj = svn.repos.fs(repo)
187 start_rev = None
187 start_rev = None
188 end_rev = None
188 end_rev = None
189 if start_ts:
189 if start_ts:
190 start_ts_svn = apr_time_t(start_ts)
190 start_ts_svn = apr_time_t(start_ts)
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
192 else:
192 else:
193 start_rev = 1
193 start_rev = 1
194 if end_ts:
194 if end_ts:
195 end_ts_svn = apr_time_t(end_ts)
195 end_ts_svn = apr_time_t(end_ts)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
197 else:
197 else:
198 end_rev = svn.fs.youngest_rev(fsobj)
198 end_rev = svn.fs.youngest_rev(fsobj)
199 return start_rev, end_rev
199 return start_rev, end_rev
200
200
201 def revision_properties(self, wire, revision):
201 def revision_properties(self, wire, revision):
202
202
203 cache_on, context_uid, repo_id = self._cache_on(wire)
203 cache_on, context_uid, repo_id = self._cache_on(wire)
204 region = self._region(wire)
204 region = self._region(wire)
205 @region.conditional_cache_on_arguments(condition=cache_on)
205 @region.conditional_cache_on_arguments(condition=cache_on)
206 def _revision_properties(_repo_id, _revision):
206 def _revision_properties(_repo_id, _revision):
207 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
208 fs_ptr = svn.repos.fs(repo)
208 fs_ptr = svn.repos.fs(repo)
209 return svn.fs.revision_proplist(fs_ptr, revision)
209 return svn.fs.revision_proplist(fs_ptr, revision)
210 return _revision_properties(repo_id, revision)
210 return _revision_properties(repo_id, revision)
211
211
212 def revision_changes(self, wire, revision):
212 def revision_changes(self, wire, revision):
213
213
214 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
215 fsobj = svn.repos.fs(repo)
215 fsobj = svn.repos.fs(repo)
216 rev_root = svn.fs.revision_root(fsobj, revision)
216 rev_root = svn.fs.revision_root(fsobj, revision)
217
217
218 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 editor = svn.repos.ChangeCollector(fsobj, rev_root)
219 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 editor_ptr, editor_baton = svn.delta.make_editor(editor)
220 base_dir = ""
220 base_dir = ""
221 send_deltas = False
221 send_deltas = False
222 svn.repos.replay2(
222 svn.repos.replay2(
223 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
224 editor_ptr, editor_baton, None)
224 editor_ptr, editor_baton, None)
225
225
226 added = []
226 added = []
227 changed = []
227 changed = []
228 removed = []
228 removed = []
229
229
230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
231 for path, change in editor.changes.iteritems():
231 for path, change in editor.changes.iteritems():
232 # TODO: Decide what to do with directory nodes. Subversion can add
232 # TODO: Decide what to do with directory nodes. Subversion can add
233 # empty directories.
233 # empty directories.
234
234
235 if change.item_kind == svn.core.svn_node_dir:
235 if change.item_kind == svn.core.svn_node_dir:
236 continue
236 continue
237 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
238 added.append(path)
238 added.append(path)
239 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
240 svn.repos.CHANGE_ACTION_REPLACE]:
240 svn.repos.CHANGE_ACTION_REPLACE]:
241 changed.append(path)
241 changed.append(path)
242 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
243 removed.append(path)
243 removed.append(path)
244 else:
244 else:
245 raise NotImplementedError(
245 raise NotImplementedError(
246 "Action %s not supported on path %s" % (
246 "Action %s not supported on path %s" % (
247 change.action, path))
247 change.action, path))
248
248
249 changes = {
249 changes = {
250 'added': added,
250 'added': added,
251 'changed': changed,
251 'changed': changed,
252 'removed': removed,
252 'removed': removed,
253 }
253 }
254 return changes
254 return changes
255
255
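# --- illustrative sketch, not part of this diff -------------------------------
# Shape of the dict returned by revision_changes above; the paths are
# hypothetical. Directory nodes are skipped entirely, so only file paths show
# up in the three lists.
example_changes = {
    'added': ['trunk/new_module.py'],
    'changed': ['trunk/setup.py', 'branches/feature/readme.txt'],
    'removed': ['trunk/obsolete.cfg'],
}
assert set(example_changes) == {'added', 'changed', 'removed'}
# -------------------------------------------------------------------------------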
256 @reraise_safe_exceptions
256 @reraise_safe_exceptions
257 def node_history(self, wire, path, revision, limit):
257 def node_history(self, wire, path, revision, limit):
258 cache_on, context_uid, repo_id = self._cache_on(wire)
258 cache_on, context_uid, repo_id = self._cache_on(wire)
259 region = self._region(wire)
259 region = self._region(wire)
260 @region.conditional_cache_on_arguments(condition=cache_on)
260 @region.conditional_cache_on_arguments(condition=cache_on)
261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
262 cross_copies = False
262 cross_copies = False
263 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
264 fsobj = svn.repos.fs(repo)
264 fsobj = svn.repos.fs(repo)
265 rev_root = svn.fs.revision_root(fsobj, revision)
265 rev_root = svn.fs.revision_root(fsobj, revision)
266
266
267 history_revisions = []
267 history_revisions = []
268 history = svn.fs.node_history(rev_root, path)
268 history = svn.fs.node_history(rev_root, path)
269 history = svn.fs.history_prev(history, cross_copies)
269 history = svn.fs.history_prev(history, cross_copies)
270 while history:
270 while history:
271 __, node_revision = svn.fs.history_location(history)
271 __, node_revision = svn.fs.history_location(history)
272 history_revisions.append(node_revision)
272 history_revisions.append(node_revision)
273 if limit and len(history_revisions) >= limit:
273 if limit and len(history_revisions) >= limit:
274 break
274 break
275 history = svn.fs.history_prev(history, cross_copies)
275 history = svn.fs.history_prev(history, cross_copies)
276 return history_revisions
276 return history_revisions
277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
278
278
279 def node_properties(self, wire, path, revision):
279 def node_properties(self, wire, path, revision):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 region = self._region(wire)
281 region = self._region(wire)
282 @region.conditional_cache_on_arguments(condition=cache_on)
282 @region.conditional_cache_on_arguments(condition=cache_on)
283 def _node_properties(_repo_id, _path, _revision):
283 def _node_properties(_repo_id, _path, _revision):
284 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
285 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
286 rev_root = svn.fs.revision_root(fsobj, revision)
286 rev_root = svn.fs.revision_root(fsobj, revision)
287 return svn.fs.node_proplist(rev_root, path)
287 return svn.fs.node_proplist(rev_root, path)
288 return _node_properties(repo_id, path, revision)
288 return _node_properties(repo_id, path, revision)
289
289
290 def file_annotate(self, wire, path, revision):
290 def file_annotate(self, wire, path, revision):
291 abs_path = 'file://' + urllib.pathname2url(
291 abs_path = 'file://' + urllib.pathname2url(
292 vcspath.join(wire['path'], path))
292 vcspath.join(wire['path'], path))
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
294
294
295 start_rev = svn_opt_revision_value_t(0)
295 start_rev = svn_opt_revision_value_t(0)
296 peg_rev = svn_opt_revision_value_t(revision)
296 peg_rev = svn_opt_revision_value_t(revision)
297 end_rev = peg_rev
297 end_rev = peg_rev
298
298
299 annotations = []
299 annotations = []
300
300
301 def receiver(line_no, revision, author, date, line, pool):
301 def receiver(line_no, revision, author, date, line, pool):
302 annotations.append((line_no, revision, line))
302 annotations.append((line_no, revision, line))
303
303
304 # TODO: Cannot use blame5, missing typemap function in the swig code
304 # TODO: Cannot use blame5, missing typemap function in the swig code
305 try:
305 try:
306 svn.client.blame2(
306 svn.client.blame2(
307 file_uri, peg_rev, start_rev, end_rev,
307 file_uri, peg_rev, start_rev, end_rev,
308 receiver, svn.client.create_context())
308 receiver, svn.client.create_context())
309 except svn.core.SubversionException as exc:
309 except svn.core.SubversionException as exc:
310 log.exception("Error during blame operation.")
310 log.exception("Error during blame operation.")
311 raise Exception(
311 raise Exception(
312 "Blame not supported or file does not exist at path %s. "
312 "Blame not supported or file does not exist at path %s. "
313 "Error %s." % (path, exc))
313 "Error %s." % (path, exc))
314
314
315 return annotations
315 return annotations
316
316
317 def get_node_type(self, wire, path, revision=None):
317 def get_node_type(self, wire, path, revision=None):
318
318
319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self._region(wire)
320 region = self._region(wire)
321 @region.conditional_cache_on_arguments(condition=cache_on)
321 @region.conditional_cache_on_arguments(condition=cache_on)
322 def _get_node_type(_repo_id, _path, _revision):
322 def _get_node_type(_repo_id, _path, _revision):
323 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
324 fs_ptr = svn.repos.fs(repo)
324 fs_ptr = svn.repos.fs(repo)
325 if _revision is None:
325 if _revision is None:
326 _revision = svn.fs.youngest_rev(fs_ptr)
326 _revision = svn.fs.youngest_rev(fs_ptr)
327 root = svn.fs.revision_root(fs_ptr, _revision)
327 root = svn.fs.revision_root(fs_ptr, _revision)
328 node = svn.fs.check_path(root, path)
328 node = svn.fs.check_path(root, path)
329 return NODE_TYPE_MAPPING.get(node, None)
329 return NODE_TYPE_MAPPING.get(node, None)
330 return _get_node_type(repo_id, path, revision)
330 return _get_node_type(repo_id, path, revision)
331
331
332 def get_nodes(self, wire, path, revision=None):
332 def get_nodes(self, wire, path, revision=None):
333
333
334 cache_on, context_uid, repo_id = self._cache_on(wire)
334 cache_on, context_uid, repo_id = self._cache_on(wire)
335 region = self._region(wire)
335 region = self._region(wire)
336 @region.conditional_cache_on_arguments(condition=cache_on)
336 @region.conditional_cache_on_arguments(condition=cache_on)
337 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
338 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
339 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
340 if _revision is None:
340 if _revision is None:
341 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
342 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
343 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
344 result = []
344 result = []
345 for entry_path, entry_info in entries.iteritems():
345 for entry_path, entry_info in entries.iteritems():
346 result.append(
346 result.append(
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 return result
348 return result
349 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
350
350
351 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
352 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
353 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
354 if rev is None:
354 if rev is None:
355 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
356 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
358 return content.read()
358 return content.read()
359
359
360 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
361
361
362 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
363 region = self._region(wire)
363 region = self._region(wire)
364 @region.conditional_cache_on_arguments(condition=cache_on)
364 @region.conditional_cache_on_arguments(condition=cache_on)
365 def _get_file_size(_repo_id, _path, _revision):
365 def _get_file_size(_repo_id, _path, _revision):
366 repo = self._factory.repo(wire)
366 repo = self._factory.repo(wire)
367 fsobj = svn.repos.fs(repo)
367 fsobj = svn.repos.fs(repo)
368 if _revision is None:
368 if _revision is None:
369 _revision = svn.fs.youngest_revision(fsobj)
369 _revision = svn.fs.youngest_revision(fsobj)
370 root = svn.fs.revision_root(fsobj, _revision)
370 root = svn.fs.revision_root(fsobj, _revision)
371 size = svn.fs.file_length(root, path)
371 size = svn.fs.file_length(root, path)
372 return size
372 return size
373 return _get_file_size(repo_id, path, revision)
373 return _get_file_size(repo_id, path, revision)
374
374
375 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
377 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
378 compatible_version=compatible_version)
378 compatible_version=compatible_version)
379
379
380 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
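        # Illustrative example (hypothetical URL): a src_url such as
        # 'https://alice:secret@svn.example.com/repo' would parse into
        # ('alice', 'secret', src_url); URLs without embedded credentials
        # yield (None, None, src_url).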
        obj = urlparse.urlparse(src_url)
        username = obj.username or None
        password = obj.password or None
        return username, password, src_url

    def import_remote_repository(self, wire, src_url):
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
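        # The dump is streamed straight into `svnadmin load`; conceptually
        # this is the shell pipeline:
        #     svnrdump dump <src_url> | svnadmin load <repo_path>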

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        log.debug('svnrdump dump process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = 'UNKNOWN:{}'.format(errors)
            raise Exception(
                'Failed to dump the remote repository from %s. Reason:%s' % (
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

    def commit(self, wire, message, author, timestamp, updated, removed):
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):

        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        try:
            return diff_creator.generate_diff()
        except svn.core.SubversionException as e:
            log.exception(
                "Error during diff operation. "
                "Path might not exist %s, %s" % (path1, path2))
            return ""

    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        return False

    @reraise_safe_exceptions
    def is_binary(self, wire, rev, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _rev, _path):
            raw_bytes = self.get_file_content(wire, path, rev)
            return raw_bytes and '\0' in raw_bytes

        return _is_binary(repo_id, rev, path)

    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
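        # Illustrative usage sketch (`remote` and the command shown are
        # examples, not something this module itself calls):
        #
        #     stdout, stderr = remote.run_svn_command(
        #         wire, ['svn', 'info', wire['path']], _safe=True)
        #
        # With `_safe=True` a failure to spawn the process returns the error
        # text instead of raising a VcsException.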
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = opts.pop('_safe', False)

        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            if safe_call:
                return '', safe_str(err).strip()
            else:
                cmd = ' '.join(cmd)  # human friendly CMD
                tb_err = ("Couldn't run svn command (%s).\n"
                          "Original error was:%s\n"
                          "Call options:%s\n"
                          % (cmd, err, _opts))
                log.exception(tb_err)
                raise exceptions.VcsException()(tb_err)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        from vcsserver.hook_utils import (
            get_svn_pre_hook_version, get_svn_post_hook_version)
        repo_path = wire['path']
        return {
            'pre_version': get_svn_pre_hook_version(repo_path),
            'post_version': get_svn_post_hook_version(repo_path),
        }

    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
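        # Subversion has no symbolic HEAD reference to move (unlike git), so
        # this is intentionally a no-op; it presumably exists only to keep the
        # remote API uniform across backends.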
        pass

    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):

        def walk_tree(root, root_dir, _commit_id):
            """
            Special recursive svn repo walker
            """
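            # Yields (path, data, node_type) triples; for directories `data`
            # only carries a mode, for files it also includes 'is_link' and a
            # lazy 'content_stream' reader.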

            filemode_default = 0o100644
            filemode_executable = 0o100755

            file_iter = svn.fs.dir_entries(root, root_dir)
            for f_name in file_iter:
                f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)

                if f_type == 'dir':
                    # return only DIR, and then all entries in that dir
                    yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
                    new_root = os.path.join(root_dir, f_name)
                    for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
                        yield _f_name, _f_data, _f_type
                else:
                    f_path = os.path.join(root_dir, f_name).rstrip('/')
                    prop_list = svn.fs.node_proplist(root, f_path)

                    f_mode = filemode_default
                    if prop_list.get('svn:executable'):
                        f_mode = filemode_executable

                    f_is_link = False
                    if prop_list.get('svn:special'):
                        f_is_link = True

                    data = {
                        'is_link': f_is_link,
                        'mode': f_mode,
                        'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
                    }

                    yield f_path, data, f_type

        def file_walker(_commit_id, path):
            repo = self._factory.repo(wire)
            root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))

            def no_content():
                raise NoContentException()

            for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
                file_path = f_name

                if f_type == 'dir':
                    mode = f_data['mode']
                    yield ArchiveNode(file_path, mode, False, no_content)
                else:
                    mode = f_data['mode']
                    is_link = f_data['is_link']
                    data_stream = f_data['content_stream']
                    yield ArchiveNode(file_path, mode, is_link, data_stream)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)


class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """
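    # Driven by `SvnRemote.diff` above, roughly:
    #
    #     SvnDiffer(repo, rev1, path1, rev2, path2,
    #               ignore_whitespace, context).generate_diff()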

    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)


class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """
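    # Each recorded change is a (path, node_kind, action) tuple, for example
    # ('docs/readme.txt', 'file', 'change') for a hypothetical path; the list
    # is consumed by `SvnDiffer._generate_dir_diff` after driving this editor
    # through `svn.repos.dir_delta2`.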

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))


def authorization_callback_allow_all(root, path, pool):
    return True


class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """
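    # A minimal sketch of the node mapping this processor consumes (the
    # values shown are examples only):
    #
    #     node = {
    #         'path': 'trunk/docs/readme.txt',
    #         'content': 'new file body',
    #         'properties': {'svn:executable': '*'},  # optional
    #     }
    #     TxnNodeProcessor(node, txn_root).update()   # or .remove()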

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)


def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
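    # APR time is expressed in microseconds since the epoch, so e.g. a
    # timestamp of 1577836800 (2020-01-01 00:00:00 UTC) becomes 1.5778368e+15.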
    return timestamp * 1E6


def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
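    # Used above in `file_annotate`, e.g. svn_opt_revision_value_t(0) for the
    # start revision and svn_opt_revision_value_t(revision) for the peg
    # revision passed to `svn.client.blame2`.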
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision