##// END OF EJS Templates
caches: small naming refactor to fix tests.
super-admin -
r964:1fc3584c default
parent child Browse files
Show More
@@ -1,1250 +1,1250 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = '^{}'
53 PEELED_REF_MARKER = '^{}'
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def str_to_dulwich(value):
59 def str_to_dulwich(value):
60 """
60 """
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
62 """
62 """
63 return value.decode(settings.WIRE_ENCODING)
63 return value.decode(settings.WIRE_ENCODING)
64
64
65
65
66 def reraise_safe_exceptions(func):
66 def reraise_safe_exceptions(func):
67 """Converts Dulwich exceptions to something neutral."""
67 """Converts Dulwich exceptions to something neutral."""
68
68
69 @wraps(func)
69 @wraps(func)
70 def wrapper(*args, **kwargs):
70 def wrapper(*args, **kwargs):
71 try:
71 try:
72 return func(*args, **kwargs)
72 return func(*args, **kwargs)
73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
74 exc = exceptions.LookupException(org_exc=e)
74 exc = exceptions.LookupException(org_exc=e)
75 raise exc(safe_str(e))
75 raise exc(safe_str(e))
76 except (HangupException, UnexpectedCommandError) as e:
76 except (HangupException, UnexpectedCommandError) as e:
77 exc = exceptions.VcsException(org_exc=e)
77 exc = exceptions.VcsException(org_exc=e)
78 raise exc(safe_str(e))
78 raise exc(safe_str(e))
79 except Exception as e:
79 except Exception as e:
80 # NOTE(marcink): becuase of how dulwich handles some exceptions
80 # NOTE(marcink): becuase of how dulwich handles some exceptions
81 # (KeyError on empty repos), we cannot track this and catch all
81 # (KeyError on empty repos), we cannot track this and catch all
82 # exceptions, it's an exceptions from other handlers
82 # exceptions, it's an exceptions from other handlers
83 #if not hasattr(e, '_vcs_kind'):
83 #if not hasattr(e, '_vcs_kind'):
84 #log.exception("Unhandled exception in git remote call")
84 #log.exception("Unhandled exception in git remote call")
85 #raise_from_original(exceptions.UnhandledException)
85 #raise_from_original(exceptions.UnhandledException)
86 raise
86 raise
87 return wrapper
87 return wrapper
88
88
89
89
90 class Repo(DulwichRepo):
90 class Repo(DulwichRepo):
91 """
91 """
92 A wrapper for dulwich Repo class.
92 A wrapper for dulwich Repo class.
93
93
94 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
95 "Too many open files" error. We need to close all opened file descriptors
95 "Too many open files" error. We need to close all opened file descriptors
96 once the repo object is destroyed.
96 once the repo object is destroyed.
97 """
97 """
98 def __del__(self):
98 def __del__(self):
99 if hasattr(self, 'object_store'):
99 if hasattr(self, 'object_store'):
100 self.close()
100 self.close()
101
101
102
102
103 class Repository(LibGit2Repo):
103 class Repository(LibGit2Repo):
104
104
105 def __enter__(self):
105 def __enter__(self):
106 return self
106 return self
107
107
108 def __exit__(self, exc_type, exc_val, exc_tb):
108 def __exit__(self, exc_type, exc_val, exc_tb):
109 self.free()
109 self.free()
110
110
111
111
112 class GitFactory(RepoFactory):
112 class GitFactory(RepoFactory):
113 repo_type = 'git'
113 repo_type = 'git'
114
114
115 def _create_repo(self, wire, create, use_libgit2=False):
115 def _create_repo(self, wire, create, use_libgit2=False):
116 if use_libgit2:
116 if use_libgit2:
117 return Repository(wire['path'])
117 return Repository(wire['path'])
118 else:
118 else:
119 repo_path = str_to_dulwich(wire['path'])
119 repo_path = str_to_dulwich(wire['path'])
120 return Repo(repo_path)
120 return Repo(repo_path)
121
121
122 def repo(self, wire, create=False, use_libgit2=False):
122 def repo(self, wire, create=False, use_libgit2=False):
123 """
123 """
124 Get a repository instance for the given path.
124 Get a repository instance for the given path.
125 """
125 """
126 return self._create_repo(wire, create, use_libgit2)
126 return self._create_repo(wire, create, use_libgit2)
127
127
128 def repo_libgit2(self, wire):
128 def repo_libgit2(self, wire):
129 return self.repo(wire, use_libgit2=True)
129 return self.repo(wire, use_libgit2=True)
130
130
131
131
132 class GitRemote(RemoteBase):
132 class GitRemote(RemoteBase):
133
133
134 def __init__(self, factory):
134 def __init__(self, factory):
135 self._factory = factory
135 self._factory = factory
136 self._bulk_methods = {
136 self._bulk_methods = {
137 "date": self.date,
137 "date": self.date,
138 "author": self.author,
138 "author": self.author,
139 "branch": self.branch,
139 "branch": self.branch,
140 "message": self.message,
140 "message": self.message,
141 "parents": self.parents,
141 "parents": self.parents,
142 "_commit": self.revision,
142 "_commit": self.revision,
143 }
143 }
144
144
145 def _wire_to_config(self, wire):
145 def _wire_to_config(self, wire):
146 if 'config' in wire:
146 if 'config' in wire:
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 return {}
148 return {}
149
149
150 def _remote_conf(self, config):
150 def _remote_conf(self, config):
151 params = [
151 params = [
152 '-c', 'core.askpass=""',
152 '-c', 'core.askpass=""',
153 ]
153 ]
154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 ssl_cert_dir = config.get('vcs_ssl_dir')
155 if ssl_cert_dir:
155 if ssl_cert_dir:
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 return params
157 return params
158
158
159 @reraise_safe_exceptions
159 @reraise_safe_exceptions
160 def discover_git_version(self):
160 def discover_git_version(self):
161 stdout, _ = self.run_git_command(
161 stdout, _ = self.run_git_command(
162 {}, ['--version'], _bare=True, _safe=True)
162 {}, ['--version'], _bare=True, _safe=True)
163 prefix = 'git version'
163 prefix = 'git version'
164 if stdout.startswith(prefix):
164 if stdout.startswith(prefix):
165 stdout = stdout[len(prefix):]
165 stdout = stdout[len(prefix):]
166 return stdout.strip()
166 return stdout.strip()
167
167
168 @reraise_safe_exceptions
168 @reraise_safe_exceptions
169 def is_empty(self, wire):
169 def is_empty(self, wire):
170 repo_init = self._factory.repo_libgit2(wire)
170 repo_init = self._factory.repo_libgit2(wire)
171 with repo_init as repo:
171 with repo_init as repo:
172
172
173 try:
173 try:
174 has_head = repo.head.name
174 has_head = repo.head.name
175 if has_head:
175 if has_head:
176 return False
176 return False
177
177
178 # NOTE(marcink): check again using more expensive method
178 # NOTE(marcink): check again using more expensive method
179 return repo.is_empty
179 return repo.is_empty
180 except Exception:
180 except Exception:
181 pass
181 pass
182
182
183 return True
183 return True
184
184
185 @reraise_safe_exceptions
185 @reraise_safe_exceptions
186 def assert_correct_path(self, wire):
186 def assert_correct_path(self, wire):
187 cache_on, context_uid, repo_id = self._cache_on(wire)
187 cache_on, context_uid, repo_id = self._cache_on(wire)
188 region = self.region(wire)
188 region = self._region(wire)
189 @region.conditional_cache_on_arguments(condition=cache_on)
189 @region.conditional_cache_on_arguments(condition=cache_on)
190 def _assert_correct_path(_context_uid, _repo_id):
190 def _assert_correct_path(_context_uid, _repo_id):
191 try:
191 try:
192 repo_init = self._factory.repo_libgit2(wire)
192 repo_init = self._factory.repo_libgit2(wire)
193 with repo_init as repo:
193 with repo_init as repo:
194 pass
194 pass
195 except pygit2.GitError:
195 except pygit2.GitError:
196 path = wire.get('path')
196 path = wire.get('path')
197 tb = traceback.format_exc()
197 tb = traceback.format_exc()
198 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
198 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
199 return False
199 return False
200
200
201 return True
201 return True
202 return _assert_correct_path(context_uid, repo_id)
202 return _assert_correct_path(context_uid, repo_id)
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def bare(self, wire):
205 def bare(self, wire):
206 repo_init = self._factory.repo_libgit2(wire)
206 repo_init = self._factory.repo_libgit2(wire)
207 with repo_init as repo:
207 with repo_init as repo:
208 return repo.is_bare
208 return repo.is_bare
209
209
210 @reraise_safe_exceptions
210 @reraise_safe_exceptions
211 def blob_as_pretty_string(self, wire, sha):
211 def blob_as_pretty_string(self, wire, sha):
212 repo_init = self._factory.repo_libgit2(wire)
212 repo_init = self._factory.repo_libgit2(wire)
213 with repo_init as repo:
213 with repo_init as repo:
214 blob_obj = repo[sha]
214 blob_obj = repo[sha]
215 blob = blob_obj.data
215 blob = blob_obj.data
216 return blob
216 return blob
217
217
218 @reraise_safe_exceptions
218 @reraise_safe_exceptions
219 def blob_raw_length(self, wire, sha):
219 def blob_raw_length(self, wire, sha):
220 cache_on, context_uid, repo_id = self._cache_on(wire)
220 cache_on, context_uid, repo_id = self._cache_on(wire)
221 region = self.region(wire)
221 region = self._region(wire)
222 @region.conditional_cache_on_arguments(condition=cache_on)
222 @region.conditional_cache_on_arguments(condition=cache_on)
223 def _blob_raw_length(_repo_id, _sha):
223 def _blob_raw_length(_repo_id, _sha):
224
224
225 repo_init = self._factory.repo_libgit2(wire)
225 repo_init = self._factory.repo_libgit2(wire)
226 with repo_init as repo:
226 with repo_init as repo:
227 blob = repo[sha]
227 blob = repo[sha]
228 return blob.size
228 return blob.size
229
229
230 return _blob_raw_length(repo_id, sha)
230 return _blob_raw_length(repo_id, sha)
231
231
232 def _parse_lfs_pointer(self, raw_content):
232 def _parse_lfs_pointer(self, raw_content):
233
233
234 spec_string = 'version https://git-lfs.github.com/spec'
234 spec_string = 'version https://git-lfs.github.com/spec'
235 if raw_content and raw_content.startswith(spec_string):
235 if raw_content and raw_content.startswith(spec_string):
236 pattern = re.compile(r"""
236 pattern = re.compile(r"""
237 (?:\n)?
237 (?:\n)?
238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
240 ^size[ ](?P<oid_size>[0-9]+)\n
240 ^size[ ](?P<oid_size>[0-9]+)\n
241 (?:\n)?
241 (?:\n)?
242 """, re.VERBOSE | re.MULTILINE)
242 """, re.VERBOSE | re.MULTILINE)
243 match = pattern.match(raw_content)
243 match = pattern.match(raw_content)
244 if match:
244 if match:
245 return match.groupdict()
245 return match.groupdict()
246
246
247 return {}
247 return {}
248
248
249 @reraise_safe_exceptions
249 @reraise_safe_exceptions
250 def is_large_file(self, wire, commit_id):
250 def is_large_file(self, wire, commit_id):
251 cache_on, context_uid, repo_id = self._cache_on(wire)
251 cache_on, context_uid, repo_id = self._cache_on(wire)
252
252
253 region = self.region(wire)
253 region = self._region(wire)
254 @region.conditional_cache_on_arguments(condition=cache_on)
254 @region.conditional_cache_on_arguments(condition=cache_on)
255 def _is_large_file(_repo_id, _sha):
255 def _is_large_file(_repo_id, _sha):
256 repo_init = self._factory.repo_libgit2(wire)
256 repo_init = self._factory.repo_libgit2(wire)
257 with repo_init as repo:
257 with repo_init as repo:
258 blob = repo[commit_id]
258 blob = repo[commit_id]
259 if blob.is_binary:
259 if blob.is_binary:
260 return {}
260 return {}
261
261
262 return self._parse_lfs_pointer(blob.data)
262 return self._parse_lfs_pointer(blob.data)
263
263
264 return _is_large_file(repo_id, commit_id)
264 return _is_large_file(repo_id, commit_id)
265
265
266 @reraise_safe_exceptions
266 @reraise_safe_exceptions
267 def is_binary(self, wire, tree_id):
267 def is_binary(self, wire, tree_id):
268 cache_on, context_uid, repo_id = self._cache_on(wire)
268 cache_on, context_uid, repo_id = self._cache_on(wire)
269
269
270 region = self.region(wire)
270 region = self._region(wire)
271 @region.conditional_cache_on_arguments(condition=cache_on)
271 @region.conditional_cache_on_arguments(condition=cache_on)
272 def _is_binary(_repo_id, _tree_id):
272 def _is_binary(_repo_id, _tree_id):
273 repo_init = self._factory.repo_libgit2(wire)
273 repo_init = self._factory.repo_libgit2(wire)
274 with repo_init as repo:
274 with repo_init as repo:
275 blob_obj = repo[tree_id]
275 blob_obj = repo[tree_id]
276 return blob_obj.is_binary
276 return blob_obj.is_binary
277
277
278 return _is_binary(repo_id, tree_id)
278 return _is_binary(repo_id, tree_id)
279
279
280 @reraise_safe_exceptions
280 @reraise_safe_exceptions
281 def in_largefiles_store(self, wire, oid):
281 def in_largefiles_store(self, wire, oid):
282 conf = self._wire_to_config(wire)
282 conf = self._wire_to_config(wire)
283 repo_init = self._factory.repo_libgit2(wire)
283 repo_init = self._factory.repo_libgit2(wire)
284 with repo_init as repo:
284 with repo_init as repo:
285 repo_name = repo.path
285 repo_name = repo.path
286
286
287 store_location = conf.get('vcs_git_lfs_store_location')
287 store_location = conf.get('vcs_git_lfs_store_location')
288 if store_location:
288 if store_location:
289
289
290 store = LFSOidStore(
290 store = LFSOidStore(
291 oid=oid, repo=repo_name, store_location=store_location)
291 oid=oid, repo=repo_name, store_location=store_location)
292 return store.has_oid()
292 return store.has_oid()
293
293
294 return False
294 return False
295
295
296 @reraise_safe_exceptions
296 @reraise_safe_exceptions
297 def store_path(self, wire, oid):
297 def store_path(self, wire, oid):
298 conf = self._wire_to_config(wire)
298 conf = self._wire_to_config(wire)
299 repo_init = self._factory.repo_libgit2(wire)
299 repo_init = self._factory.repo_libgit2(wire)
300 with repo_init as repo:
300 with repo_init as repo:
301 repo_name = repo.path
301 repo_name = repo.path
302
302
303 store_location = conf.get('vcs_git_lfs_store_location')
303 store_location = conf.get('vcs_git_lfs_store_location')
304 if store_location:
304 if store_location:
305 store = LFSOidStore(
305 store = LFSOidStore(
306 oid=oid, repo=repo_name, store_location=store_location)
306 oid=oid, repo=repo_name, store_location=store_location)
307 return store.oid_path
307 return store.oid_path
308 raise ValueError('Unable to fetch oid with path {}'.format(oid))
308 raise ValueError('Unable to fetch oid with path {}'.format(oid))
309
309
310 @reraise_safe_exceptions
310 @reraise_safe_exceptions
311 def bulk_request(self, wire, rev, pre_load):
311 def bulk_request(self, wire, rev, pre_load):
312 cache_on, context_uid, repo_id = self._cache_on(wire)
312 cache_on, context_uid, repo_id = self._cache_on(wire)
313 region = self.region(wire)
313 region = self._region(wire)
314 @region.conditional_cache_on_arguments(condition=cache_on)
314 @region.conditional_cache_on_arguments(condition=cache_on)
315 def _bulk_request(_repo_id, _rev, _pre_load):
315 def _bulk_request(_repo_id, _rev, _pre_load):
316 result = {}
316 result = {}
317 for attr in pre_load:
317 for attr in pre_load:
318 try:
318 try:
319 method = self._bulk_methods[attr]
319 method = self._bulk_methods[attr]
320 args = [wire, rev]
320 args = [wire, rev]
321 result[attr] = method(*args)
321 result[attr] = method(*args)
322 except KeyError as e:
322 except KeyError as e:
323 raise exceptions.VcsException(e)(
323 raise exceptions.VcsException(e)(
324 "Unknown bulk attribute: %s" % attr)
324 "Unknown bulk attribute: %s" % attr)
325 return result
325 return result
326
326
327 return _bulk_request(repo_id, rev, sorted(pre_load))
327 return _bulk_request(repo_id, rev, sorted(pre_load))
328
328
329 def _build_opener(self, url):
329 def _build_opener(self, url):
330 handlers = []
330 handlers = []
331 url_obj = url_parser(url)
331 url_obj = url_parser(url)
332 _, authinfo = url_obj.authinfo()
332 _, authinfo = url_obj.authinfo()
333
333
334 if authinfo:
334 if authinfo:
335 # create a password manager
335 # create a password manager
336 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
336 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
337 passmgr.add_password(*authinfo)
337 passmgr.add_password(*authinfo)
338
338
339 handlers.extend((httpbasicauthhandler(passmgr),
339 handlers.extend((httpbasicauthhandler(passmgr),
340 httpdigestauthhandler(passmgr)))
340 httpdigestauthhandler(passmgr)))
341
341
342 return urllib2.build_opener(*handlers)
342 return urllib2.build_opener(*handlers)
343
343
344 def _type_id_to_name(self, type_id):
344 def _type_id_to_name(self, type_id):
345 return {
345 return {
346 1: b'commit',
346 1: b'commit',
347 2: b'tree',
347 2: b'tree',
348 3: b'blob',
348 3: b'blob',
349 4: b'tag'
349 4: b'tag'
350 }[type_id]
350 }[type_id]
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def check_url(self, url, config):
353 def check_url(self, url, config):
354 url_obj = url_parser(url)
354 url_obj = url_parser(url)
355 test_uri, _ = url_obj.authinfo()
355 test_uri, _ = url_obj.authinfo()
356 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
356 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
357 url_obj.query = obfuscate_qs(url_obj.query)
357 url_obj.query = obfuscate_qs(url_obj.query)
358 cleaned_uri = str(url_obj)
358 cleaned_uri = str(url_obj)
359 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
359 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
360
360
361 if not test_uri.endswith('info/refs'):
361 if not test_uri.endswith('info/refs'):
362 test_uri = test_uri.rstrip('/') + '/info/refs'
362 test_uri = test_uri.rstrip('/') + '/info/refs'
363
363
364 o = self._build_opener(url)
364 o = self._build_opener(url)
365 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
365 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
366
366
367 q = {"service": 'git-upload-pack'}
367 q = {"service": 'git-upload-pack'}
368 qs = '?%s' % urllib.urlencode(q)
368 qs = '?%s' % urllib.urlencode(q)
369 cu = "%s%s" % (test_uri, qs)
369 cu = "%s%s" % (test_uri, qs)
370 req = urllib2.Request(cu, None, {})
370 req = urllib2.Request(cu, None, {})
371
371
372 try:
372 try:
373 log.debug("Trying to open URL %s", cleaned_uri)
373 log.debug("Trying to open URL %s", cleaned_uri)
374 resp = o.open(req)
374 resp = o.open(req)
375 if resp.code != 200:
375 if resp.code != 200:
376 raise exceptions.URLError()('Return Code is not 200')
376 raise exceptions.URLError()('Return Code is not 200')
377 except Exception as e:
377 except Exception as e:
378 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
378 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
379 # means it cannot be cloned
379 # means it cannot be cloned
380 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
380 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
381
381
382 # now detect if it's proper git repo
382 # now detect if it's proper git repo
383 gitdata = resp.read()
383 gitdata = resp.read()
384 if 'service=git-upload-pack' in gitdata:
384 if 'service=git-upload-pack' in gitdata:
385 pass
385 pass
386 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
386 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
387 # old style git can return some other format !
387 # old style git can return some other format !
388 pass
388 pass
389 else:
389 else:
390 raise exceptions.URLError()(
390 raise exceptions.URLError()(
391 "url [%s] does not look like an git" % (cleaned_uri,))
391 "url [%s] does not look like an git" % (cleaned_uri,))
392
392
393 return True
393 return True
394
394
395 @reraise_safe_exceptions
395 @reraise_safe_exceptions
396 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
396 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
397 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
397 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
398 remote_refs = self.pull(wire, url, apply_refs=False)
398 remote_refs = self.pull(wire, url, apply_refs=False)
399 repo = self._factory.repo(wire)
399 repo = self._factory.repo(wire)
400 if isinstance(valid_refs, list):
400 if isinstance(valid_refs, list):
401 valid_refs = tuple(valid_refs)
401 valid_refs = tuple(valid_refs)
402
402
403 for k in remote_refs:
403 for k in remote_refs:
404 # only parse heads/tags and skip so called deferred tags
404 # only parse heads/tags and skip so called deferred tags
405 if k.startswith(valid_refs) and not k.endswith(deferred):
405 if k.startswith(valid_refs) and not k.endswith(deferred):
406 repo[k] = remote_refs[k]
406 repo[k] = remote_refs[k]
407
407
408 if update_after_clone:
408 if update_after_clone:
409 # we want to checkout HEAD
409 # we want to checkout HEAD
410 repo["HEAD"] = remote_refs["HEAD"]
410 repo["HEAD"] = remote_refs["HEAD"]
411 index.build_index_from_tree(repo.path, repo.index_path(),
411 index.build_index_from_tree(repo.path, repo.index_path(),
412 repo.object_store, repo["HEAD"].tree)
412 repo.object_store, repo["HEAD"].tree)
413
413
414 @reraise_safe_exceptions
414 @reraise_safe_exceptions
415 def branch(self, wire, commit_id):
415 def branch(self, wire, commit_id):
416 cache_on, context_uid, repo_id = self._cache_on(wire)
416 cache_on, context_uid, repo_id = self._cache_on(wire)
417 region = self.region(wire)
417 region = self._region(wire)
418 @region.conditional_cache_on_arguments(condition=cache_on)
418 @region.conditional_cache_on_arguments(condition=cache_on)
419 def _branch(_context_uid, _repo_id, _commit_id):
419 def _branch(_context_uid, _repo_id, _commit_id):
420 regex = re.compile('^refs/heads')
420 regex = re.compile('^refs/heads')
421
421
422 def filter_with(ref):
422 def filter_with(ref):
423 return regex.match(ref[0]) and ref[1] == _commit_id
423 return regex.match(ref[0]) and ref[1] == _commit_id
424
424
425 branches = filter(filter_with, self.get_refs(wire).items())
425 branches = filter(filter_with, self.get_refs(wire).items())
426 return [x[0].split('refs/heads/')[-1] for x in branches]
426 return [x[0].split('refs/heads/')[-1] for x in branches]
427
427
428 return _branch(context_uid, repo_id, commit_id)
428 return _branch(context_uid, repo_id, commit_id)
429
429
430 @reraise_safe_exceptions
430 @reraise_safe_exceptions
431 def commit_branches(self, wire, commit_id):
431 def commit_branches(self, wire, commit_id):
432 cache_on, context_uid, repo_id = self._cache_on(wire)
432 cache_on, context_uid, repo_id = self._cache_on(wire)
433 region = self.region(wire)
433 region = self._region(wire)
434 @region.conditional_cache_on_arguments(condition=cache_on)
434 @region.conditional_cache_on_arguments(condition=cache_on)
435 def _commit_branches(_context_uid, _repo_id, _commit_id):
435 def _commit_branches(_context_uid, _repo_id, _commit_id):
436 repo_init = self._factory.repo_libgit2(wire)
436 repo_init = self._factory.repo_libgit2(wire)
437 with repo_init as repo:
437 with repo_init as repo:
438 branches = [x for x in repo.branches.with_commit(_commit_id)]
438 branches = [x for x in repo.branches.with_commit(_commit_id)]
439 return branches
439 return branches
440
440
441 return _commit_branches(context_uid, repo_id, commit_id)
441 return _commit_branches(context_uid, repo_id, commit_id)
442
442
443 @reraise_safe_exceptions
443 @reraise_safe_exceptions
444 def add_object(self, wire, content):
444 def add_object(self, wire, content):
445 repo_init = self._factory.repo_libgit2(wire)
445 repo_init = self._factory.repo_libgit2(wire)
446 with repo_init as repo:
446 with repo_init as repo:
447 blob = objects.Blob()
447 blob = objects.Blob()
448 blob.set_raw_string(content)
448 blob.set_raw_string(content)
449 repo.object_store.add_object(blob)
449 repo.object_store.add_object(blob)
450 return blob.id
450 return blob.id
451
451
452 # TODO: this is quite complex, check if that can be simplified
452 # TODO: this is quite complex, check if that can be simplified
453 @reraise_safe_exceptions
453 @reraise_safe_exceptions
454 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
454 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
455 repo = self._factory.repo(wire)
455 repo = self._factory.repo(wire)
456 object_store = repo.object_store
456 object_store = repo.object_store
457
457
458 # Create tree and populates it with blobs
458 # Create tree and populates it with blobs
459 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
459 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
460
460
461 for node in updated:
461 for node in updated:
462 # Compute subdirs if needed
462 # Compute subdirs if needed
463 dirpath, nodename = vcspath.split(node['path'])
463 dirpath, nodename = vcspath.split(node['path'])
464 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
464 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
465 parent = commit_tree
465 parent = commit_tree
466 ancestors = [('', parent)]
466 ancestors = [('', parent)]
467
467
468 # Tries to dig for the deepest existing tree
468 # Tries to dig for the deepest existing tree
469 while dirnames:
469 while dirnames:
470 curdir = dirnames.pop(0)
470 curdir = dirnames.pop(0)
471 try:
471 try:
472 dir_id = parent[curdir][1]
472 dir_id = parent[curdir][1]
473 except KeyError:
473 except KeyError:
474 # put curdir back into dirnames and stops
474 # put curdir back into dirnames and stops
475 dirnames.insert(0, curdir)
475 dirnames.insert(0, curdir)
476 break
476 break
477 else:
477 else:
478 # If found, updates parent
478 # If found, updates parent
479 parent = repo[dir_id]
479 parent = repo[dir_id]
480 ancestors.append((curdir, parent))
480 ancestors.append((curdir, parent))
481 # Now parent is deepest existing tree and we need to create
481 # Now parent is deepest existing tree and we need to create
482 # subtrees for dirnames (in reverse order)
482 # subtrees for dirnames (in reverse order)
483 # [this only applies for nodes from added]
483 # [this only applies for nodes from added]
484 new_trees = []
484 new_trees = []
485
485
486 blob = objects.Blob.from_string(node['content'])
486 blob = objects.Blob.from_string(node['content'])
487
487
488 if dirnames:
488 if dirnames:
489 # If there are trees which should be created we need to build
489 # If there are trees which should be created we need to build
490 # them now (in reverse order)
490 # them now (in reverse order)
491 reversed_dirnames = list(reversed(dirnames))
491 reversed_dirnames = list(reversed(dirnames))
492 curtree = objects.Tree()
492 curtree = objects.Tree()
493 curtree[node['node_path']] = node['mode'], blob.id
493 curtree[node['node_path']] = node['mode'], blob.id
494 new_trees.append(curtree)
494 new_trees.append(curtree)
495 for dirname in reversed_dirnames[:-1]:
495 for dirname in reversed_dirnames[:-1]:
496 newtree = objects.Tree()
496 newtree = objects.Tree()
497 newtree[dirname] = (DIR_STAT, curtree.id)
497 newtree[dirname] = (DIR_STAT, curtree.id)
498 new_trees.append(newtree)
498 new_trees.append(newtree)
499 curtree = newtree
499 curtree = newtree
500 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
500 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
501 else:
501 else:
502 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
502 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
503
503
504 new_trees.append(parent)
504 new_trees.append(parent)
505 # Update ancestors
505 # Update ancestors
506 reversed_ancestors = reversed(
506 reversed_ancestors = reversed(
507 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
507 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
508 for parent, tree, path in reversed_ancestors:
508 for parent, tree, path in reversed_ancestors:
509 parent[path] = (DIR_STAT, tree.id)
509 parent[path] = (DIR_STAT, tree.id)
510 object_store.add_object(tree)
510 object_store.add_object(tree)
511
511
512 object_store.add_object(blob)
512 object_store.add_object(blob)
513 for tree in new_trees:
513 for tree in new_trees:
514 object_store.add_object(tree)
514 object_store.add_object(tree)
515
515
516 for node_path in removed:
516 for node_path in removed:
517 paths = node_path.split('/')
517 paths = node_path.split('/')
518 tree = commit_tree
518 tree = commit_tree
519 trees = [tree]
519 trees = [tree]
520 # Traverse deep into the forest...
520 # Traverse deep into the forest...
521 for path in paths:
521 for path in paths:
522 try:
522 try:
523 obj = repo[tree[path][1]]
523 obj = repo[tree[path][1]]
524 if isinstance(obj, objects.Tree):
524 if isinstance(obj, objects.Tree):
525 trees.append(obj)
525 trees.append(obj)
526 tree = obj
526 tree = obj
527 except KeyError:
527 except KeyError:
528 break
528 break
529 # Cut down the blob and all rotten trees on the way back...
529 # Cut down the blob and all rotten trees on the way back...
530 for path, tree in reversed(zip(paths, trees)):
530 for path, tree in reversed(zip(paths, trees)):
531 del tree[path]
531 del tree[path]
532 if tree:
532 if tree:
533 # This tree still has elements - don't remove it or any
533 # This tree still has elements - don't remove it or any
534 # of it's parents
534 # of it's parents
535 break
535 break
536
536
537 object_store.add_object(commit_tree)
537 object_store.add_object(commit_tree)
538
538
539 # Create commit
539 # Create commit
540 commit = objects.Commit()
540 commit = objects.Commit()
541 commit.tree = commit_tree.id
541 commit.tree = commit_tree.id
542 for k, v in commit_data.iteritems():
542 for k, v in commit_data.iteritems():
543 setattr(commit, k, v)
543 setattr(commit, k, v)
544 object_store.add_object(commit)
544 object_store.add_object(commit)
545
545
546 self.create_branch(wire, branch, commit.id)
546 self.create_branch(wire, branch, commit.id)
547
547
548 # dulwich set-ref
548 # dulwich set-ref
549 ref = 'refs/heads/%s' % branch
549 ref = 'refs/heads/%s' % branch
550 repo.refs[ref] = commit.id
550 repo.refs[ref] = commit.id
551
551
552 return commit.id
552 return commit.id
553
553
554 @reraise_safe_exceptions
554 @reraise_safe_exceptions
555 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
555 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
556 if url != 'default' and '://' not in url:
556 if url != 'default' and '://' not in url:
557 client = LocalGitClient(url)
557 client = LocalGitClient(url)
558 else:
558 else:
559 url_obj = url_parser(url)
559 url_obj = url_parser(url)
560 o = self._build_opener(url)
560 o = self._build_opener(url)
561 url, _ = url_obj.authinfo()
561 url, _ = url_obj.authinfo()
562 client = HttpGitClient(base_url=url, opener=o)
562 client = HttpGitClient(base_url=url, opener=o)
563 repo = self._factory.repo(wire)
563 repo = self._factory.repo(wire)
564
564
565 determine_wants = repo.object_store.determine_wants_all
565 determine_wants = repo.object_store.determine_wants_all
566 if refs:
566 if refs:
567 def determine_wants_requested(references):
567 def determine_wants_requested(references):
568 return [references[r] for r in references if r in refs]
568 return [references[r] for r in references if r in refs]
569 determine_wants = determine_wants_requested
569 determine_wants = determine_wants_requested
570
570
571 try:
571 try:
572 remote_refs = client.fetch(
572 remote_refs = client.fetch(
573 path=url, target=repo, determine_wants=determine_wants)
573 path=url, target=repo, determine_wants=determine_wants)
574 except NotGitRepository as e:
574 except NotGitRepository as e:
575 log.warning(
575 log.warning(
576 'Trying to fetch from "%s" failed, not a Git repository.', url)
576 'Trying to fetch from "%s" failed, not a Git repository.', url)
577 # Exception can contain unicode which we convert
577 # Exception can contain unicode which we convert
578 raise exceptions.AbortException(e)(repr(e))
578 raise exceptions.AbortException(e)(repr(e))
579
579
580 # mikhail: client.fetch() returns all the remote refs, but fetches only
580 # mikhail: client.fetch() returns all the remote refs, but fetches only
581 # refs filtered by `determine_wants` function. We need to filter result
581 # refs filtered by `determine_wants` function. We need to filter result
582 # as well
582 # as well
583 if refs:
583 if refs:
584 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
584 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
585
585
586 if apply_refs:
586 if apply_refs:
587 # TODO: johbo: Needs proper test coverage with a git repository
587 # TODO: johbo: Needs proper test coverage with a git repository
588 # that contains a tag object, so that we would end up with
588 # that contains a tag object, so that we would end up with
589 # a peeled ref at this point.
589 # a peeled ref at this point.
590 for k in remote_refs:
590 for k in remote_refs:
591 if k.endswith(PEELED_REF_MARKER):
591 if k.endswith(PEELED_REF_MARKER):
592 log.debug("Skipping peeled reference %s", k)
592 log.debug("Skipping peeled reference %s", k)
593 continue
593 continue
594 repo[k] = remote_refs[k]
594 repo[k] = remote_refs[k]
595
595
596 if refs and not update_after:
596 if refs and not update_after:
597 # mikhail: explicitly set the head to the last ref.
597 # mikhail: explicitly set the head to the last ref.
598 repo['HEAD'] = remote_refs[refs[-1]]
598 repo['HEAD'] = remote_refs[refs[-1]]
599
599
600 if update_after:
600 if update_after:
601 # we want to checkout HEAD
601 # we want to checkout HEAD
602 repo["HEAD"] = remote_refs["HEAD"]
602 repo["HEAD"] = remote_refs["HEAD"]
603 index.build_index_from_tree(repo.path, repo.index_path(),
603 index.build_index_from_tree(repo.path, repo.index_path(),
604 repo.object_store, repo["HEAD"].tree)
604 repo.object_store, repo["HEAD"].tree)
605 return remote_refs
605 return remote_refs
606
606
607 @reraise_safe_exceptions
607 @reraise_safe_exceptions
608 def sync_fetch(self, wire, url, refs=None, all_refs=False):
608 def sync_fetch(self, wire, url, refs=None, all_refs=False):
609 repo = self._factory.repo(wire)
609 repo = self._factory.repo(wire)
610 if refs and not isinstance(refs, (list, tuple)):
610 if refs and not isinstance(refs, (list, tuple)):
611 refs = [refs]
611 refs = [refs]
612
612
613 config = self._wire_to_config(wire)
613 config = self._wire_to_config(wire)
614 # get all remote refs we'll use to fetch later
614 # get all remote refs we'll use to fetch later
615 cmd = ['ls-remote']
615 cmd = ['ls-remote']
616 if not all_refs:
616 if not all_refs:
617 cmd += ['--heads', '--tags']
617 cmd += ['--heads', '--tags']
618 cmd += [url]
618 cmd += [url]
619 output, __ = self.run_git_command(
619 output, __ = self.run_git_command(
620 wire, cmd, fail_on_stderr=False,
620 wire, cmd, fail_on_stderr=False,
621 _copts=self._remote_conf(config),
621 _copts=self._remote_conf(config),
622 extra_env={'GIT_TERMINAL_PROMPT': '0'})
622 extra_env={'GIT_TERMINAL_PROMPT': '0'})
623
623
624 remote_refs = collections.OrderedDict()
624 remote_refs = collections.OrderedDict()
625 fetch_refs = []
625 fetch_refs = []
626
626
627 for ref_line in output.splitlines():
627 for ref_line in output.splitlines():
628 sha, ref = ref_line.split('\t')
628 sha, ref = ref_line.split('\t')
629 sha = sha.strip()
629 sha = sha.strip()
630 if ref in remote_refs:
630 if ref in remote_refs:
631 # duplicate, skip
631 # duplicate, skip
632 continue
632 continue
633 if ref.endswith(PEELED_REF_MARKER):
633 if ref.endswith(PEELED_REF_MARKER):
634 log.debug("Skipping peeled reference %s", ref)
634 log.debug("Skipping peeled reference %s", ref)
635 continue
635 continue
636 # don't sync HEAD
636 # don't sync HEAD
637 if ref in ['HEAD']:
637 if ref in ['HEAD']:
638 continue
638 continue
639
639
640 remote_refs[ref] = sha
640 remote_refs[ref] = sha
641
641
642 if refs and sha in refs:
642 if refs and sha in refs:
643 # we filter fetch using our specified refs
643 # we filter fetch using our specified refs
644 fetch_refs.append('{}:{}'.format(ref, ref))
644 fetch_refs.append('{}:{}'.format(ref, ref))
645 elif not refs:
645 elif not refs:
646 fetch_refs.append('{}:{}'.format(ref, ref))
646 fetch_refs.append('{}:{}'.format(ref, ref))
647 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
647 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
648
648
649 if fetch_refs:
649 if fetch_refs:
650 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
650 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
651 fetch_refs_chunks = list(chunk)
651 fetch_refs_chunks = list(chunk)
652 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
652 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
653 _out, _err = self.run_git_command(
653 _out, _err = self.run_git_command(
654 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
654 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
655 fail_on_stderr=False,
655 fail_on_stderr=False,
656 _copts=self._remote_conf(config),
656 _copts=self._remote_conf(config),
657 extra_env={'GIT_TERMINAL_PROMPT': '0'})
657 extra_env={'GIT_TERMINAL_PROMPT': '0'})
658
658
659 return remote_refs
659 return remote_refs
660
660
661 @reraise_safe_exceptions
661 @reraise_safe_exceptions
662 def sync_push(self, wire, url, refs=None):
662 def sync_push(self, wire, url, refs=None):
663 if not self.check_url(url, wire):
663 if not self.check_url(url, wire):
664 return
664 return
665 config = self._wire_to_config(wire)
665 config = self._wire_to_config(wire)
666 self._factory.repo(wire)
666 self._factory.repo(wire)
667 self.run_git_command(
667 self.run_git_command(
668 wire, ['push', url, '--mirror'], fail_on_stderr=False,
668 wire, ['push', url, '--mirror'], fail_on_stderr=False,
669 _copts=self._remote_conf(config),
669 _copts=self._remote_conf(config),
670 extra_env={'GIT_TERMINAL_PROMPT': '0'})
670 extra_env={'GIT_TERMINAL_PROMPT': '0'})
671
671
672 @reraise_safe_exceptions
672 @reraise_safe_exceptions
673 def get_remote_refs(self, wire, url):
673 def get_remote_refs(self, wire, url):
674 repo = Repo(url)
674 repo = Repo(url)
675 return repo.get_refs()
675 return repo.get_refs()
676
676
677 @reraise_safe_exceptions
677 @reraise_safe_exceptions
678 def get_description(self, wire):
678 def get_description(self, wire):
679 repo = self._factory.repo(wire)
679 repo = self._factory.repo(wire)
680 return repo.get_description()
680 return repo.get_description()
681
681
682 @reraise_safe_exceptions
682 @reraise_safe_exceptions
683 def get_missing_revs(self, wire, rev1, rev2, path2):
683 def get_missing_revs(self, wire, rev1, rev2, path2):
684 repo = self._factory.repo(wire)
684 repo = self._factory.repo(wire)
685 LocalGitClient(thin_packs=False).fetch(path2, repo)
685 LocalGitClient(thin_packs=False).fetch(path2, repo)
686
686
687 wire_remote = wire.copy()
687 wire_remote = wire.copy()
688 wire_remote['path'] = path2
688 wire_remote['path'] = path2
689 repo_remote = self._factory.repo(wire_remote)
689 repo_remote = self._factory.repo(wire_remote)
690 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
690 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
691
691
692 revs = [
692 revs = [
693 x.commit.id
693 x.commit.id
694 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
694 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
695 return revs
695 return revs
696
696
697 @reraise_safe_exceptions
697 @reraise_safe_exceptions
698 def get_object(self, wire, sha, maybe_unreachable=False):
698 def get_object(self, wire, sha, maybe_unreachable=False):
699 cache_on, context_uid, repo_id = self._cache_on(wire)
699 cache_on, context_uid, repo_id = self._cache_on(wire)
700 region = self.region(wire)
700 region = self._region(wire)
701 @region.conditional_cache_on_arguments(condition=cache_on)
701 @region.conditional_cache_on_arguments(condition=cache_on)
702 def _get_object(_context_uid, _repo_id, _sha):
702 def _get_object(_context_uid, _repo_id, _sha):
703 repo_init = self._factory.repo_libgit2(wire)
703 repo_init = self._factory.repo_libgit2(wire)
704 with repo_init as repo:
704 with repo_init as repo:
705
705
706 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
706 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
707 try:
707 try:
708 commit = repo.revparse_single(sha)
708 commit = repo.revparse_single(sha)
709 except KeyError:
709 except KeyError:
710 # NOTE(marcink): KeyError doesn't give us any meaningful information
710 # NOTE(marcink): KeyError doesn't give us any meaningful information
711 # here, we instead give something more explicit
711 # here, we instead give something more explicit
712 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
712 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
713 raise exceptions.LookupException(e)(missing_commit_err)
713 raise exceptions.LookupException(e)(missing_commit_err)
714 except ValueError as e:
714 except ValueError as e:
715 raise exceptions.LookupException(e)(missing_commit_err)
715 raise exceptions.LookupException(e)(missing_commit_err)
716
716
717 is_tag = False
717 is_tag = False
718 if isinstance(commit, pygit2.Tag):
718 if isinstance(commit, pygit2.Tag):
719 commit = repo.get(commit.target)
719 commit = repo.get(commit.target)
720 is_tag = True
720 is_tag = True
721
721
722 check_dangling = True
722 check_dangling = True
723 if is_tag:
723 if is_tag:
724 check_dangling = False
724 check_dangling = False
725
725
726 if check_dangling and maybe_unreachable:
726 if check_dangling and maybe_unreachable:
727 check_dangling = False
727 check_dangling = False
728
728
729 # we used a reference and it parsed means we're not having a dangling commit
729 # we used a reference and it parsed means we're not having a dangling commit
730 if sha != commit.hex:
730 if sha != commit.hex:
731 check_dangling = False
731 check_dangling = False
732
732
733 if check_dangling:
733 if check_dangling:
734 # check for dangling commit
734 # check for dangling commit
735 for branch in repo.branches.with_commit(commit.hex):
735 for branch in repo.branches.with_commit(commit.hex):
736 if branch:
736 if branch:
737 break
737 break
738 else:
738 else:
739 # NOTE(marcink): Empty error doesn't give us any meaningful information
739 # NOTE(marcink): Empty error doesn't give us any meaningful information
740 # here, we instead give something more explicit
740 # here, we instead give something more explicit
741 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
741 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
742 raise exceptions.LookupException(e)(missing_commit_err)
742 raise exceptions.LookupException(e)(missing_commit_err)
743
743
744 commit_id = commit.hex
744 commit_id = commit.hex
745 type_id = commit.type
745 type_id = commit.type
746
746
747 return {
747 return {
748 'id': commit_id,
748 'id': commit_id,
749 'type': self._type_id_to_name(type_id),
749 'type': self._type_id_to_name(type_id),
750 'commit_id': commit_id,
750 'commit_id': commit_id,
751 'idx': 0
751 'idx': 0
752 }
752 }
753
753
754 return _get_object(context_uid, repo_id, sha)
754 return _get_object(context_uid, repo_id, sha)
755
755
756 @reraise_safe_exceptions
756 @reraise_safe_exceptions
757 def get_refs(self, wire):
757 def get_refs(self, wire):
758 cache_on, context_uid, repo_id = self._cache_on(wire)
758 cache_on, context_uid, repo_id = self._cache_on(wire)
759 region = self.region(wire)
759 region = self._region(wire)
760 @region.conditional_cache_on_arguments(condition=cache_on)
760 @region.conditional_cache_on_arguments(condition=cache_on)
761 def _get_refs(_context_uid, _repo_id):
761 def _get_refs(_context_uid, _repo_id):
762
762
763 repo_init = self._factory.repo_libgit2(wire)
763 repo_init = self._factory.repo_libgit2(wire)
764 with repo_init as repo:
764 with repo_init as repo:
765 regex = re.compile('^refs/(heads|tags)/')
765 regex = re.compile('^refs/(heads|tags)/')
766 return {x.name: x.target.hex for x in
766 return {x.name: x.target.hex for x in
767 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
767 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
768
768
769 return _get_refs(context_uid, repo_id)
769 return _get_refs(context_uid, repo_id)
770
770
771 @reraise_safe_exceptions
771 @reraise_safe_exceptions
772 def get_branch_pointers(self, wire):
772 def get_branch_pointers(self, wire):
773 cache_on, context_uid, repo_id = self._cache_on(wire)
773 cache_on, context_uid, repo_id = self._cache_on(wire)
774 region = self.region(wire)
774 region = self._region(wire)
775 @region.conditional_cache_on_arguments(condition=cache_on)
775 @region.conditional_cache_on_arguments(condition=cache_on)
776 def _get_branch_pointers(_context_uid, _repo_id):
776 def _get_branch_pointers(_context_uid, _repo_id):
777
777
778 repo_init = self._factory.repo_libgit2(wire)
778 repo_init = self._factory.repo_libgit2(wire)
779 regex = re.compile('^refs/heads')
779 regex = re.compile('^refs/heads')
780 with repo_init as repo:
780 with repo_init as repo:
781 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
781 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
782 return {x.target.hex: x.shorthand for x in branches}
782 return {x.target.hex: x.shorthand for x in branches}
783
783
784 return _get_branch_pointers(context_uid, repo_id)
784 return _get_branch_pointers(context_uid, repo_id)
785
785
786 @reraise_safe_exceptions
786 @reraise_safe_exceptions
787 def head(self, wire, show_exc=True):
787 def head(self, wire, show_exc=True):
788 cache_on, context_uid, repo_id = self._cache_on(wire)
788 cache_on, context_uid, repo_id = self._cache_on(wire)
789 region = self.region(wire)
789 region = self._region(wire)
790 @region.conditional_cache_on_arguments(condition=cache_on)
790 @region.conditional_cache_on_arguments(condition=cache_on)
791 def _head(_context_uid, _repo_id, _show_exc):
791 def _head(_context_uid, _repo_id, _show_exc):
792 repo_init = self._factory.repo_libgit2(wire)
792 repo_init = self._factory.repo_libgit2(wire)
793 with repo_init as repo:
793 with repo_init as repo:
794 try:
794 try:
795 return repo.head.peel().hex
795 return repo.head.peel().hex
796 except Exception:
796 except Exception:
797 if show_exc:
797 if show_exc:
798 raise
798 raise
799 return _head(context_uid, repo_id, show_exc)
799 return _head(context_uid, repo_id, show_exc)
800
800
801 @reraise_safe_exceptions
801 @reraise_safe_exceptions
802 def init(self, wire):
802 def init(self, wire):
803 repo_path = str_to_dulwich(wire['path'])
803 repo_path = str_to_dulwich(wire['path'])
804 self.repo = Repo.init(repo_path)
804 self.repo = Repo.init(repo_path)
805
805
806 @reraise_safe_exceptions
806 @reraise_safe_exceptions
807 def init_bare(self, wire):
807 def init_bare(self, wire):
808 repo_path = str_to_dulwich(wire['path'])
808 repo_path = str_to_dulwich(wire['path'])
809 self.repo = Repo.init_bare(repo_path)
809 self.repo = Repo.init_bare(repo_path)
810
810
811 @reraise_safe_exceptions
811 @reraise_safe_exceptions
812 def revision(self, wire, rev):
812 def revision(self, wire, rev):
813
813
814 cache_on, context_uid, repo_id = self._cache_on(wire)
814 cache_on, context_uid, repo_id = self._cache_on(wire)
815 region = self.region(wire)
815 region = self._region(wire)
816 @region.conditional_cache_on_arguments(condition=cache_on)
816 @region.conditional_cache_on_arguments(condition=cache_on)
817 def _revision(_context_uid, _repo_id, _rev):
817 def _revision(_context_uid, _repo_id, _rev):
818 repo_init = self._factory.repo_libgit2(wire)
818 repo_init = self._factory.repo_libgit2(wire)
819 with repo_init as repo:
819 with repo_init as repo:
820 commit = repo[rev]
820 commit = repo[rev]
821 obj_data = {
821 obj_data = {
822 'id': commit.id.hex,
822 'id': commit.id.hex,
823 }
823 }
824 # tree objects itself don't have tree_id attribute
824 # tree objects itself don't have tree_id attribute
825 if hasattr(commit, 'tree_id'):
825 if hasattr(commit, 'tree_id'):
826 obj_data['tree'] = commit.tree_id.hex
826 obj_data['tree'] = commit.tree_id.hex
827
827
828 return obj_data
828 return obj_data
829 return _revision(context_uid, repo_id, rev)
829 return _revision(context_uid, repo_id, rev)
830
830
831 @reraise_safe_exceptions
831 @reraise_safe_exceptions
832 def date(self, wire, commit_id):
832 def date(self, wire, commit_id):
833 cache_on, context_uid, repo_id = self._cache_on(wire)
833 cache_on, context_uid, repo_id = self._cache_on(wire)
834 region = self.region(wire)
834 region = self._region(wire)
835 @region.conditional_cache_on_arguments(condition=cache_on)
835 @region.conditional_cache_on_arguments(condition=cache_on)
836 def _date(_repo_id, _commit_id):
836 def _date(_repo_id, _commit_id):
837 repo_init = self._factory.repo_libgit2(wire)
837 repo_init = self._factory.repo_libgit2(wire)
838 with repo_init as repo:
838 with repo_init as repo:
839 commit = repo[commit_id]
839 commit = repo[commit_id]
840
840
841 if hasattr(commit, 'commit_time'):
841 if hasattr(commit, 'commit_time'):
842 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
842 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
843 else:
843 else:
844 commit = commit.get_object()
844 commit = commit.get_object()
845 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
845 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
846
846
847 # TODO(marcink): check dulwich difference of offset vs timezone
847 # TODO(marcink): check dulwich difference of offset vs timezone
848 return [commit_time, commit_time_offset]
848 return [commit_time, commit_time_offset]
849 return _date(repo_id, commit_id)
849 return _date(repo_id, commit_id)
850
850
851 @reraise_safe_exceptions
851 @reraise_safe_exceptions
852 def author(self, wire, commit_id):
852 def author(self, wire, commit_id):
853 cache_on, context_uid, repo_id = self._cache_on(wire)
853 cache_on, context_uid, repo_id = self._cache_on(wire)
854 region = self.region(wire)
854 region = self._region(wire)
855 @region.conditional_cache_on_arguments(condition=cache_on)
855 @region.conditional_cache_on_arguments(condition=cache_on)
856 def _author(_repo_id, _commit_id):
856 def _author(_repo_id, _commit_id):
857 repo_init = self._factory.repo_libgit2(wire)
857 repo_init = self._factory.repo_libgit2(wire)
858 with repo_init as repo:
858 with repo_init as repo:
859 commit = repo[commit_id]
859 commit = repo[commit_id]
860
860
861 if hasattr(commit, 'author'):
861 if hasattr(commit, 'author'):
862 author = commit.author
862 author = commit.author
863 else:
863 else:
864 author = commit.get_object().author
864 author = commit.get_object().author
865
865
866 if author.email:
866 if author.email:
867 return u"{} <{}>".format(author.name, author.email)
867 return u"{} <{}>".format(author.name, author.email)
868
868
869 try:
869 try:
870 return u"{}".format(author.name)
870 return u"{}".format(author.name)
871 except Exception:
871 except Exception:
872 return u"{}".format(safe_unicode(author.raw_name))
872 return u"{}".format(safe_unicode(author.raw_name))
873
873
874 return _author(repo_id, commit_id)
874 return _author(repo_id, commit_id)
875
875
876 @reraise_safe_exceptions
876 @reraise_safe_exceptions
877 def message(self, wire, commit_id):
877 def message(self, wire, commit_id):
878 cache_on, context_uid, repo_id = self._cache_on(wire)
878 cache_on, context_uid, repo_id = self._cache_on(wire)
879 region = self.region(wire)
879 region = self._region(wire)
880 @region.conditional_cache_on_arguments(condition=cache_on)
880 @region.conditional_cache_on_arguments(condition=cache_on)
881 def _message(_repo_id, _commit_id):
881 def _message(_repo_id, _commit_id):
882 repo_init = self._factory.repo_libgit2(wire)
882 repo_init = self._factory.repo_libgit2(wire)
883 with repo_init as repo:
883 with repo_init as repo:
884 commit = repo[commit_id]
884 commit = repo[commit_id]
885 return commit.message
885 return commit.message
886 return _message(repo_id, commit_id)
886 return _message(repo_id, commit_id)
887
887
888 @reraise_safe_exceptions
888 @reraise_safe_exceptions
889 def parents(self, wire, commit_id):
889 def parents(self, wire, commit_id):
890 cache_on, context_uid, repo_id = self._cache_on(wire)
890 cache_on, context_uid, repo_id = self._cache_on(wire)
891 region = self.region(wire)
891 region = self._region(wire)
892 @region.conditional_cache_on_arguments(condition=cache_on)
892 @region.conditional_cache_on_arguments(condition=cache_on)
893 def _parents(_repo_id, _commit_id):
893 def _parents(_repo_id, _commit_id):
894 repo_init = self._factory.repo_libgit2(wire)
894 repo_init = self._factory.repo_libgit2(wire)
895 with repo_init as repo:
895 with repo_init as repo:
896 commit = repo[commit_id]
896 commit = repo[commit_id]
897 if hasattr(commit, 'parent_ids'):
897 if hasattr(commit, 'parent_ids'):
898 parent_ids = commit.parent_ids
898 parent_ids = commit.parent_ids
899 else:
899 else:
900 parent_ids = commit.get_object().parent_ids
900 parent_ids = commit.get_object().parent_ids
901
901
902 return [x.hex for x in parent_ids]
902 return [x.hex for x in parent_ids]
903 return _parents(repo_id, commit_id)
903 return _parents(repo_id, commit_id)
904
904
905 @reraise_safe_exceptions
905 @reraise_safe_exceptions
906 def children(self, wire, commit_id):
906 def children(self, wire, commit_id):
907 cache_on, context_uid, repo_id = self._cache_on(wire)
907 cache_on, context_uid, repo_id = self._cache_on(wire)
908 region = self.region(wire)
908 region = self._region(wire)
909 @region.conditional_cache_on_arguments(condition=cache_on)
909 @region.conditional_cache_on_arguments(condition=cache_on)
910 def _children(_repo_id, _commit_id):
910 def _children(_repo_id, _commit_id):
911 output, __ = self.run_git_command(
911 output, __ = self.run_git_command(
912 wire, ['rev-list', '--all', '--children'])
912 wire, ['rev-list', '--all', '--children'])
913
913
914 child_ids = []
914 child_ids = []
915 pat = re.compile(r'^%s' % commit_id)
915 pat = re.compile(r'^%s' % commit_id)
916 for l in output.splitlines():
916 for l in output.splitlines():
917 if pat.match(l):
917 if pat.match(l):
918 found_ids = l.split(' ')[1:]
918 found_ids = l.split(' ')[1:]
919 child_ids.extend(found_ids)
919 child_ids.extend(found_ids)
920
920
921 return child_ids
921 return child_ids
922 return _children(repo_id, commit_id)
922 return _children(repo_id, commit_id)
923
923
924 @reraise_safe_exceptions
924 @reraise_safe_exceptions
925 def set_refs(self, wire, key, value):
925 def set_refs(self, wire, key, value):
926 repo_init = self._factory.repo_libgit2(wire)
926 repo_init = self._factory.repo_libgit2(wire)
927 with repo_init as repo:
927 with repo_init as repo:
928 repo.references.create(key, value, force=True)
928 repo.references.create(key, value, force=True)
929
929
930 @reraise_safe_exceptions
930 @reraise_safe_exceptions
931 def create_branch(self, wire, branch_name, commit_id, force=False):
931 def create_branch(self, wire, branch_name, commit_id, force=False):
932 repo_init = self._factory.repo_libgit2(wire)
932 repo_init = self._factory.repo_libgit2(wire)
933 with repo_init as repo:
933 with repo_init as repo:
934 commit = repo[commit_id]
934 commit = repo[commit_id]
935
935
936 if force:
936 if force:
937 repo.branches.local.create(branch_name, commit, force=force)
937 repo.branches.local.create(branch_name, commit, force=force)
938 elif not repo.branches.get(branch_name):
938 elif not repo.branches.get(branch_name):
939 # create only if that branch isn't existing
939 # create only if that branch isn't existing
940 repo.branches.local.create(branch_name, commit, force=force)
940 repo.branches.local.create(branch_name, commit, force=force)
941
941
942 @reraise_safe_exceptions
942 @reraise_safe_exceptions
943 def remove_ref(self, wire, key):
943 def remove_ref(self, wire, key):
944 repo_init = self._factory.repo_libgit2(wire)
944 repo_init = self._factory.repo_libgit2(wire)
945 with repo_init as repo:
945 with repo_init as repo:
946 repo.references.delete(key)
946 repo.references.delete(key)
947
947
948 @reraise_safe_exceptions
948 @reraise_safe_exceptions
949 def tag_remove(self, wire, tag_name):
949 def tag_remove(self, wire, tag_name):
950 repo_init = self._factory.repo_libgit2(wire)
950 repo_init = self._factory.repo_libgit2(wire)
951 with repo_init as repo:
951 with repo_init as repo:
952 key = 'refs/tags/{}'.format(tag_name)
952 key = 'refs/tags/{}'.format(tag_name)
953 repo.references.delete(key)
953 repo.references.delete(key)
954
954
955 @reraise_safe_exceptions
955 @reraise_safe_exceptions
956 def tree_changes(self, wire, source_id, target_id):
956 def tree_changes(self, wire, source_id, target_id):
957 # TODO(marcink): remove this seems it's only used by tests
957 # TODO(marcink): remove this seems it's only used by tests
958 repo = self._factory.repo(wire)
958 repo = self._factory.repo(wire)
959 source = repo[source_id].tree if source_id else None
959 source = repo[source_id].tree if source_id else None
960 target = repo[target_id].tree
960 target = repo[target_id].tree
961 result = repo.object_store.tree_changes(source, target)
961 result = repo.object_store.tree_changes(source, target)
962 return list(result)
962 return list(result)
963
963
964 @reraise_safe_exceptions
964 @reraise_safe_exceptions
965 def tree_and_type_for_path(self, wire, commit_id, path):
965 def tree_and_type_for_path(self, wire, commit_id, path):
966
966
967 cache_on, context_uid, repo_id = self._cache_on(wire)
967 cache_on, context_uid, repo_id = self._cache_on(wire)
968 region = self.region(wire)
968 region = self._region(wire)
969 @region.conditional_cache_on_arguments(condition=cache_on)
969 @region.conditional_cache_on_arguments(condition=cache_on)
970 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
970 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
971 repo_init = self._factory.repo_libgit2(wire)
971 repo_init = self._factory.repo_libgit2(wire)
972
972
973 with repo_init as repo:
973 with repo_init as repo:
974 commit = repo[commit_id]
974 commit = repo[commit_id]
975 try:
975 try:
976 tree = commit.tree[path]
976 tree = commit.tree[path]
977 except KeyError:
977 except KeyError:
978 return None, None, None
978 return None, None, None
979
979
980 return tree.id.hex, tree.type, tree.filemode
980 return tree.id.hex, tree.type, tree.filemode
981 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
981 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
982
982
983 @reraise_safe_exceptions
983 @reraise_safe_exceptions
984 def tree_items(self, wire, tree_id):
984 def tree_items(self, wire, tree_id):
985 cache_on, context_uid, repo_id = self._cache_on(wire)
985 cache_on, context_uid, repo_id = self._cache_on(wire)
986 region = self.region(wire)
986 region = self._region(wire)
987 @region.conditional_cache_on_arguments(condition=cache_on)
987 @region.conditional_cache_on_arguments(condition=cache_on)
988 def _tree_items(_repo_id, _tree_id):
988 def _tree_items(_repo_id, _tree_id):
989
989
990 repo_init = self._factory.repo_libgit2(wire)
990 repo_init = self._factory.repo_libgit2(wire)
991 with repo_init as repo:
991 with repo_init as repo:
992 try:
992 try:
993 tree = repo[tree_id]
993 tree = repo[tree_id]
994 except KeyError:
994 except KeyError:
995 raise ObjectMissing('No tree with id: {}'.format(tree_id))
995 raise ObjectMissing('No tree with id: {}'.format(tree_id))
996
996
997 result = []
997 result = []
998 for item in tree:
998 for item in tree:
999 item_sha = item.hex
999 item_sha = item.hex
1000 item_mode = item.filemode
1000 item_mode = item.filemode
1001 item_type = item.type
1001 item_type = item.type
1002
1002
1003 if item_type == 'commit':
1003 if item_type == 'commit':
1004 # NOTE(marcink): submodules we translate to 'link' for backward compat
1004 # NOTE(marcink): submodules we translate to 'link' for backward compat
1005 item_type = 'link'
1005 item_type = 'link'
1006
1006
1007 result.append((item.name, item_mode, item_sha, item_type))
1007 result.append((item.name, item_mode, item_sha, item_type))
1008 return result
1008 return result
1009 return _tree_items(repo_id, tree_id)
1009 return _tree_items(repo_id, tree_id)
1010
1010
1011 @reraise_safe_exceptions
1011 @reraise_safe_exceptions
1012 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1012 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1013 """
1013 """
1014 Old version that uses subprocess to call diff
1014 Old version that uses subprocess to call diff
1015 """
1015 """
1016
1016
1017 flags = [
1017 flags = [
1018 '-U%s' % context, '--patch',
1018 '-U%s' % context, '--patch',
1019 '--binary',
1019 '--binary',
1020 '--find-renames',
1020 '--find-renames',
1021 '--no-indent-heuristic',
1021 '--no-indent-heuristic',
1022 # '--indent-heuristic',
1022 # '--indent-heuristic',
1023 #'--full-index',
1023 #'--full-index',
1024 #'--abbrev=40'
1024 #'--abbrev=40'
1025 ]
1025 ]
1026
1026
1027 if opt_ignorews:
1027 if opt_ignorews:
1028 flags.append('--ignore-all-space')
1028 flags.append('--ignore-all-space')
1029
1029
1030 if commit_id_1 == self.EMPTY_COMMIT:
1030 if commit_id_1 == self.EMPTY_COMMIT:
1031 cmd = ['show'] + flags + [commit_id_2]
1031 cmd = ['show'] + flags + [commit_id_2]
1032 else:
1032 else:
1033 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1033 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1034
1034
1035 if file_filter:
1035 if file_filter:
1036 cmd.extend(['--', file_filter])
1036 cmd.extend(['--', file_filter])
1037
1037
1038 diff, __ = self.run_git_command(wire, cmd)
1038 diff, __ = self.run_git_command(wire, cmd)
1039 # If we used 'show' command, strip first few lines (until actual diff
1039 # If we used 'show' command, strip first few lines (until actual diff
1040 # starts)
1040 # starts)
1041 if commit_id_1 == self.EMPTY_COMMIT:
1041 if commit_id_1 == self.EMPTY_COMMIT:
1042 lines = diff.splitlines()
1042 lines = diff.splitlines()
1043 x = 0
1043 x = 0
1044 for line in lines:
1044 for line in lines:
1045 if line.startswith('diff'):
1045 if line.startswith('diff'):
1046 break
1046 break
1047 x += 1
1047 x += 1
1048 # Append new line just like 'diff' command do
1048 # Append new line just like 'diff' command do
1049 diff = '\n'.join(lines[x:]) + '\n'
1049 diff = '\n'.join(lines[x:]) + '\n'
1050 return diff
1050 return diff
1051
1051
1052 @reraise_safe_exceptions
1052 @reraise_safe_exceptions
1053 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1053 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1054 repo_init = self._factory.repo_libgit2(wire)
1054 repo_init = self._factory.repo_libgit2(wire)
1055 with repo_init as repo:
1055 with repo_init as repo:
1056 swap = True
1056 swap = True
1057 flags = 0
1057 flags = 0
1058 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1058 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1059
1059
1060 if opt_ignorews:
1060 if opt_ignorews:
1061 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1061 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1062
1062
1063 if commit_id_1 == self.EMPTY_COMMIT:
1063 if commit_id_1 == self.EMPTY_COMMIT:
1064 comm1 = repo[commit_id_2]
1064 comm1 = repo[commit_id_2]
1065 diff_obj = comm1.tree.diff_to_tree(
1065 diff_obj = comm1.tree.diff_to_tree(
1066 flags=flags, context_lines=context, swap=swap)
1066 flags=flags, context_lines=context, swap=swap)
1067
1067
1068 else:
1068 else:
1069 comm1 = repo[commit_id_2]
1069 comm1 = repo[commit_id_2]
1070 comm2 = repo[commit_id_1]
1070 comm2 = repo[commit_id_1]
1071 diff_obj = comm1.tree.diff_to_tree(
1071 diff_obj = comm1.tree.diff_to_tree(
1072 comm2.tree, flags=flags, context_lines=context, swap=swap)
1072 comm2.tree, flags=flags, context_lines=context, swap=swap)
1073 similar_flags = 0
1073 similar_flags = 0
1074 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1074 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1075 diff_obj.find_similar(flags=similar_flags)
1075 diff_obj.find_similar(flags=similar_flags)
1076
1076
1077 if file_filter:
1077 if file_filter:
1078 for p in diff_obj:
1078 for p in diff_obj:
1079 if p.delta.old_file.path == file_filter:
1079 if p.delta.old_file.path == file_filter:
1080 return p.patch or ''
1080 return p.patch or ''
1081 # fo matching path == no diff
1081 # fo matching path == no diff
1082 return ''
1082 return ''
1083 return diff_obj.patch or ''
1083 return diff_obj.patch or ''
1084
1084
1085 @reraise_safe_exceptions
1085 @reraise_safe_exceptions
1086 def node_history(self, wire, commit_id, path, limit):
1086 def node_history(self, wire, commit_id, path, limit):
1087 cache_on, context_uid, repo_id = self._cache_on(wire)
1087 cache_on, context_uid, repo_id = self._cache_on(wire)
1088 region = self.region(wire)
1088 region = self._region(wire)
1089 @region.conditional_cache_on_arguments(condition=cache_on)
1089 @region.conditional_cache_on_arguments(condition=cache_on)
1090 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1090 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1091 # optimize for n==1, rev-list is much faster for that use-case
1091 # optimize for n==1, rev-list is much faster for that use-case
1092 if limit == 1:
1092 if limit == 1:
1093 cmd = ['rev-list', '-1', commit_id, '--', path]
1093 cmd = ['rev-list', '-1', commit_id, '--', path]
1094 else:
1094 else:
1095 cmd = ['log']
1095 cmd = ['log']
1096 if limit:
1096 if limit:
1097 cmd.extend(['-n', str(safe_int(limit, 0))])
1097 cmd.extend(['-n', str(safe_int(limit, 0))])
1098 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1098 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1099
1099
1100 output, __ = self.run_git_command(wire, cmd)
1100 output, __ = self.run_git_command(wire, cmd)
1101 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1101 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1102
1102
1103 return [x for x in commit_ids]
1103 return [x for x in commit_ids]
1104 return _node_history(context_uid, repo_id, commit_id, path, limit)
1104 return _node_history(context_uid, repo_id, commit_id, path, limit)
1105
1105
1106 @reraise_safe_exceptions
1106 @reraise_safe_exceptions
1107 def node_annotate(self, wire, commit_id, path):
1107 def node_annotate(self, wire, commit_id, path):
1108
1108
1109 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1109 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1110 # -l ==> outputs long shas (and we need all 40 characters)
1110 # -l ==> outputs long shas (and we need all 40 characters)
1111 # --root ==> doesn't put '^' character for boundaries
1111 # --root ==> doesn't put '^' character for boundaries
1112 # -r commit_id ==> blames for the given commit
1112 # -r commit_id ==> blames for the given commit
1113 output, __ = self.run_git_command(wire, cmd)
1113 output, __ = self.run_git_command(wire, cmd)
1114
1114
1115 result = []
1115 result = []
1116 for i, blame_line in enumerate(output.split('\n')[:-1]):
1116 for i, blame_line in enumerate(output.split('\n')[:-1]):
1117 line_no = i + 1
1117 line_no = i + 1
1118 commit_id, line = re.split(r' ', blame_line, 1)
1118 commit_id, line = re.split(r' ', blame_line, 1)
1119 result.append((line_no, commit_id, line))
1119 result.append((line_no, commit_id, line))
1120 return result
1120 return result
1121
1121
1122 @reraise_safe_exceptions
1122 @reraise_safe_exceptions
1123 def update_server_info(self, wire):
1123 def update_server_info(self, wire):
1124 repo = self._factory.repo(wire)
1124 repo = self._factory.repo(wire)
1125 update_server_info(repo)
1125 update_server_info(repo)
1126
1126
1127 @reraise_safe_exceptions
1127 @reraise_safe_exceptions
1128 def get_all_commit_ids(self, wire):
1128 def get_all_commit_ids(self, wire):
1129
1129
1130 cache_on, context_uid, repo_id = self._cache_on(wire)
1130 cache_on, context_uid, repo_id = self._cache_on(wire)
1131 region = self.region(wire)
1131 region = self._region(wire)
1132 @region.conditional_cache_on_arguments(condition=cache_on)
1132 @region.conditional_cache_on_arguments(condition=cache_on)
1133 def _get_all_commit_ids(_context_uid, _repo_id):
1133 def _get_all_commit_ids(_context_uid, _repo_id):
1134
1134
1135 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1135 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1136 try:
1136 try:
1137 output, __ = self.run_git_command(wire, cmd)
1137 output, __ = self.run_git_command(wire, cmd)
1138 return output.splitlines()
1138 return output.splitlines()
1139 except Exception:
1139 except Exception:
1140 # Can be raised for empty repositories
1140 # Can be raised for empty repositories
1141 return []
1141 return []
1142 return _get_all_commit_ids(context_uid, repo_id)
1142 return _get_all_commit_ids(context_uid, repo_id)
1143
1143
1144 @reraise_safe_exceptions
1144 @reraise_safe_exceptions
1145 def run_git_command(self, wire, cmd, **opts):
1145 def run_git_command(self, wire, cmd, **opts):
1146 path = wire.get('path', None)
1146 path = wire.get('path', None)
1147
1147
1148 if path and os.path.isdir(path):
1148 if path and os.path.isdir(path):
1149 opts['cwd'] = path
1149 opts['cwd'] = path
1150
1150
1151 if '_bare' in opts:
1151 if '_bare' in opts:
1152 _copts = []
1152 _copts = []
1153 del opts['_bare']
1153 del opts['_bare']
1154 else:
1154 else:
1155 _copts = ['-c', 'core.quotepath=false', ]
1155 _copts = ['-c', 'core.quotepath=false', ]
1156 safe_call = False
1156 safe_call = False
1157 if '_safe' in opts:
1157 if '_safe' in opts:
1158 # no exc on failure
1158 # no exc on failure
1159 del opts['_safe']
1159 del opts['_safe']
1160 safe_call = True
1160 safe_call = True
1161
1161
1162 if '_copts' in opts:
1162 if '_copts' in opts:
1163 _copts.extend(opts['_copts'] or [])
1163 _copts.extend(opts['_copts'] or [])
1164 del opts['_copts']
1164 del opts['_copts']
1165
1165
1166 gitenv = os.environ.copy()
1166 gitenv = os.environ.copy()
1167 gitenv.update(opts.pop('extra_env', {}))
1167 gitenv.update(opts.pop('extra_env', {}))
1168 # need to clean fix GIT_DIR !
1168 # need to clean fix GIT_DIR !
1169 if 'GIT_DIR' in gitenv:
1169 if 'GIT_DIR' in gitenv:
1170 del gitenv['GIT_DIR']
1170 del gitenv['GIT_DIR']
1171 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1171 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1172 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1172 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1173
1173
1174 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1174 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1175 _opts = {'env': gitenv, 'shell': False}
1175 _opts = {'env': gitenv, 'shell': False}
1176
1176
1177 proc = None
1177 proc = None
1178 try:
1178 try:
1179 _opts.update(opts)
1179 _opts.update(opts)
1180 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1180 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1181
1181
1182 return ''.join(proc), ''.join(proc.error)
1182 return ''.join(proc), ''.join(proc.error)
1183 except (EnvironmentError, OSError) as err:
1183 except (EnvironmentError, OSError) as err:
1184 cmd = ' '.join(cmd) # human friendly CMD
1184 cmd = ' '.join(cmd) # human friendly CMD
1185 tb_err = ("Couldn't run git command (%s).\n"
1185 tb_err = ("Couldn't run git command (%s).\n"
1186 "Original error was:%s\n"
1186 "Original error was:%s\n"
1187 "Call options:%s\n"
1187 "Call options:%s\n"
1188 % (cmd, err, _opts))
1188 % (cmd, err, _opts))
1189 log.exception(tb_err)
1189 log.exception(tb_err)
1190 if safe_call:
1190 if safe_call:
1191 return '', err
1191 return '', err
1192 else:
1192 else:
1193 raise exceptions.VcsException()(tb_err)
1193 raise exceptions.VcsException()(tb_err)
1194 finally:
1194 finally:
1195 if proc:
1195 if proc:
1196 proc.close()
1196 proc.close()
1197
1197
1198 @reraise_safe_exceptions
1198 @reraise_safe_exceptions
1199 def install_hooks(self, wire, force=False):
1199 def install_hooks(self, wire, force=False):
1200 from vcsserver.hook_utils import install_git_hooks
1200 from vcsserver.hook_utils import install_git_hooks
1201 bare = self.bare(wire)
1201 bare = self.bare(wire)
1202 path = wire['path']
1202 path = wire['path']
1203 return install_git_hooks(path, bare, force_create=force)
1203 return install_git_hooks(path, bare, force_create=force)
1204
1204
1205 @reraise_safe_exceptions
1205 @reraise_safe_exceptions
1206 def get_hooks_info(self, wire):
1206 def get_hooks_info(self, wire):
1207 from vcsserver.hook_utils import (
1207 from vcsserver.hook_utils import (
1208 get_git_pre_hook_version, get_git_post_hook_version)
1208 get_git_pre_hook_version, get_git_post_hook_version)
1209 bare = self.bare(wire)
1209 bare = self.bare(wire)
1210 path = wire['path']
1210 path = wire['path']
1211 return {
1211 return {
1212 'pre_version': get_git_pre_hook_version(path, bare),
1212 'pre_version': get_git_pre_hook_version(path, bare),
1213 'post_version': get_git_post_hook_version(path, bare),
1213 'post_version': get_git_post_hook_version(path, bare),
1214 }
1214 }
1215
1215
1216 @reraise_safe_exceptions
1216 @reraise_safe_exceptions
1217 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1217 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1218 archive_dir_name, commit_id):
1218 archive_dir_name, commit_id):
1219
1219
1220 def file_walker(_commit_id, path):
1220 def file_walker(_commit_id, path):
1221 repo_init = self._factory.repo_libgit2(wire)
1221 repo_init = self._factory.repo_libgit2(wire)
1222
1222
1223 with repo_init as repo:
1223 with repo_init as repo:
1224 commit = repo[commit_id]
1224 commit = repo[commit_id]
1225
1225
1226 if path in ['', '/']:
1226 if path in ['', '/']:
1227 tree = commit.tree
1227 tree = commit.tree
1228 else:
1228 else:
1229 tree = commit.tree[path.rstrip('/')]
1229 tree = commit.tree[path.rstrip('/')]
1230 tree_id = tree.id.hex
1230 tree_id = tree.id.hex
1231 try:
1231 try:
1232 tree = repo[tree_id]
1232 tree = repo[tree_id]
1233 except KeyError:
1233 except KeyError:
1234 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1234 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1235
1235
1236 index = LibGit2Index.Index()
1236 index = LibGit2Index.Index()
1237 index.read_tree(tree)
1237 index.read_tree(tree)
1238 file_iter = index
1238 file_iter = index
1239
1239
1240 for fn in file_iter:
1240 for fn in file_iter:
1241 file_path = fn.path
1241 file_path = fn.path
1242 mode = fn.mode
1242 mode = fn.mode
1243 is_link = stat.S_ISLNK(mode)
1243 is_link = stat.S_ISLNK(mode)
1244 if mode == pygit2.GIT_FILEMODE_COMMIT:
1244 if mode == pygit2.GIT_FILEMODE_COMMIT:
1245 log.debug('Skipping path %s as a commit node', file_path)
1245 log.debug('Skipping path %s as a commit node', file_path)
1246 continue
1246 continue
1247 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1247 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1248
1248
1249 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1249 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1250 archive_dir_name, commit_id)
1250 archive_dir_name, commit_id)
@@ -1,1043 +1,1043 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import functools
17 import functools
18 import io
18 import io
19 import logging
19 import logging
20 import os
20 import os
21 import stat
21 import stat
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24 import traceback
24 import traceback
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27 from hgext.strip import strip as hgext_strip
27 from hgext.strip import strip as hgext_strip
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
42 alwaysmatcher, patternmatcher, hgutil)
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 def make_ui_from_config(repo_config):
48 def make_ui_from_config(repo_config):
49
49
50 class LoggingUI(ui.ui):
50 class LoggingUI(ui.ui):
51 def status(self, *msg, **opts):
51 def status(self, *msg, **opts):
52 log.info(' '.join(msg).rstrip('\n'))
52 log.info(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).status(*msg, **opts)
53 super(LoggingUI, self).status(*msg, **opts)
54
54
55 def warn(self, *msg, **opts):
55 def warn(self, *msg, **opts):
56 log.warn(' '.join(msg).rstrip('\n'))
56 log.warn(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).warn(*msg, **opts)
57 super(LoggingUI, self).warn(*msg, **opts)
58
58
59 def error(self, *msg, **opts):
59 def error(self, *msg, **opts):
60 log.error(' '.join(msg).rstrip('\n'))
60 log.error(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).error(*msg, **opts)
61 super(LoggingUI, self).error(*msg, **opts)
62
62
63 def note(self, *msg, **opts):
63 def note(self, *msg, **opts):
64 log.info(' '.join(msg).rstrip('\n'))
64 log.info(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).note(*msg, **opts)
65 super(LoggingUI, self).note(*msg, **opts)
66
66
67 def debug(self, *msg, **opts):
67 def debug(self, *msg, **opts):
68 log.debug(' '.join(msg).rstrip('\n'))
68 log.debug(' '.join(msg).rstrip('\n'))
69 super(LoggingUI, self).debug(*msg, **opts)
69 super(LoggingUI, self).debug(*msg, **opts)
70
70
71 baseui = LoggingUI()
71 baseui = LoggingUI()
72
72
73 # clean the baseui object
73 # clean the baseui object
74 baseui._ocfg = hgconfig.config()
74 baseui._ocfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
77
77
78 for section, option, value in repo_config:
78 for section, option, value in repo_config:
79 baseui.setconfig(section, option, value)
79 baseui.setconfig(section, option, value)
80
80
81 # make our hgweb quiet so it doesn't print output
81 # make our hgweb quiet so it doesn't print output
82 baseui.setconfig('ui', 'quiet', 'true')
82 baseui.setconfig('ui', 'quiet', 'true')
83
83
84 baseui.setconfig('ui', 'paginate', 'never')
84 baseui.setconfig('ui', 'paginate', 'never')
85 # for better Error reporting of Mercurial
85 # for better Error reporting of Mercurial
86 baseui.setconfig('ui', 'message-output', 'stderr')
86 baseui.setconfig('ui', 'message-output', 'stderr')
87
87
88 # force mercurial to only use 1 thread, otherwise it may try to set a
88 # force mercurial to only use 1 thread, otherwise it may try to set a
89 # signal in a non-main thread, thus generating a ValueError.
89 # signal in a non-main thread, thus generating a ValueError.
90 baseui.setconfig('worker', 'numcpus', 1)
90 baseui.setconfig('worker', 'numcpus', 1)
91
91
92 # If there is no config for the largefiles extension, we explicitly disable
92 # If there is no config for the largefiles extension, we explicitly disable
93 # it here. This overrides settings from repositories hgrc file. Recent
93 # it here. This overrides settings from repositories hgrc file. Recent
94 # mercurial versions enable largefiles in hgrc on clone from largefile
94 # mercurial versions enable largefiles in hgrc on clone from largefile
95 # repo.
95 # repo.
96 if not baseui.hasconfig('extensions', 'largefiles'):
96 if not baseui.hasconfig('extensions', 'largefiles'):
97 log.debug('Explicitly disable largefiles extension for repo.')
97 log.debug('Explicitly disable largefiles extension for repo.')
98 baseui.setconfig('extensions', 'largefiles', '!')
98 baseui.setconfig('extensions', 'largefiles', '!')
99
99
100 return baseui
100 return baseui
101
101
102
102
103 def reraise_safe_exceptions(func):
103 def reraise_safe_exceptions(func):
104 """Decorator for converting mercurial exceptions to something neutral."""
104 """Decorator for converting mercurial exceptions to something neutral."""
105
105
106 def wrapper(*args, **kwargs):
106 def wrapper(*args, **kwargs):
107 try:
107 try:
108 return func(*args, **kwargs)
108 return func(*args, **kwargs)
109 except (Abort, InterventionRequired) as e:
109 except (Abort, InterventionRequired) as e:
110 raise_from_original(exceptions.AbortException(e))
110 raise_from_original(exceptions.AbortException(e))
111 except RepoLookupError as e:
111 except RepoLookupError as e:
112 raise_from_original(exceptions.LookupException(e))
112 raise_from_original(exceptions.LookupException(e))
113 except RequirementError as e:
113 except RequirementError as e:
114 raise_from_original(exceptions.RequirementException(e))
114 raise_from_original(exceptions.RequirementException(e))
115 except RepoError as e:
115 except RepoError as e:
116 raise_from_original(exceptions.VcsException(e))
116 raise_from_original(exceptions.VcsException(e))
117 except LookupError as e:
117 except LookupError as e:
118 raise_from_original(exceptions.LookupException(e))
118 raise_from_original(exceptions.LookupException(e))
119 except Exception as e:
119 except Exception as e:
120 if not hasattr(e, '_vcs_kind'):
120 if not hasattr(e, '_vcs_kind'):
121 log.exception("Unhandled exception in hg remote call")
121 log.exception("Unhandled exception in hg remote call")
122 raise_from_original(exceptions.UnhandledException(e))
122 raise_from_original(exceptions.UnhandledException(e))
123
123
124 raise
124 raise
125 return wrapper
125 return wrapper
126
126
127
127
128 class MercurialFactory(RepoFactory):
128 class MercurialFactory(RepoFactory):
129 repo_type = 'hg'
129 repo_type = 'hg'
130
130
131 def _create_config(self, config, hooks=True):
131 def _create_config(self, config, hooks=True):
132 if not hooks:
132 if not hooks:
133 hooks_to_clean = frozenset((
133 hooks_to_clean = frozenset((
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
136 new_config = []
136 new_config = []
137 for section, option, value in config:
137 for section, option, value in config:
138 if section == 'hooks' and option in hooks_to_clean:
138 if section == 'hooks' and option in hooks_to_clean:
139 continue
139 continue
140 new_config.append((section, option, value))
140 new_config.append((section, option, value))
141 config = new_config
141 config = new_config
142
142
143 baseui = make_ui_from_config(config)
143 baseui = make_ui_from_config(config)
144 return baseui
144 return baseui
145
145
146 def _create_repo(self, wire, create):
146 def _create_repo(self, wire, create):
147 baseui = self._create_config(wire["config"])
147 baseui = self._create_config(wire["config"])
148 return instance(baseui, wire["path"], create)
148 return instance(baseui, wire["path"], create)
149
149
150 def repo(self, wire, create=False):
150 def repo(self, wire, create=False):
151 """
151 """
152 Get a repository instance for the given path.
152 Get a repository instance for the given path.
153 """
153 """
154 return self._create_repo(wire, create)
154 return self._create_repo(wire, create)
155
155
156
156
157 def patch_ui_message_output(baseui):
157 def patch_ui_message_output(baseui):
158 baseui.setconfig('ui', 'quiet', 'false')
158 baseui.setconfig('ui', 'quiet', 'false')
159 output = io.BytesIO()
159 output = io.BytesIO()
160
160
161 def write(data, **unused_kwargs):
161 def write(data, **unused_kwargs):
162 output.write(data)
162 output.write(data)
163
163
164 baseui.status = write
164 baseui.status = write
165 baseui.write = write
165 baseui.write = write
166 baseui.warn = write
166 baseui.warn = write
167 baseui.debug = write
167 baseui.debug = write
168
168
169 return baseui, output
169 return baseui, output
170
170
171
171
172 class HgRemote(RemoteBase):
172 class HgRemote(RemoteBase):
173
173
174 def __init__(self, factory):
174 def __init__(self, factory):
175 self._factory = factory
175 self._factory = factory
176 self._bulk_methods = {
176 self._bulk_methods = {
177 "affected_files": self.ctx_files,
177 "affected_files": self.ctx_files,
178 "author": self.ctx_user,
178 "author": self.ctx_user,
179 "branch": self.ctx_branch,
179 "branch": self.ctx_branch,
180 "children": self.ctx_children,
180 "children": self.ctx_children,
181 "date": self.ctx_date,
181 "date": self.ctx_date,
182 "message": self.ctx_description,
182 "message": self.ctx_description,
183 "parents": self.ctx_parents,
183 "parents": self.ctx_parents,
184 "status": self.ctx_status,
184 "status": self.ctx_status,
185 "obsolete": self.ctx_obsolete,
185 "obsolete": self.ctx_obsolete,
186 "phase": self.ctx_phase,
186 "phase": self.ctx_phase,
187 "hidden": self.ctx_hidden,
187 "hidden": self.ctx_hidden,
188 "_file_paths": self.ctx_list,
188 "_file_paths": self.ctx_list,
189 }
189 }
190
190
191 def _get_ctx(self, repo, ref):
191 def _get_ctx(self, repo, ref):
192 return get_ctx(repo, ref)
192 return get_ctx(repo, ref)
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def discover_hg_version(self):
195 def discover_hg_version(self):
196 from mercurial import util
196 from mercurial import util
197 return util.version()
197 return util.version()
198
198
199 @reraise_safe_exceptions
199 @reraise_safe_exceptions
200 def is_empty(self, wire):
200 def is_empty(self, wire):
201 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
202
202
203 try:
203 try:
204 return len(repo) == 0
204 return len(repo) == 0
205 except Exception:
205 except Exception:
206 log.exception("failed to read object_store")
206 log.exception("failed to read object_store")
207 return False
207 return False
208
208
209 @reraise_safe_exceptions
209 @reraise_safe_exceptions
210 def bookmarks(self, wire):
210 def bookmarks(self, wire):
211 cache_on, context_uid, repo_id = self._cache_on(wire)
211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 region = self.region(wire)
212 region = self._region(wire)
213 @region.conditional_cache_on_arguments(condition=cache_on)
213 @region.conditional_cache_on_arguments(condition=cache_on)
214 def _bookmarks(_context_uid, _repo_id):
214 def _bookmarks(_context_uid, _repo_id):
215 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
216 return dict(repo._bookmarks)
216 return dict(repo._bookmarks)
217
217
218 return _bookmarks(context_uid, repo_id)
218 return _bookmarks(context_uid, repo_id)
219
219
220 @reraise_safe_exceptions
220 @reraise_safe_exceptions
221 def branches(self, wire, normal, closed):
221 def branches(self, wire, normal, closed):
222 cache_on, context_uid, repo_id = self._cache_on(wire)
222 cache_on, context_uid, repo_id = self._cache_on(wire)
223 region = self.region(wire)
223 region = self._region(wire)
224 @region.conditional_cache_on_arguments(condition=cache_on)
224 @region.conditional_cache_on_arguments(condition=cache_on)
225 def _branches(_context_uid, _repo_id, _normal, _closed):
225 def _branches(_context_uid, _repo_id, _normal, _closed):
226 repo = self._factory.repo(wire)
226 repo = self._factory.repo(wire)
227 iter_branches = repo.branchmap().iterbranches()
227 iter_branches = repo.branchmap().iterbranches()
228 bt = {}
228 bt = {}
229 for branch_name, _heads, tip, is_closed in iter_branches:
229 for branch_name, _heads, tip, is_closed in iter_branches:
230 if normal and not is_closed:
230 if normal and not is_closed:
231 bt[branch_name] = tip
231 bt[branch_name] = tip
232 if closed and is_closed:
232 if closed and is_closed:
233 bt[branch_name] = tip
233 bt[branch_name] = tip
234
234
235 return bt
235 return bt
236
236
237 return _branches(context_uid, repo_id, normal, closed)
237 return _branches(context_uid, repo_id, normal, closed)
238
238
239 @reraise_safe_exceptions
239 @reraise_safe_exceptions
240 def bulk_request(self, wire, commit_id, pre_load):
240 def bulk_request(self, wire, commit_id, pre_load):
241 cache_on, context_uid, repo_id = self._cache_on(wire)
241 cache_on, context_uid, repo_id = self._cache_on(wire)
242 region = self.region(wire)
242 region = self._region(wire)
243 @region.conditional_cache_on_arguments(condition=cache_on)
243 @region.conditional_cache_on_arguments(condition=cache_on)
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
245 result = {}
245 result = {}
246 for attr in pre_load:
246 for attr in pre_load:
247 try:
247 try:
248 method = self._bulk_methods[attr]
248 method = self._bulk_methods[attr]
249 result[attr] = method(wire, commit_id)
249 result[attr] = method(wire, commit_id)
250 except KeyError as e:
250 except KeyError as e:
251 raise exceptions.VcsException(e)(
251 raise exceptions.VcsException(e)(
252 'Unknown bulk attribute: "%s"' % attr)
252 'Unknown bulk attribute: "%s"' % attr)
253 return result
253 return result
254
254
255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
256
256
257 @reraise_safe_exceptions
257 @reraise_safe_exceptions
258 def ctx_branch(self, wire, commit_id):
258 def ctx_branch(self, wire, commit_id):
259 cache_on, context_uid, repo_id = self._cache_on(wire)
259 cache_on, context_uid, repo_id = self._cache_on(wire)
260 region = self.region(wire)
260 region = self._region(wire)
261 @region.conditional_cache_on_arguments(condition=cache_on)
261 @region.conditional_cache_on_arguments(condition=cache_on)
262 def _ctx_branch(_repo_id, _commit_id):
262 def _ctx_branch(_repo_id, _commit_id):
263 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
264 ctx = self._get_ctx(repo, commit_id)
264 ctx = self._get_ctx(repo, commit_id)
265 return ctx.branch()
265 return ctx.branch()
266 return _ctx_branch(repo_id, commit_id)
266 return _ctx_branch(repo_id, commit_id)
267
267
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_date(self, wire, commit_id):
269 def ctx_date(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 region = self.region(wire)
271 region = self._region(wire)
272 @region.conditional_cache_on_arguments(condition=cache_on)
272 @region.conditional_cache_on_arguments(condition=cache_on)
273 def _ctx_date(_repo_id, _commit_id):
273 def _ctx_date(_repo_id, _commit_id):
274 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
275 ctx = self._get_ctx(repo, commit_id)
275 ctx = self._get_ctx(repo, commit_id)
276 return ctx.date()
276 return ctx.date()
277 return _ctx_date(repo_id, commit_id)
277 return _ctx_date(repo_id, commit_id)
278
278
279 @reraise_safe_exceptions
279 @reraise_safe_exceptions
280 def ctx_description(self, wire, revision):
280 def ctx_description(self, wire, revision):
281 repo = self._factory.repo(wire)
281 repo = self._factory.repo(wire)
282 ctx = self._get_ctx(repo, revision)
282 ctx = self._get_ctx(repo, revision)
283 return ctx.description()
283 return ctx.description()
284
284
285 @reraise_safe_exceptions
285 @reraise_safe_exceptions
286 def ctx_files(self, wire, commit_id):
286 def ctx_files(self, wire, commit_id):
287 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
288 region = self.region(wire)
288 region = self._region(wire)
289 @region.conditional_cache_on_arguments(condition=cache_on)
289 @region.conditional_cache_on_arguments(condition=cache_on)
290 def _ctx_files(_repo_id, _commit_id):
290 def _ctx_files(_repo_id, _commit_id):
291 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
292 ctx = self._get_ctx(repo, commit_id)
292 ctx = self._get_ctx(repo, commit_id)
293 return ctx.files()
293 return ctx.files()
294
294
295 return _ctx_files(repo_id, commit_id)
295 return _ctx_files(repo_id, commit_id)
296
296
297 @reraise_safe_exceptions
297 @reraise_safe_exceptions
298 def ctx_list(self, path, revision):
298 def ctx_list(self, path, revision):
299 repo = self._factory.repo(path)
299 repo = self._factory.repo(path)
300 ctx = self._get_ctx(repo, revision)
300 ctx = self._get_ctx(repo, revision)
301 return list(ctx)
301 return list(ctx)
302
302
303 @reraise_safe_exceptions
303 @reraise_safe_exceptions
304 def ctx_parents(self, wire, commit_id):
304 def ctx_parents(self, wire, commit_id):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
306 region = self.region(wire)
306 region = self._region(wire)
307 @region.conditional_cache_on_arguments(condition=cache_on)
307 @region.conditional_cache_on_arguments(condition=cache_on)
308 def _ctx_parents(_repo_id, _commit_id):
308 def _ctx_parents(_repo_id, _commit_id):
309 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
310 ctx = self._get_ctx(repo, commit_id)
310 ctx = self._get_ctx(repo, commit_id)
311 return [parent.hex() for parent in ctx.parents()
311 return [parent.hex() for parent in ctx.parents()
312 if not (parent.hidden() or parent.obsolete())]
312 if not (parent.hidden() or parent.obsolete())]
313
313
314 return _ctx_parents(repo_id, commit_id)
314 return _ctx_parents(repo_id, commit_id)
315
315
316 @reraise_safe_exceptions
316 @reraise_safe_exceptions
317 def ctx_children(self, wire, commit_id):
317 def ctx_children(self, wire, commit_id):
318 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
319 region = self.region(wire)
319 region = self._region(wire)
320 @region.conditional_cache_on_arguments(condition=cache_on)
320 @region.conditional_cache_on_arguments(condition=cache_on)
321 def _ctx_children(_repo_id, _commit_id):
321 def _ctx_children(_repo_id, _commit_id):
322 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
323 ctx = self._get_ctx(repo, commit_id)
323 ctx = self._get_ctx(repo, commit_id)
324 return [child.hex() for child in ctx.children()
324 return [child.hex() for child in ctx.children()
325 if not (child.hidden() or child.obsolete())]
325 if not (child.hidden() or child.obsolete())]
326
326
327 return _ctx_children(repo_id, commit_id)
327 return _ctx_children(repo_id, commit_id)
328
328
329 @reraise_safe_exceptions
329 @reraise_safe_exceptions
330 def ctx_phase(self, wire, commit_id):
330 def ctx_phase(self, wire, commit_id):
331 cache_on, context_uid, repo_id = self._cache_on(wire)
331 cache_on, context_uid, repo_id = self._cache_on(wire)
332 region = self.region(wire)
332 region = self._region(wire)
333 @region.conditional_cache_on_arguments(condition=cache_on)
333 @region.conditional_cache_on_arguments(condition=cache_on)
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
335 repo = self._factory.repo(wire)
335 repo = self._factory.repo(wire)
336 ctx = self._get_ctx(repo, commit_id)
336 ctx = self._get_ctx(repo, commit_id)
337 # public=0, draft=1, secret=3
337 # public=0, draft=1, secret=3
338 return ctx.phase()
338 return ctx.phase()
339 return _ctx_phase(context_uid, repo_id, commit_id)
339 return _ctx_phase(context_uid, repo_id, commit_id)
340
340
341 @reraise_safe_exceptions
341 @reraise_safe_exceptions
342 def ctx_obsolete(self, wire, commit_id):
342 def ctx_obsolete(self, wire, commit_id):
343 cache_on, context_uid, repo_id = self._cache_on(wire)
343 cache_on, context_uid, repo_id = self._cache_on(wire)
344 region = self.region(wire)
344 region = self._region(wire)
345 @region.conditional_cache_on_arguments(condition=cache_on)
345 @region.conditional_cache_on_arguments(condition=cache_on)
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
347 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
348 ctx = self._get_ctx(repo, commit_id)
348 ctx = self._get_ctx(repo, commit_id)
349 return ctx.obsolete()
349 return ctx.obsolete()
350 return _ctx_obsolete(context_uid, repo_id, commit_id)
350 return _ctx_obsolete(context_uid, repo_id, commit_id)
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def ctx_hidden(self, wire, commit_id):
353 def ctx_hidden(self, wire, commit_id):
354 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 region = self.region(wire)
355 region = self._region(wire)
356 @region.conditional_cache_on_arguments(condition=cache_on)
356 @region.conditional_cache_on_arguments(condition=cache_on)
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
358 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
359 ctx = self._get_ctx(repo, commit_id)
359 ctx = self._get_ctx(repo, commit_id)
360 return ctx.hidden()
360 return ctx.hidden()
361 return _ctx_hidden(context_uid, repo_id, commit_id)
361 return _ctx_hidden(context_uid, repo_id, commit_id)
362
362
363 @reraise_safe_exceptions
363 @reraise_safe_exceptions
364 def ctx_substate(self, wire, revision):
364 def ctx_substate(self, wire, revision):
365 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
366 ctx = self._get_ctx(repo, revision)
366 ctx = self._get_ctx(repo, revision)
367 return ctx.substate
367 return ctx.substate
368
368
369 @reraise_safe_exceptions
369 @reraise_safe_exceptions
370 def ctx_status(self, wire, revision):
370 def ctx_status(self, wire, revision):
371 repo = self._factory.repo(wire)
371 repo = self._factory.repo(wire)
372 ctx = self._get_ctx(repo, revision)
372 ctx = self._get_ctx(repo, revision)
373 status = repo[ctx.p1().node()].status(other=ctx.node())
373 status = repo[ctx.p1().node()].status(other=ctx.node())
374 # object of status (odd, custom named tuple in mercurial) is not
374 # object of status (odd, custom named tuple in mercurial) is not
375 # correctly serializable, we make it a list, as the underling
375 # correctly serializable, we make it a list, as the underling
376 # API expects this to be a list
376 # API expects this to be a list
377 return list(status)
377 return list(status)
378
378
379 @reraise_safe_exceptions
379 @reraise_safe_exceptions
380 def ctx_user(self, wire, revision):
380 def ctx_user(self, wire, revision):
381 repo = self._factory.repo(wire)
381 repo = self._factory.repo(wire)
382 ctx = self._get_ctx(repo, revision)
382 ctx = self._get_ctx(repo, revision)
383 return ctx.user()
383 return ctx.user()
384
384
385 @reraise_safe_exceptions
385 @reraise_safe_exceptions
386 def check_url(self, url, config):
386 def check_url(self, url, config):
387 _proto = None
387 _proto = None
388 if '+' in url[:url.find('://')]:
388 if '+' in url[:url.find('://')]:
389 _proto = url[0:url.find('+')]
389 _proto = url[0:url.find('+')]
390 url = url[url.find('+') + 1:]
390 url = url[url.find('+') + 1:]
391 handlers = []
391 handlers = []
392 url_obj = url_parser(url)
392 url_obj = url_parser(url)
393 test_uri, authinfo = url_obj.authinfo()
393 test_uri, authinfo = url_obj.authinfo()
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
395 url_obj.query = obfuscate_qs(url_obj.query)
395 url_obj.query = obfuscate_qs(url_obj.query)
396
396
397 cleaned_uri = str(url_obj)
397 cleaned_uri = str(url_obj)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
399
399
400 if authinfo:
400 if authinfo:
401 # create a password manager
401 # create a password manager
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
403 passmgr.add_password(*authinfo)
403 passmgr.add_password(*authinfo)
404
404
405 handlers.extend((httpbasicauthhandler(passmgr),
405 handlers.extend((httpbasicauthhandler(passmgr),
406 httpdigestauthhandler(passmgr)))
406 httpdigestauthhandler(passmgr)))
407
407
408 o = urllib2.build_opener(*handlers)
408 o = urllib2.build_opener(*handlers)
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
410 ('Accept', 'application/mercurial-0.1')]
410 ('Accept', 'application/mercurial-0.1')]
411
411
412 q = {"cmd": 'between'}
412 q = {"cmd": 'between'}
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
414 qs = '?%s' % urllib.urlencode(q)
414 qs = '?%s' % urllib.urlencode(q)
415 cu = "%s%s" % (test_uri, qs)
415 cu = "%s%s" % (test_uri, qs)
416 req = urllib2.Request(cu, None, {})
416 req = urllib2.Request(cu, None, {})
417
417
418 try:
418 try:
419 log.debug("Trying to open URL %s", cleaned_uri)
419 log.debug("Trying to open URL %s", cleaned_uri)
420 resp = o.open(req)
420 resp = o.open(req)
421 if resp.code != 200:
421 if resp.code != 200:
422 raise exceptions.URLError()('Return Code is not 200')
422 raise exceptions.URLError()('Return Code is not 200')
423 except Exception as e:
423 except Exception as e:
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
425 # means it cannot be cloned
425 # means it cannot be cloned
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
427
427
428 # now check if it's a proper hg repo, but don't do it for svn
428 # now check if it's a proper hg repo, but don't do it for svn
429 try:
429 try:
430 if _proto == 'svn':
430 if _proto == 'svn':
431 pass
431 pass
432 else:
432 else:
433 # check for pure hg repos
433 # check for pure hg repos
434 log.debug(
434 log.debug(
435 "Verifying if URL is a Mercurial repository: %s",
435 "Verifying if URL is a Mercurial repository: %s",
436 cleaned_uri)
436 cleaned_uri)
437 ui = make_ui_from_config(config)
437 ui = make_ui_from_config(config)
438 peer_checker = makepeer(ui, url)
438 peer_checker = makepeer(ui, url)
439 peer_checker.lookup('tip')
439 peer_checker.lookup('tip')
440 except Exception as e:
440 except Exception as e:
441 log.warning("URL is not a valid Mercurial repository: %s",
441 log.warning("URL is not a valid Mercurial repository: %s",
442 cleaned_uri)
442 cleaned_uri)
443 raise exceptions.URLError(e)(
443 raise exceptions.URLError(e)(
444 "url [%s] does not look like an hg repo org_exc: %s"
444 "url [%s] does not look like an hg repo org_exc: %s"
445 % (cleaned_uri, e))
445 % (cleaned_uri, e))
446
446
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
448 return True
448 return True
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
452 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
453
453
454 if file_filter:
454 if file_filter:
455 match_filter = match(file_filter[0], '', [file_filter[1]])
455 match_filter = match(file_filter[0], '', [file_filter[1]])
456 else:
456 else:
457 match_filter = file_filter
457 match_filter = file_filter
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
459
459
460 try:
460 try:
461 return "".join(patch.diff(
461 return "".join(patch.diff(
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
463 except RepoLookupError as e:
463 except RepoLookupError as e:
464 raise exceptions.LookupException(e)()
464 raise exceptions.LookupException(e)()
465
465
466 @reraise_safe_exceptions
466 @reraise_safe_exceptions
467 def node_history(self, wire, revision, path, limit):
467 def node_history(self, wire, revision, path, limit):
468 cache_on, context_uid, repo_id = self._cache_on(wire)
468 cache_on, context_uid, repo_id = self._cache_on(wire)
469 region = self.region(wire)
469 region = self._region(wire)
470 @region.conditional_cache_on_arguments(condition=cache_on)
470 @region.conditional_cache_on_arguments(condition=cache_on)
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
472 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
473
473
474 ctx = self._get_ctx(repo, revision)
474 ctx = self._get_ctx(repo, revision)
475 fctx = ctx.filectx(path)
475 fctx = ctx.filectx(path)
476
476
477 def history_iter():
477 def history_iter():
478 limit_rev = fctx.rev()
478 limit_rev = fctx.rev()
479 for obj in reversed(list(fctx.filelog())):
479 for obj in reversed(list(fctx.filelog())):
480 obj = fctx.filectx(obj)
480 obj = fctx.filectx(obj)
481 ctx = obj.changectx()
481 ctx = obj.changectx()
482 if ctx.hidden() or ctx.obsolete():
482 if ctx.hidden() or ctx.obsolete():
483 continue
483 continue
484
484
485 if limit_rev >= obj.rev():
485 if limit_rev >= obj.rev():
486 yield obj
486 yield obj
487
487
488 history = []
488 history = []
489 for cnt, obj in enumerate(history_iter()):
489 for cnt, obj in enumerate(history_iter()):
490 if limit and cnt >= limit:
490 if limit and cnt >= limit:
491 break
491 break
492 history.append(hex(obj.node()))
492 history.append(hex(obj.node()))
493
493
494 return [x for x in history]
494 return [x for x in history]
495 return _node_history(context_uid, repo_id, revision, path, limit)
495 return _node_history(context_uid, repo_id, revision, path, limit)
496
496
497 @reraise_safe_exceptions
497 @reraise_safe_exceptions
498 def node_history_untill(self, wire, revision, path, limit):
498 def node_history_untill(self, wire, revision, path, limit):
499 cache_on, context_uid, repo_id = self._cache_on(wire)
499 cache_on, context_uid, repo_id = self._cache_on(wire)
500 region = self.region(wire)
500 region = self._region(wire)
501 @region.conditional_cache_on_arguments(condition=cache_on)
501 @region.conditional_cache_on_arguments(condition=cache_on)
502 def _node_history_until(_context_uid, _repo_id):
502 def _node_history_until(_context_uid, _repo_id):
503 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
504 ctx = self._get_ctx(repo, revision)
504 ctx = self._get_ctx(repo, revision)
505 fctx = ctx.filectx(path)
505 fctx = ctx.filectx(path)
506
506
507 file_log = list(fctx.filelog())
507 file_log = list(fctx.filelog())
508 if limit:
508 if limit:
509 # Limit to the last n items
509 # Limit to the last n items
510 file_log = file_log[-limit:]
510 file_log = file_log[-limit:]
511
511
512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
514
514
515 @reraise_safe_exceptions
515 @reraise_safe_exceptions
516 def fctx_annotate(self, wire, revision, path):
516 def fctx_annotate(self, wire, revision, path):
517 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
518 ctx = self._get_ctx(repo, revision)
518 ctx = self._get_ctx(repo, revision)
519 fctx = ctx.filectx(path)
519 fctx = ctx.filectx(path)
520
520
521 result = []
521 result = []
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 ln_no = i
523 ln_no = i
524 sha = hex(annotate_obj.fctx.node())
524 sha = hex(annotate_obj.fctx.node())
525 content = annotate_obj.text
525 content = annotate_obj.text
526 result.append((ln_no, sha, content))
526 result.append((ln_no, sha, content))
527 return result
527 return result
528
528
529 @reraise_safe_exceptions
529 @reraise_safe_exceptions
530 def fctx_node_data(self, wire, revision, path):
530 def fctx_node_data(self, wire, revision, path):
531 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
532 ctx = self._get_ctx(repo, revision)
532 ctx = self._get_ctx(repo, revision)
533 fctx = ctx.filectx(path)
533 fctx = ctx.filectx(path)
534 return fctx.data()
534 return fctx.data()
535
535
536 @reraise_safe_exceptions
536 @reraise_safe_exceptions
537 def fctx_flags(self, wire, commit_id, path):
537 def fctx_flags(self, wire, commit_id, path):
538 cache_on, context_uid, repo_id = self._cache_on(wire)
538 cache_on, context_uid, repo_id = self._cache_on(wire)
539 region = self.region(wire)
539 region = self._region(wire)
540 @region.conditional_cache_on_arguments(condition=cache_on)
540 @region.conditional_cache_on_arguments(condition=cache_on)
541 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
542 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
544 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
545 return fctx.flags()
545 return fctx.flags()
546
546
547 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
548
548
549 @reraise_safe_exceptions
549 @reraise_safe_exceptions
550 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
551 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 region = self.region(wire)
552 region = self._region(wire)
553 @region.conditional_cache_on_arguments(condition=cache_on)
553 @region.conditional_cache_on_arguments(condition=cache_on)
554 def _fctx_size(_repo_id, _revision, _path):
554 def _fctx_size(_repo_id, _revision, _path):
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556 ctx = self._get_ctx(repo, commit_id)
556 ctx = self._get_ctx(repo, commit_id)
557 fctx = ctx.filectx(path)
557 fctx = ctx.filectx(path)
558 return fctx.size()
558 return fctx.size()
559 return _fctx_size(repo_id, commit_id, path)
559 return _fctx_size(repo_id, commit_id, path)
560
560
561 @reraise_safe_exceptions
561 @reraise_safe_exceptions
562 def get_all_commit_ids(self, wire, name):
562 def get_all_commit_ids(self, wire, name):
563 cache_on, context_uid, repo_id = self._cache_on(wire)
563 cache_on, context_uid, repo_id = self._cache_on(wire)
564 region = self.region(wire)
564 region = self._region(wire)
565 @region.conditional_cache_on_arguments(condition=cache_on)
565 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
567 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
568 repo = repo.filtered(name)
568 repo = repo.filtered(name)
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
570 return revs
570 return revs
571 return _get_all_commit_ids(context_uid, repo_id, name)
571 return _get_all_commit_ids(context_uid, repo_id, name)
572
572
573 @reraise_safe_exceptions
573 @reraise_safe_exceptions
574 def get_config_value(self, wire, section, name, untrusted=False):
574 def get_config_value(self, wire, section, name, untrusted=False):
575 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
576 return repo.ui.config(section, name, untrusted=untrusted)
576 return repo.ui.config(section, name, untrusted=untrusted)
577
577
578 @reraise_safe_exceptions
578 @reraise_safe_exceptions
579 def is_large_file(self, wire, commit_id, path):
579 def is_large_file(self, wire, commit_id, path):
580 cache_on, context_uid, repo_id = self._cache_on(wire)
580 cache_on, context_uid, repo_id = self._cache_on(wire)
581 region = self.region(wire)
581 region = self._region(wire)
582 @region.conditional_cache_on_arguments(condition=cache_on)
582 @region.conditional_cache_on_arguments(condition=cache_on)
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
584 return largefiles.lfutil.isstandin(path)
584 return largefiles.lfutil.isstandin(path)
585
585
586 return _is_large_file(context_uid, repo_id, commit_id, path)
586 return _is_large_file(context_uid, repo_id, commit_id, path)
587
587
588 @reraise_safe_exceptions
588 @reraise_safe_exceptions
589 def is_binary(self, wire, revision, path):
589 def is_binary(self, wire, revision, path):
590 cache_on, context_uid, repo_id = self._cache_on(wire)
590 cache_on, context_uid, repo_id = self._cache_on(wire)
591
591
592 region = self.region(wire)
592 region = self._region(wire)
593 @region.conditional_cache_on_arguments(condition=cache_on)
593 @region.conditional_cache_on_arguments(condition=cache_on)
594 def _is_binary(_repo_id, _sha, _path):
594 def _is_binary(_repo_id, _sha, _path):
595 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
596 ctx = self._get_ctx(repo, revision)
596 ctx = self._get_ctx(repo, revision)
597 fctx = ctx.filectx(path)
597 fctx = ctx.filectx(path)
598 return fctx.isbinary()
598 return fctx.isbinary()
599
599
600 return _is_binary(repo_id, revision, path)
600 return _is_binary(repo_id, revision, path)
601
601
602 @reraise_safe_exceptions
602 @reraise_safe_exceptions
603 def in_largefiles_store(self, wire, sha):
603 def in_largefiles_store(self, wire, sha):
604 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
605 return largefiles.lfutil.instore(repo, sha)
605 return largefiles.lfutil.instore(repo, sha)
606
606
607 @reraise_safe_exceptions
607 @reraise_safe_exceptions
608 def in_user_cache(self, wire, sha):
608 def in_user_cache(self, wire, sha):
609 repo = self._factory.repo(wire)
609 repo = self._factory.repo(wire)
610 return largefiles.lfutil.inusercache(repo.ui, sha)
610 return largefiles.lfutil.inusercache(repo.ui, sha)
611
611
612 @reraise_safe_exceptions
612 @reraise_safe_exceptions
613 def store_path(self, wire, sha):
613 def store_path(self, wire, sha):
614 repo = self._factory.repo(wire)
614 repo = self._factory.repo(wire)
615 return largefiles.lfutil.storepath(repo, sha)
615 return largefiles.lfutil.storepath(repo, sha)
616
616
617 @reraise_safe_exceptions
617 @reraise_safe_exceptions
618 def link(self, wire, sha, path):
618 def link(self, wire, sha, path):
619 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
620 largefiles.lfutil.link(
620 largefiles.lfutil.link(
621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
622
622
623 @reraise_safe_exceptions
623 @reraise_safe_exceptions
624 def localrepository(self, wire, create=False):
624 def localrepository(self, wire, create=False):
625 self._factory.repo(wire, create=create)
625 self._factory.repo(wire, create=create)
626
626
627 @reraise_safe_exceptions
627 @reraise_safe_exceptions
628 def lookup(self, wire, revision, both):
628 def lookup(self, wire, revision, both):
629 cache_on, context_uid, repo_id = self._cache_on(wire)
629 cache_on, context_uid, repo_id = self._cache_on(wire)
630
630
631 region = self.region(wire)
631 region = self._region(wire)
632 @region.conditional_cache_on_arguments(condition=cache_on)
632 @region.conditional_cache_on_arguments(condition=cache_on)
633 def _lookup(_context_uid, _repo_id, _revision, _both):
633 def _lookup(_context_uid, _repo_id, _revision, _both):
634
634
635 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
636 rev = _revision
636 rev = _revision
637 if isinstance(rev, int):
637 if isinstance(rev, int):
638 # NOTE(marcink):
638 # NOTE(marcink):
639 # since Mercurial doesn't support negative indexes properly
639 # since Mercurial doesn't support negative indexes properly
640 # we need to shift accordingly by one to get proper index, e.g
640 # we need to shift accordingly by one to get proper index, e.g
641 # repo[-1] => repo[-2]
641 # repo[-1] => repo[-2]
642 # repo[0] => repo[-1]
642 # repo[0] => repo[-1]
643 if rev <= 0:
643 if rev <= 0:
644 rev = rev + -1
644 rev = rev + -1
645 try:
645 try:
646 ctx = self._get_ctx(repo, rev)
646 ctx = self._get_ctx(repo, rev)
647 except (TypeError, RepoLookupError) as e:
647 except (TypeError, RepoLookupError) as e:
648 e._org_exc_tb = traceback.format_exc()
648 e._org_exc_tb = traceback.format_exc()
649 raise exceptions.LookupException(e)(rev)
649 raise exceptions.LookupException(e)(rev)
650 except LookupError as e:
650 except LookupError as e:
651 e._org_exc_tb = traceback.format_exc()
651 e._org_exc_tb = traceback.format_exc()
652 raise exceptions.LookupException(e)(e.name)
652 raise exceptions.LookupException(e)(e.name)
653
653
654 if not both:
654 if not both:
655 return ctx.hex()
655 return ctx.hex()
656
656
657 ctx = repo[ctx.hex()]
657 ctx = repo[ctx.hex()]
658 return ctx.hex(), ctx.rev()
658 return ctx.hex(), ctx.rev()
659
659
660 return _lookup(context_uid, repo_id, revision, both)
660 return _lookup(context_uid, repo_id, revision, both)
661
661
662 @reraise_safe_exceptions
662 @reraise_safe_exceptions
663 def sync_push(self, wire, url):
663 def sync_push(self, wire, url):
664 if not self.check_url(url, wire['config']):
664 if not self.check_url(url, wire['config']):
665 return
665 return
666
666
667 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
668
668
669 # Disable any prompts for this repo
669 # Disable any prompts for this repo
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
671
671
672 bookmarks = dict(repo._bookmarks).keys()
672 bookmarks = dict(repo._bookmarks).keys()
673 remote = peer(repo, {}, url)
673 remote = peer(repo, {}, url)
674 # Disable any prompts for this remote
674 # Disable any prompts for this remote
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
676
676
677 return exchange.push(
677 return exchange.push(
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def revision(self, wire, rev):
681 def revision(self, wire, rev):
682 repo = self._factory.repo(wire)
682 repo = self._factory.repo(wire)
683 ctx = self._get_ctx(repo, rev)
683 ctx = self._get_ctx(repo, rev)
684 return ctx.rev()
684 return ctx.rev()
685
685
686 @reraise_safe_exceptions
686 @reraise_safe_exceptions
687 def rev_range(self, wire, commit_filter):
687 def rev_range(self, wire, commit_filter):
688 cache_on, context_uid, repo_id = self._cache_on(wire)
688 cache_on, context_uid, repo_id = self._cache_on(wire)
689
689
690 region = self.region(wire)
690 region = self._region(wire)
691 @region.conditional_cache_on_arguments(condition=cache_on)
691 @region.conditional_cache_on_arguments(condition=cache_on)
692 def _rev_range(_context_uid, _repo_id, _filter):
692 def _rev_range(_context_uid, _repo_id, _filter):
693 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
694 revisions = [rev for rev in revrange(repo, commit_filter)]
694 revisions = [rev for rev in revrange(repo, commit_filter)]
695 return revisions
695 return revisions
696
696
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
698
698
699 @reraise_safe_exceptions
699 @reraise_safe_exceptions
700 def rev_range_hash(self, wire, node):
700 def rev_range_hash(self, wire, node):
701 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
702
702
703 def get_revs(repo, rev_opt):
703 def get_revs(repo, rev_opt):
704 if rev_opt:
704 if rev_opt:
705 revs = revrange(repo, rev_opt)
705 revs = revrange(repo, rev_opt)
706 if len(revs) == 0:
706 if len(revs) == 0:
707 return (nullrev, nullrev)
707 return (nullrev, nullrev)
708 return max(revs), min(revs)
708 return max(revs), min(revs)
709 else:
709 else:
710 return len(repo) - 1, 0
710 return len(repo) - 1, 0
711
711
712 stop, start = get_revs(repo, [node + ':'])
712 stop, start = get_revs(repo, [node + ':'])
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
714 return revs
714 return revs
715
715
716 @reraise_safe_exceptions
716 @reraise_safe_exceptions
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
718 other_path = kwargs.pop('other_path', None)
718 other_path = kwargs.pop('other_path', None)
719
719
720 # case when we want to compare two independent repositories
720 # case when we want to compare two independent repositories
721 if other_path and other_path != wire["path"]:
721 if other_path and other_path != wire["path"]:
722 baseui = self._factory._create_config(wire["config"])
722 baseui = self._factory._create_config(wire["config"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
724 else:
724 else:
725 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
726 return list(repo.revs(rev_spec, *args))
726 return list(repo.revs(rev_spec, *args))
727
727
728 @reraise_safe_exceptions
728 @reraise_safe_exceptions
729 def verify(self, wire,):
729 def verify(self, wire,):
730 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
731 baseui = self._factory._create_config(wire['config'])
731 baseui = self._factory._create_config(wire['config'])
732
732
733 baseui, output = patch_ui_message_output(baseui)
733 baseui, output = patch_ui_message_output(baseui)
734
734
735 repo.ui = baseui
735 repo.ui = baseui
736 verify.verify(repo)
736 verify.verify(repo)
737 return output.getvalue()
737 return output.getvalue()
738
738
739 @reraise_safe_exceptions
739 @reraise_safe_exceptions
740 def hg_update_cache(self, wire,):
740 def hg_update_cache(self, wire,):
741 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
742 baseui = self._factory._create_config(wire['config'])
742 baseui = self._factory._create_config(wire['config'])
743 baseui, output = patch_ui_message_output(baseui)
743 baseui, output = patch_ui_message_output(baseui)
744
744
745 repo.ui = baseui
745 repo.ui = baseui
746 with repo.wlock(), repo.lock():
746 with repo.wlock(), repo.lock():
747 repo.updatecaches(full=True)
747 repo.updatecaches(full=True)
748
748
749 return output.getvalue()
749 return output.getvalue()
750
750
751 @reraise_safe_exceptions
751 @reraise_safe_exceptions
752 def hg_rebuild_fn_cache(self, wire,):
752 def hg_rebuild_fn_cache(self, wire,):
753 repo = self._factory.repo(wire)
753 repo = self._factory.repo(wire)
754 baseui = self._factory._create_config(wire['config'])
754 baseui = self._factory._create_config(wire['config'])
755 baseui, output = patch_ui_message_output(baseui)
755 baseui, output = patch_ui_message_output(baseui)
756
756
757 repo.ui = baseui
757 repo.ui = baseui
758
758
759 repair.rebuildfncache(baseui, repo)
759 repair.rebuildfncache(baseui, repo)
760
760
761 return output.getvalue()
761 return output.getvalue()
762
762
763 @reraise_safe_exceptions
763 @reraise_safe_exceptions
764 def tags(self, wire):
764 def tags(self, wire):
765 cache_on, context_uid, repo_id = self._cache_on(wire)
765 cache_on, context_uid, repo_id = self._cache_on(wire)
766 region = self.region(wire)
766 region = self._region(wire)
767 @region.conditional_cache_on_arguments(condition=cache_on)
767 @region.conditional_cache_on_arguments(condition=cache_on)
768 def _tags(_context_uid, _repo_id):
768 def _tags(_context_uid, _repo_id):
769 repo = self._factory.repo(wire)
769 repo = self._factory.repo(wire)
770 return repo.tags()
770 return repo.tags()
771
771
772 return _tags(context_uid, repo_id)
772 return _tags(context_uid, repo_id)
773
773
774 @reraise_safe_exceptions
774 @reraise_safe_exceptions
775 def update(self, wire, node=None, clean=False):
775 def update(self, wire, node=None, clean=False):
776 repo = self._factory.repo(wire)
776 repo = self._factory.repo(wire)
777 baseui = self._factory._create_config(wire['config'])
777 baseui = self._factory._create_config(wire['config'])
778 commands.update(baseui, repo, node=node, clean=clean)
778 commands.update(baseui, repo, node=node, clean=clean)
779
779
780 @reraise_safe_exceptions
780 @reraise_safe_exceptions
781 def identify(self, wire):
781 def identify(self, wire):
782 repo = self._factory.repo(wire)
782 repo = self._factory.repo(wire)
783 baseui = self._factory._create_config(wire['config'])
783 baseui = self._factory._create_config(wire['config'])
784 output = io.BytesIO()
784 output = io.BytesIO()
785 baseui.write = output.write
785 baseui.write = output.write
786 # This is required to get a full node id
786 # This is required to get a full node id
787 baseui.debugflag = True
787 baseui.debugflag = True
788 commands.identify(baseui, repo, id=True)
788 commands.identify(baseui, repo, id=True)
789
789
790 return output.getvalue()
790 return output.getvalue()
791
791
792 @reraise_safe_exceptions
792 @reraise_safe_exceptions
793 def heads(self, wire, branch=None):
793 def heads(self, wire, branch=None):
794 repo = self._factory.repo(wire)
794 repo = self._factory.repo(wire)
795 baseui = self._factory._create_config(wire['config'])
795 baseui = self._factory._create_config(wire['config'])
796 output = io.BytesIO()
796 output = io.BytesIO()
797
797
798 def write(data, **unused_kwargs):
798 def write(data, **unused_kwargs):
799 output.write(data)
799 output.write(data)
800
800
801 baseui.write = write
801 baseui.write = write
802 if branch:
802 if branch:
803 args = [branch]
803 args = [branch]
804 else:
804 else:
805 args = []
805 args = []
806 commands.heads(baseui, repo, template='{node} ', *args)
806 commands.heads(baseui, repo, template='{node} ', *args)
807
807
808 return output.getvalue()
808 return output.getvalue()
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def ancestor(self, wire, revision1, revision2):
811 def ancestor(self, wire, revision1, revision2):
812 repo = self._factory.repo(wire)
812 repo = self._factory.repo(wire)
813 changelog = repo.changelog
813 changelog = repo.changelog
814 lookup = repo.lookup
814 lookup = repo.lookup
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
816 return hex(a)
816 return hex(a)
817
817
818 @reraise_safe_exceptions
818 @reraise_safe_exceptions
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
822
822
823 @reraise_safe_exceptions
823 @reraise_safe_exceptions
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
825
825
826 repo = self._factory.repo(wire)
826 repo = self._factory.repo(wire)
827 baseui = self._factory._create_config(wire['config'])
827 baseui = self._factory._create_config(wire['config'])
828 publishing = baseui.configbool('phases', 'publish')
828 publishing = baseui.configbool('phases', 'publish')
829 if publishing:
829 if publishing:
830 new_commit = 'public'
830 new_commit = 'public'
831 else:
831 else:
832 new_commit = 'draft'
832 new_commit = 'draft'
833
833
834 def _filectxfn(_repo, ctx, path):
834 def _filectxfn(_repo, ctx, path):
835 """
835 """
836 Marks given path as added/changed/removed in a given _repo. This is
836 Marks given path as added/changed/removed in a given _repo. This is
837 for internal mercurial commit function.
837 for internal mercurial commit function.
838 """
838 """
839
839
840 # check if this path is removed
840 # check if this path is removed
841 if path in removed:
841 if path in removed:
842 # returning None is a way to mark node for removal
842 # returning None is a way to mark node for removal
843 return None
843 return None
844
844
845 # check if this path is added
845 # check if this path is added
846 for node in updated:
846 for node in updated:
847 if node['path'] == path:
847 if node['path'] == path:
848 return memfilectx(
848 return memfilectx(
849 _repo,
849 _repo,
850 changectx=ctx,
850 changectx=ctx,
851 path=node['path'],
851 path=node['path'],
852 data=node['content'],
852 data=node['content'],
853 islink=False,
853 islink=False,
854 isexec=bool(node['mode'] & stat.S_IXUSR),
854 isexec=bool(node['mode'] & stat.S_IXUSR),
855 copysource=False)
855 copysource=False)
856
856
857 raise exceptions.AbortException()(
857 raise exceptions.AbortException()(
858 "Given path haven't been marked as added, "
858 "Given path haven't been marked as added, "
859 "changed or removed (%s)" % path)
859 "changed or removed (%s)" % path)
860
860
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
862
862
863 commit_ctx = memctx(
863 commit_ctx = memctx(
864 repo=repo,
864 repo=repo,
865 parents=parents,
865 parents=parents,
866 text=message,
866 text=message,
867 files=files,
867 files=files,
868 filectxfn=_filectxfn,
868 filectxfn=_filectxfn,
869 user=user,
869 user=user,
870 date=(commit_time, commit_timezone),
870 date=(commit_time, commit_timezone),
871 extra=extra)
871 extra=extra)
872
872
873 n = repo.commitctx(commit_ctx)
873 n = repo.commitctx(commit_ctx)
874 new_id = hex(n)
874 new_id = hex(n)
875
875
876 return new_id
876 return new_id
877
877
878 @reraise_safe_exceptions
878 @reraise_safe_exceptions
879 def pull(self, wire, url, commit_ids=None):
879 def pull(self, wire, url, commit_ids=None):
880 repo = self._factory.repo(wire)
880 repo = self._factory.repo(wire)
881 # Disable any prompts for this repo
881 # Disable any prompts for this repo
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
883
883
884 remote = peer(repo, {}, url)
884 remote = peer(repo, {}, url)
885 # Disable any prompts for this remote
885 # Disable any prompts for this remote
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
887
887
888 if commit_ids:
888 if commit_ids:
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
890
890
891 return exchange.pull(
891 return exchange.pull(
892 repo, remote, heads=commit_ids, force=None).cgresult
892 repo, remote, heads=commit_ids, force=None).cgresult
893
893
894 @reraise_safe_exceptions
894 @reraise_safe_exceptions
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
896 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
898
898
899 # Mercurial internally has a lot of logic that checks ONLY if
899 # Mercurial internally has a lot of logic that checks ONLY if
900 # option is defined, we just pass those if they are defined then
900 # option is defined, we just pass those if they are defined then
901 opts = {}
901 opts = {}
902 if bookmark:
902 if bookmark:
903 opts['bookmark'] = bookmark
903 opts['bookmark'] = bookmark
904 if branch:
904 if branch:
905 opts['branch'] = branch
905 opts['branch'] = branch
906 if revision:
906 if revision:
907 opts['rev'] = revision
907 opts['rev'] = revision
908
908
909 commands.pull(baseui, repo, source, **opts)
909 commands.pull(baseui, repo, source, **opts)
910
910
911 @reraise_safe_exceptions
911 @reraise_safe_exceptions
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
913 repo = self._factory.repo(wire)
913 repo = self._factory.repo(wire)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
916 new_branch=push_branches)
916 new_branch=push_branches)
917
917
918 @reraise_safe_exceptions
918 @reraise_safe_exceptions
919 def strip(self, wire, revision, update, backup):
919 def strip(self, wire, revision, update, backup):
920 repo = self._factory.repo(wire)
920 repo = self._factory.repo(wire)
921 ctx = self._get_ctx(repo, revision)
921 ctx = self._get_ctx(repo, revision)
922 hgext_strip(
922 hgext_strip(
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
924
924
925 @reraise_safe_exceptions
925 @reraise_safe_exceptions
926 def get_unresolved_files(self, wire):
926 def get_unresolved_files(self, wire):
927 repo = self._factory.repo(wire)
927 repo = self._factory.repo(wire)
928
928
929 log.debug('Calculating unresolved files for repo: %s', repo)
929 log.debug('Calculating unresolved files for repo: %s', repo)
930 output = io.BytesIO()
930 output = io.BytesIO()
931
931
932 def write(data, **unused_kwargs):
932 def write(data, **unused_kwargs):
933 output.write(data)
933 output.write(data)
934
934
935 baseui = self._factory._create_config(wire['config'])
935 baseui = self._factory._create_config(wire['config'])
936 baseui.write = write
936 baseui.write = write
937
937
938 commands.resolve(baseui, repo, list=True)
938 commands.resolve(baseui, repo, list=True)
939 unresolved = output.getvalue().splitlines(0)
939 unresolved = output.getvalue().splitlines(0)
940 return unresolved
940 return unresolved
941
941
942 @reraise_safe_exceptions
942 @reraise_safe_exceptions
943 def merge(self, wire, revision):
943 def merge(self, wire, revision):
944 repo = self._factory.repo(wire)
944 repo = self._factory.repo(wire)
945 baseui = self._factory._create_config(wire['config'])
945 baseui = self._factory._create_config(wire['config'])
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
947
947
948 # In case of sub repositories are used mercurial prompts the user in
948 # In case of sub repositories are used mercurial prompts the user in
949 # case of merge conflicts or different sub repository sources. By
949 # case of merge conflicts or different sub repository sources. By
950 # setting the interactive flag to `False` mercurial doesn't prompt the
950 # setting the interactive flag to `False` mercurial doesn't prompt the
951 # used but instead uses a default value.
951 # used but instead uses a default value.
952 repo.ui.setconfig('ui', 'interactive', False)
952 repo.ui.setconfig('ui', 'interactive', False)
953 commands.merge(baseui, repo, rev=revision)
953 commands.merge(baseui, repo, rev=revision)
954
954
955 @reraise_safe_exceptions
955 @reraise_safe_exceptions
956 def merge_state(self, wire):
956 def merge_state(self, wire):
957 repo = self._factory.repo(wire)
957 repo = self._factory.repo(wire)
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
959
959
960 # In case of sub repositories are used mercurial prompts the user in
960 # In case of sub repositories are used mercurial prompts the user in
961 # case of merge conflicts or different sub repository sources. By
961 # case of merge conflicts or different sub repository sources. By
962 # setting the interactive flag to `False` mercurial doesn't prompt the
962 # setting the interactive flag to `False` mercurial doesn't prompt the
963 # used but instead uses a default value.
963 # used but instead uses a default value.
964 repo.ui.setconfig('ui', 'interactive', False)
964 repo.ui.setconfig('ui', 'interactive', False)
965 ms = hg_merge.mergestate(repo)
965 ms = hg_merge.mergestate(repo)
966 return [x for x in ms.unresolved()]
966 return [x for x in ms.unresolved()]
967
967
968 @reraise_safe_exceptions
968 @reraise_safe_exceptions
969 def commit(self, wire, message, username, close_branch=False):
969 def commit(self, wire, message, username, close_branch=False):
970 repo = self._factory.repo(wire)
970 repo = self._factory.repo(wire)
971 baseui = self._factory._create_config(wire['config'])
971 baseui = self._factory._create_config(wire['config'])
972 repo.ui.setconfig('ui', 'username', username)
972 repo.ui.setconfig('ui', 'username', username)
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
974
974
975 @reraise_safe_exceptions
975 @reraise_safe_exceptions
976 def rebase(self, wire, source=None, dest=None, abort=False):
976 def rebase(self, wire, source=None, dest=None, abort=False):
977 repo = self._factory.repo(wire)
977 repo = self._factory.repo(wire)
978 baseui = self._factory._create_config(wire['config'])
978 baseui = self._factory._create_config(wire['config'])
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
980 # In case of sub repositories are used mercurial prompts the user in
980 # In case of sub repositories are used mercurial prompts the user in
981 # case of merge conflicts or different sub repository sources. By
981 # case of merge conflicts or different sub repository sources. By
982 # setting the interactive flag to `False` mercurial doesn't prompt the
982 # setting the interactive flag to `False` mercurial doesn't prompt the
983 # used but instead uses a default value.
983 # used but instead uses a default value.
984 repo.ui.setconfig('ui', 'interactive', False)
984 repo.ui.setconfig('ui', 'interactive', False)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
986
986
987 @reraise_safe_exceptions
987 @reraise_safe_exceptions
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
989 repo = self._factory.repo(wire)
989 repo = self._factory.repo(wire)
990 ctx = self._get_ctx(repo, revision)
990 ctx = self._get_ctx(repo, revision)
991 node = ctx.node()
991 node = ctx.node()
992
992
993 date = (tag_time, tag_timezone)
993 date = (tag_time, tag_timezone)
994 try:
994 try:
995 hg_tag.tag(repo, name, node, message, local, user, date)
995 hg_tag.tag(repo, name, node, message, local, user, date)
996 except Abort as e:
996 except Abort as e:
997 log.exception("Tag operation aborted")
997 log.exception("Tag operation aborted")
998 # Exception can contain unicode which we convert
998 # Exception can contain unicode which we convert
999 raise exceptions.AbortException(e)(repr(e))
999 raise exceptions.AbortException(e)(repr(e))
1000
1000
1001 @reraise_safe_exceptions
1001 @reraise_safe_exceptions
1002 def bookmark(self, wire, bookmark, revision=None):
1002 def bookmark(self, wire, bookmark, revision=None):
1003 repo = self._factory.repo(wire)
1003 repo = self._factory.repo(wire)
1004 baseui = self._factory._create_config(wire['config'])
1004 baseui = self._factory._create_config(wire['config'])
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1006
1006
1007 @reraise_safe_exceptions
1007 @reraise_safe_exceptions
1008 def install_hooks(self, wire, force=False):
1008 def install_hooks(self, wire, force=False):
1009 # we don't need any special hooks for Mercurial
1009 # we don't need any special hooks for Mercurial
1010 pass
1010 pass
1011
1011
1012 @reraise_safe_exceptions
1012 @reraise_safe_exceptions
1013 def get_hooks_info(self, wire):
1013 def get_hooks_info(self, wire):
1014 return {
1014 return {
1015 'pre_version': vcsserver.__version__,
1015 'pre_version': vcsserver.__version__,
1016 'post_version': vcsserver.__version__,
1016 'post_version': vcsserver.__version__,
1017 }
1017 }
1018
1018
1019 @reraise_safe_exceptions
1019 @reraise_safe_exceptions
1020 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1020 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1021 archive_dir_name, commit_id):
1021 archive_dir_name, commit_id):
1022
1022
1023 def file_walker(_commit_id, path):
1023 def file_walker(_commit_id, path):
1024 repo = self._factory.repo(wire)
1024 repo = self._factory.repo(wire)
1025 ctx = repo[_commit_id]
1025 ctx = repo[_commit_id]
1026 is_root = path in ['', '/']
1026 is_root = path in ['', '/']
1027 if is_root:
1027 if is_root:
1028 matcher = alwaysmatcher(badfn=None)
1028 matcher = alwaysmatcher(badfn=None)
1029 else:
1029 else:
1030 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1030 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1031 file_iter = ctx.manifest().walk(matcher)
1031 file_iter = ctx.manifest().walk(matcher)
1032
1032
1033 for fn in file_iter:
1033 for fn in file_iter:
1034 file_path = fn
1034 file_path = fn
1035 flags = ctx.flags(fn)
1035 flags = ctx.flags(fn)
1036 mode = b'x' in flags and 0o755 or 0o644
1036 mode = b'x' in flags and 0o755 or 0o644
1037 is_link = b'l' in flags
1037 is_link = b'l' in flags
1038
1038
1039 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1039 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1040
1040
1041 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1041 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1042 archive_dir_name, commit_id)
1042 archive_dir_name, commit_id)
1043
1043
@@ -1,862 +1,862 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 import time
22 import time
23 from urllib2 import URLError
23 from urllib2 import URLError
24 import urlparse
24 import urlparse
25 import logging
25 import logging
26 import posixpath as vcspath
26 import posixpath as vcspath
27 import StringIO
27 import StringIO
28 import urllib
28 import urllib
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
41 from vcsserver.utils import safe_str
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 svn_compatible_versions_map = {
47 svn_compatible_versions_map = {
48 'pre-1.4-compatible': '1.3',
48 'pre-1.4-compatible': '1.3',
49 'pre-1.5-compatible': '1.4',
49 'pre-1.5-compatible': '1.4',
50 'pre-1.6-compatible': '1.5',
50 'pre-1.6-compatible': '1.5',
51 'pre-1.8-compatible': '1.7',
51 'pre-1.8-compatible': '1.7',
52 'pre-1.9-compatible': '1.8',
52 'pre-1.9-compatible': '1.8',
53 }
53 }
54
54
55 current_compatible_version = '1.12'
55 current_compatible_version = '1.12'
56
56
57
57
58 def reraise_safe_exceptions(func):
58 def reraise_safe_exceptions(func):
59 """Decorator for converting svn exceptions to something neutral."""
59 """Decorator for converting svn exceptions to something neutral."""
60 def wrapper(*args, **kwargs):
60 def wrapper(*args, **kwargs):
61 try:
61 try:
62 return func(*args, **kwargs)
62 return func(*args, **kwargs)
63 except Exception as e:
63 except Exception as e:
64 if not hasattr(e, '_vcs_kind'):
64 if not hasattr(e, '_vcs_kind'):
65 log.exception("Unhandled exception in svn remote call")
65 log.exception("Unhandled exception in svn remote call")
66 raise_from_original(exceptions.UnhandledException(e))
66 raise_from_original(exceptions.UnhandledException(e))
67 raise
67 raise
68 return wrapper
68 return wrapper
69
69
70
70
71 class SubversionFactory(RepoFactory):
71 class SubversionFactory(RepoFactory):
72 repo_type = 'svn'
72 repo_type = 'svn'
73
73
74 def _create_repo(self, wire, create, compatible_version):
74 def _create_repo(self, wire, create, compatible_version):
75 path = svn.core.svn_path_canonicalize(wire['path'])
75 path = svn.core.svn_path_canonicalize(wire['path'])
76 if create:
76 if create:
77 fs_config = {'compatible-version': current_compatible_version}
77 fs_config = {'compatible-version': current_compatible_version}
78 if compatible_version:
78 if compatible_version:
79
79
80 compatible_version_string = \
80 compatible_version_string = \
81 svn_compatible_versions_map.get(compatible_version) \
81 svn_compatible_versions_map.get(compatible_version) \
82 or compatible_version
82 or compatible_version
83 fs_config['compatible-version'] = compatible_version_string
83 fs_config['compatible-version'] = compatible_version_string
84
84
85 log.debug('Create SVN repo with config "%s"', fs_config)
85 log.debug('Create SVN repo with config "%s"', fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
87 else:
87 else:
88 repo = svn.repos.open(path)
88 repo = svn.repos.open(path)
89
89
90 log.debug('Got SVN object: %s', repo)
90 log.debug('Got SVN object: %s', repo)
91 return repo
91 return repo
92
92
93 def repo(self, wire, create=False, compatible_version=None):
93 def repo(self, wire, create=False, compatible_version=None):
94 """
94 """
95 Get a repository instance for the given path.
95 Get a repository instance for the given path.
96 """
96 """
97 return self._create_repo(wire, create, compatible_version)
97 return self._create_repo(wire, create, compatible_version)
98
98
99
99
100 NODE_TYPE_MAPPING = {
100 NODE_TYPE_MAPPING = {
101 svn.core.svn_node_file: 'file',
101 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_dir: 'dir',
102 svn.core.svn_node_dir: 'dir',
103 }
103 }
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
108 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
109 self._factory = factory
110 # TODO: Remove once we do not use internal Mercurial objects anymore
110 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # for subversion
111 # for subversion
112 self._hg_factory = hg_factory
112 self._hg_factory = hg_factory
113
113
114 @reraise_safe_exceptions
114 @reraise_safe_exceptions
115 def discover_svn_version(self):
115 def discover_svn_version(self):
116 try:
116 try:
117 import svn.core
117 import svn.core
118 svn_ver = svn.core.SVN_VERSION
118 svn_ver = svn.core.SVN_VERSION
119 except ImportError:
119 except ImportError:
120 svn_ver = None
120 svn_ver = None
121 return svn_ver
121 return svn_ver
122
122
123 @reraise_safe_exceptions
123 @reraise_safe_exceptions
124 def is_empty(self, wire):
124 def is_empty(self, wire):
125
125
126 try:
126 try:
127 return self.lookup(wire, -1) == 0
127 return self.lookup(wire, -1) == 0
128 except Exception:
128 except Exception:
129 log.exception("failed to read object_store")
129 log.exception("failed to read object_store")
130 return False
130 return False
131
131
132 def check_url(self, url, config_items):
132 def check_url(self, url, config_items):
133 # this can throw exception if not installed, but we detect this
133 # this can throw exception if not installed, but we detect this
134 from hgsubversion import svnrepo
134 from hgsubversion import svnrepo
135
135
136 baseui = self._hg_factory._create_config(config_items)
136 baseui = self._hg_factory._create_config(config_items)
137 # uuid function get's only valid UUID from proper repo, else
137 # uuid function get's only valid UUID from proper repo, else
138 # throws exception
138 # throws exception
139 try:
139 try:
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
141 except Exception:
141 except Exception:
142 tb = traceback.format_exc()
142 tb = traceback.format_exc()
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
144 raise URLError(
144 raise URLError(
145 '"%s" is not a valid Subversion source url.' % (url, ))
145 '"%s" is not a valid Subversion source url.' % (url, ))
146 return True
146 return True
147
147
148 def is_path_valid_repository(self, wire, path):
148 def is_path_valid_repository(self, wire, path):
149
149
150 # NOTE(marcink): short circuit the check for SVN repo
150 # NOTE(marcink): short circuit the check for SVN repo
151 # the repos.open might be expensive to check, but we have one cheap
151 # the repos.open might be expensive to check, but we have one cheap
152 # pre condition that we can use, to check for 'format' file
152 # pre condition that we can use, to check for 'format' file
153
153
154 if not os.path.isfile(os.path.join(path, 'format')):
154 if not os.path.isfile(os.path.join(path, 'format')):
155 return False
155 return False
156
156
157 try:
157 try:
158 svn.repos.open(path)
158 svn.repos.open(path)
159 except svn.core.SubversionException:
159 except svn.core.SubversionException:
160 tb = traceback.format_exc()
160 tb = traceback.format_exc()
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
162 return False
162 return False
163 return True
163 return True
164
164
    @reraise_safe_exceptions
    def verify(self, wire,):
        """
        Run ``svnadmin info`` against the repository behind `wire` and
        return its stdout output.

        :raises Exception: if `wire['path']` is not a valid SVN repository.
        """
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        cmd = ['svnadmin', 'info', repo_path]
        stdout, stderr = subprocessio.run_command(cmd)
        return stdout
175
175
176 def lookup(self, wire, revision):
176 def lookup(self, wire, revision):
177 if revision not in [-1, None, 'HEAD']:
177 if revision not in [-1, None, 'HEAD']:
178 raise NotImplementedError
178 raise NotImplementedError
179 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
180 fs_ptr = svn.repos.fs(repo)
180 fs_ptr = svn.repos.fs(repo)
181 head = svn.fs.youngest_rev(fs_ptr)
181 head = svn.fs.youngest_rev(fs_ptr)
182 return head
182 return head
183
183
184 def lookup_interval(self, wire, start_ts, end_ts):
184 def lookup_interval(self, wire, start_ts, end_ts):
185 repo = self._factory.repo(wire)
185 repo = self._factory.repo(wire)
186 fsobj = svn.repos.fs(repo)
186 fsobj = svn.repos.fs(repo)
187 start_rev = None
187 start_rev = None
188 end_rev = None
188 end_rev = None
189 if start_ts:
189 if start_ts:
190 start_ts_svn = apr_time_t(start_ts)
190 start_ts_svn = apr_time_t(start_ts)
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
192 else:
192 else:
193 start_rev = 1
193 start_rev = 1
194 if end_ts:
194 if end_ts:
195 end_ts_svn = apr_time_t(end_ts)
195 end_ts_svn = apr_time_t(end_ts)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
197 else:
197 else:
198 end_rev = svn.fs.youngest_rev(fsobj)
198 end_rev = svn.fs.youngest_rev(fsobj)
199 return start_rev, end_rev
199 return start_rev, end_rev
200
200
    def revision_properties(self, wire, revision):
        """
        Return the revision property list (e.g. svn:author, svn:date,
        svn:log) of `revision`, optionally served from the cache region.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision_properties(_repo_id, _revision):
            # NOTE(review): reads the closure `revision` rather than the
            # cache-key argument `_revision`; identical values at call time,
            # but using `_revision` would keep the cached fn self-contained.
            repo = self._factory.repo(wire)
            fs_ptr = svn.repos.fs(repo)
            return svn.fs.revision_proplist(fs_ptr, revision)
        return _revision_properties(repo_id, revision)
211
211
    def revision_changes(self, wire, revision):
        """
        Replay `revision` against a ChangeCollector editor and return the
        touched file paths grouped by action:
        ``{'added': [...], 'changed': [...], 'removed': [...]}``.

        Directory nodes are skipped entirely.

        :raises NotImplementedError: for change actions other than
            ADD / MODIFY / REPLACE / DELETE.
        """
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        # drive a replay of the revision through a collecting editor; we
        # only need the change records, not the actual content deltas
        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.iteritems():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.

            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action %s not supported on path %s" % (
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes
255
255
    @reraise_safe_exceptions
    def node_history(self, wire, path, revision, limit):
        """
        Return the revisions in which `path` changed, walking its node
        history starting from `revision`, optionally capped at `limit`
        entries. Copy boundaries are not crossed (cross_copies=False).
        The result may be served from the cache region.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
            cross_copies = False
            repo = self._factory.repo(wire)
            fsobj = svn.repos.fs(repo)
            rev_root = svn.fs.revision_root(fsobj, revision)

            history_revisions = []
            history = svn.fs.node_history(rev_root, path)
            # presumably the initial history handle must be advanced once
            # before a location can be read — TODO confirm against the
            # svn_fs_history_prev API docs
            history = svn.fs.history_prev(history, cross_copies)
            while history:
                __, node_revision = svn.fs.history_location(history)
                history_revisions.append(node_revision)
                if limit and len(history_revisions) >= limit:
                    break
                history = svn.fs.history_prev(history, cross_copies)
            return history_revisions
        return _assert_correct_path(context_uid, repo_id, path, revision, limit)
278
278
    def node_properties(self, wire, path, revision):
        """
        Return the svn property dict of `path` at `revision`, optionally
        served from the cache region.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_properties(_repo_id, _path, _revision):
            # NOTE(review): reads closure `revision`/`path` instead of the
            # cache-key args `_revision`/`_path`; same values here, but
            # worth unifying for clarity.
            repo = self._factory.repo(wire)
            fsobj = svn.repos.fs(repo)
            rev_root = svn.fs.revision_root(fsobj, revision)
            return svn.fs.node_proplist(rev_root, path)
        return _node_properties(repo_id, path, revision)
289
289
    def file_annotate(self, wire, path, revision):
        """
        Annotate (blame) `path` at `revision`.

        :return: list of ``(line_no, revision, line)`` tuples as delivered
            by the blame receiver.
        :raises Exception: when blame is not supported for the path or the
            file does not exist there.
        """
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        # blame the full history from revision 0 up to (and pegged at)
        # the requested revision
        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            # author/date/pool are intentionally discarded
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations
316
316
317 def get_node_type(self, wire, path, revision=None):
317 def get_node_type(self, wire, path, revision=None):
318
318
319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self.region(wire)
320 region = self._region(wire)
321 @region.conditional_cache_on_arguments(condition=cache_on)
321 @region.conditional_cache_on_arguments(condition=cache_on)
322 def _get_node_type(_repo_id, _path, _revision):
322 def _get_node_type(_repo_id, _path, _revision):
323 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
324 fs_ptr = svn.repos.fs(repo)
324 fs_ptr = svn.repos.fs(repo)
325 if _revision is None:
325 if _revision is None:
326 _revision = svn.fs.youngest_rev(fs_ptr)
326 _revision = svn.fs.youngest_rev(fs_ptr)
327 root = svn.fs.revision_root(fs_ptr, _revision)
327 root = svn.fs.revision_root(fs_ptr, _revision)
328 node = svn.fs.check_path(root, path)
328 node = svn.fs.check_path(root, path)
329 return NODE_TYPE_MAPPING.get(node, None)
329 return NODE_TYPE_MAPPING.get(node, None)
330 return _get_node_type(repo_id, path, revision)
330 return _get_node_type(repo_id, path, revision)
331
331
332 def get_nodes(self, wire, path, revision=None):
332 def get_nodes(self, wire, path, revision=None):
333
333
334 cache_on, context_uid, repo_id = self._cache_on(wire)
334 cache_on, context_uid, repo_id = self._cache_on(wire)
335 region = self.region(wire)
335 region = self._region(wire)
336 @region.conditional_cache_on_arguments(condition=cache_on)
336 @region.conditional_cache_on_arguments(condition=cache_on)
337 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
338 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
339 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
340 if _revision is None:
340 if _revision is None:
341 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
342 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
343 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
344 result = []
344 result = []
345 for entry_path, entry_info in entries.iteritems():
345 for entry_path, entry_info in entries.iteritems():
346 result.append(
346 result.append(
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 return result
348 return result
349 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
350
350
    def get_file_content(self, wire, path, rev=None):
        """
        Return the raw content of `path` at revision `rev` (youngest
        revision when `rev` is None). Not cached.
        """
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            # NOTE(review): other methods here call svn.fs.youngest_rev —
            # confirm youngest_revision is a valid alias in these bindings
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()
359
359
    def get_file_size(self, wire, path, revision=None):
        """
        Return the size in bytes of `path` at `revision` (youngest revision
        when None). Result may be served from the cache region.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_file_size(_repo_id, _path, _revision):
            repo = self._factory.repo(wire)
            fsobj = svn.repos.fs(repo)
            if _revision is None:
                # NOTE(review): other methods use svn.fs.youngest_rev —
                # confirm youngest_revision is an alias in these bindings
                _revision = svn.fs.youngest_revision(fsobj)
            root = svn.fs.revision_root(fsobj, _revision)
            # NOTE(review): uses the closure `path` instead of cache-key
            # `_path`; same value here, but worth unifying.
            size = svn.fs.file_length(root, path)
            return size
        return _get_file_size(repo_id, path, revision)
374
374
375 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
377 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
378 compatible_version=compatible_version)
378 compatible_version=compatible_version)
379
379
380 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
381 obj = urlparse.urlparse(src_url)
381 obj = urlparse.urlparse(src_url)
382 username = obj.username or None
382 username = obj.username or None
383 password = obj.password or None
383 password = obj.password or None
384 return username, password, src_url
384 return username, password, src_url
385
385
    def import_remote_repository(self, wire, src_url):
        """
        Import a remote repository into the local one by piping
        ``svnrdump dump`` into ``svnadmin load``.

        :raises Exception: when the local path is not a valid repository,
            when the dump fails (with a categorized reason such as
            INVALID_CERTIFICATE), or when the load fails.
        """
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        # svnrdump's stdout is fed directly into svnadmin load's stdin
        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        # NOTE(review): stderr is a PIPE that is only drained after wait();
        # a very chatty svnrdump could fill the OS pipe buffer and deadlock
        # here — consider communicate() or redirecting stderr to a file.
        rdump.wait()
        load.wait()

        log.debug('Return process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = 'UNKNOWN:{}'.format(errors)
            raise Exception(
                'Failed to dump the remote repository from %s. Reason:%s' % (
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))
429
429
    def commit(self, wire, message, author, timestamp, updated, removed):
        """
        Create a new revision from the given node changes.

        :param message: commit message (must be str).
        :param author: commit author (must be str).
        :param timestamp: optional commit time; when given, the new
            revision's svn:date property is rewritten after the commit.
        :param updated: nodes to add/modify, each handled by
            TxnNodeProcessor.update().
        :param removed: nodes to delete, each handled by
            TxnNodeProcessor.remove().
        :return: the id of the newly committed revision.
        """
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        # open a commit transaction on top of the youngest revision
        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # backdate the revision to the requested timestamp
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id
455
455
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
457 ignore_whitespace=False, context=3):
457 ignore_whitespace=False, context=3):
458
458
459 wire.update(cache=False)
459 wire.update(cache=False)
460 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
461 diff_creator = SvnDiffer(
461 diff_creator = SvnDiffer(
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
463 try:
463 try:
464 return diff_creator.generate_diff()
464 return diff_creator.generate_diff()
465 except svn.core.SubversionException as e:
465 except svn.core.SubversionException as e:
466 log.exception(
466 log.exception(
467 "Error during diff operation operation. "
467 "Error during diff operation operation. "
468 "Path might not exist %s, %s" % (path1, path2))
468 "Path might not exist %s, %s" % (path1, path2))
469 return ""
469 return ""
470
470
    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # Subversion has no largefiles/LFS concept in this backend,
        # so nothing ever qualifies as a "large file".
        return False
474
474
    @reraise_safe_exceptions
    def is_binary(self, wire, rev, path):
        """
        Heuristically decide whether `path` at `rev` is binary by checking
        for a NUL byte in its content. Result may be cached.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _rev, _path):
            # NOTE(review): reads closure `path`/`rev` instead of the
            # cache-key args `_path`/`_rev`; same values here, but worth
            # unifying.
            raw_bytes = self.get_file_content(wire, path, rev)
            # empty content short-circuits to the falsy '' rather than False
            return raw_bytes and '\0' in raw_bytes

        return _is_binary(repo_id, rev, path)
486
486
    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
        """
        Execute an svn binary command through SubprocessIOChunker.

        Special opts (popped before being forwarded to the chunker):
          * ``_safe``: when True, a failure to spawn the process returns
            ``('', error-text)`` instead of raising.
          * ``extra_env``: dict merged into a copy of os.environ.

        :return: ``(stdout, stderr)`` as joined strings.
        :raises exceptions.VcsException: when the command cannot be run
            and ``_safe`` was not set.
        """
        path = wire.get('path', None)

        # run inside the repository directory when it exists
        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = opts.pop('_safe', False)

        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            # caller-supplied opts may override env/shell defaults
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            if safe_call:
                return '', safe_str(err).strip()
            else:
                cmd = ' '.join(cmd)  # human friendly CMD
                tb_err = ("Couldn't run svn command (%s).\n"
                          "Original error was:%s\n"
                          "Call options:%s\n"
                          % (cmd, err, _opts))
                log.exception(tb_err)
                raise exceptions.VcsException()(tb_err)
517
517
518 @reraise_safe_exceptions
518 @reraise_safe_exceptions
519 def install_hooks(self, wire, force=False):
519 def install_hooks(self, wire, force=False):
520 from vcsserver.hook_utils import install_svn_hooks
520 from vcsserver.hook_utils import install_svn_hooks
521 repo_path = wire['path']
521 repo_path = wire['path']
522 binary_dir = settings.BINARY_DIR
522 binary_dir = settings.BINARY_DIR
523 executable = None
523 executable = None
524 if binary_dir:
524 if binary_dir:
525 executable = os.path.join(binary_dir, 'python')
525 executable = os.path.join(binary_dir, 'python')
526 return install_svn_hooks(
526 return install_svn_hooks(
527 repo_path, executable=executable, force_create=force)
527 repo_path, executable=executable, force_create=force)
528
528
529 @reraise_safe_exceptions
529 @reraise_safe_exceptions
530 def get_hooks_info(self, wire):
530 def get_hooks_info(self, wire):
531 from vcsserver.hook_utils import (
531 from vcsserver.hook_utils import (
532 get_svn_pre_hook_version, get_svn_post_hook_version)
532 get_svn_pre_hook_version, get_svn_post_hook_version)
533 repo_path = wire['path']
533 repo_path = wire['path']
534 return {
534 return {
535 'pre_version': get_svn_pre_hook_version(repo_path),
535 'pre_version': get_svn_pre_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
537 }
537 }
538
538
    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):
        """
        Build an archive of the repository tree at `commit_id` by feeding
        an ArchiveNode stream into the module-level archive_repo() helper.

        :param archive_at_path: subtree to archive from.
        :param kind: archive format, forwarded to the helper.
        """

        def walk_tree(root, root_dir, _commit_id):
            """
            Special recursive svn repo walker
            """
            # yields (path, data, node_type) triples; directories are
            # emitted first, then their entries, depth-first

            filemode_default = 0o100644
            filemode_executable = 0o100755

            file_iter = svn.fs.dir_entries(root, root_dir)
            for f_name in file_iter:
                f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)

                if f_type == 'dir':
                    # return only DIR, and then all entries in that dir
                    yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
                    new_root = os.path.join(root_dir, f_name)
                    for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
                        yield _f_name, _f_data, _f_type
                else:
                    f_path = os.path.join(root_dir, f_name).rstrip('/')
                    prop_list = svn.fs.node_proplist(root, f_path)

                    # svn:executable property marks executables,
                    # svn:special marks symlinks
                    f_mode = filemode_default
                    if prop_list.get('svn:executable'):
                        f_mode = filemode_executable

                    f_is_link = False
                    if prop_list.get('svn:special'):
                        f_is_link = True

                    data = {
                        'is_link': f_is_link,
                        'mode': f_mode,
                        'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
                    }

                    yield f_path, data, f_type

        def file_walker(_commit_id, path):
            repo = self._factory.repo(wire)
            # NOTE(review): uses the closure `commit_id`, not `_commit_id`;
            # same value at call time, but worth unifying.
            root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))

            def no_content():
                raise NoContentException()

            for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
                file_path = f_name

                if f_type == 'dir':
                    mode = f_data['mode']
                    yield ArchiveNode(file_path, mode, False, no_content)
                else:
                    mode = f_data['mode']
                    is_link = f_data['is_link']
                    data_stream = f_data['content_stream']
                    yield ArchiveNode(file_path, mode, is_link, data_stream)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)
602
602
603
603
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # class-level default; presumably flipped when binary content is
    # detected during diff generation (set outside this excerpt) —
    # TODO confirm
    binary_content = False
610
610
    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        """
        Resolve both sides of the diff to revision roots and node kinds,
        and validate that the two sides are compatible.
        """
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        # target side of the diff
        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        # source side; falls back to the target path when not given
        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        # fail early on incompatible node kinds
        self._validate()
631
631
632 def _validate(self):
632 def _validate(self):
633 if (self.tgt_kind != svn.core.svn_node_none and
633 if (self.tgt_kind != svn.core.svn_node_none and
634 self.src_kind != svn.core.svn_node_none and
634 self.src_kind != svn.core.svn_node_none and
635 self.src_kind != self.tgt_kind):
635 self.src_kind != self.tgt_kind):
636 # TODO: johbo: proper error handling
636 # TODO: johbo: proper error handling
637 raise Exception(
637 raise Exception(
638 "Source and target are not compatible for diff generation. "
638 "Source and target are not compatible for diff generation. "
639 "Source type: %s, target type: %s" %
639 "Source type: %s, target type: %s" %
640 (self.src_kind, self.tgt_kind))
640 (self.src_kind, self.tgt_kind))
641
641
642 def generate_diff(self):
642 def generate_diff(self):
643 buf = StringIO.StringIO()
643 buf = StringIO.StringIO()
644 if self.tgt_kind == svn.core.svn_node_dir:
644 if self.tgt_kind == svn.core.svn_node_dir:
645 self._generate_dir_diff(buf)
645 self._generate_dir_diff(buf)
646 else:
646 else:
647 self._generate_file_diff(buf)
647 self._generate_file_diff(buf)
648 return buf.getvalue()
648 return buf.getvalue()
649
649
650 def _generate_dir_diff(self, buf):
650 def _generate_dir_diff(self, buf):
651 editor = DiffChangeEditor()
651 editor = DiffChangeEditor()
652 editor_ptr, editor_baton = svn.delta.make_editor(editor)
652 editor_ptr, editor_baton = svn.delta.make_editor(editor)
653 svn.repos.dir_delta2(
653 svn.repos.dir_delta2(
654 self.src_root,
654 self.src_root,
655 self.src_path,
655 self.src_path,
656 '', # src_entry
656 '', # src_entry
657 self.tgt_root,
657 self.tgt_root,
658 self.tgt_path,
658 self.tgt_path,
659 editor_ptr, editor_baton,
659 editor_ptr, editor_baton,
660 authorization_callback_allow_all,
660 authorization_callback_allow_all,
661 False, # text_deltas
661 False, # text_deltas
662 svn.core.svn_depth_infinity, # depth
662 svn.core.svn_depth_infinity, # depth
663 False, # entry_props
663 False, # entry_props
664 False, # ignore_ancestry
664 False, # ignore_ancestry
665 )
665 )
666
666
667 for path, __, change in sorted(editor.changes):
667 for path, __, change in sorted(editor.changes):
668 self._generate_node_diff(
668 self._generate_node_diff(
669 buf, change, path, self.tgt_path, path, self.src_path)
669 buf, change, path, self.tgt_path, path, self.src_path)
670
670
671 def _generate_file_diff(self, buf):
671 def _generate_file_diff(self, buf):
672 change = None
672 change = None
673 if self.src_kind == svn.core.svn_node_none:
673 if self.src_kind == svn.core.svn_node_none:
674 change = "add"
674 change = "add"
675 elif self.tgt_kind == svn.core.svn_node_none:
675 elif self.tgt_kind == svn.core.svn_node_none:
676 change = "delete"
676 change = "delete"
677 tgt_base, tgt_path = vcspath.split(self.tgt_path)
677 tgt_base, tgt_path = vcspath.split(self.tgt_path)
678 src_base, src_path = vcspath.split(self.src_path)
678 src_base, src_path = vcspath.split(self.src_path)
679 self._generate_node_diff(
679 self._generate_node_diff(
680 buf, change, tgt_path, tgt_base, src_path, src_base)
680 buf, change, tgt_path, tgt_base, src_path, src_base)
681
681
682 def _generate_node_diff(
682 def _generate_node_diff(
683 self, buf, change, tgt_path, tgt_base, src_path, src_base):
683 self, buf, change, tgt_path, tgt_base, src_path, src_base):
684
684
685 if self.src_rev == self.tgt_rev and tgt_base == src_base:
685 if self.src_rev == self.tgt_rev and tgt_base == src_base:
686 # makes consistent behaviour with git/hg to return empty diff if
686 # makes consistent behaviour with git/hg to return empty diff if
687 # we compare same revisions
687 # we compare same revisions
688 return
688 return
689
689
690 tgt_full_path = vcspath.join(tgt_base, tgt_path)
690 tgt_full_path = vcspath.join(tgt_base, tgt_path)
691 src_full_path = vcspath.join(src_base, src_path)
691 src_full_path = vcspath.join(src_base, src_path)
692
692
693 self.binary_content = False
693 self.binary_content = False
694 mime_type = self._get_mime_type(tgt_full_path)
694 mime_type = self._get_mime_type(tgt_full_path)
695
695
696 if mime_type and not mime_type.startswith('text'):
696 if mime_type and not mime_type.startswith('text'):
697 self.binary_content = True
697 self.binary_content = True
698 buf.write("=" * 67 + '\n')
698 buf.write("=" * 67 + '\n')
699 buf.write("Cannot display: file marked as a binary type.\n")
699 buf.write("Cannot display: file marked as a binary type.\n")
700 buf.write("svn:mime-type = %s\n" % mime_type)
700 buf.write("svn:mime-type = %s\n" % mime_type)
701 buf.write("Index: %s\n" % (tgt_path, ))
701 buf.write("Index: %s\n" % (tgt_path, ))
702 buf.write("=" * 67 + '\n')
702 buf.write("=" * 67 + '\n')
703 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
703 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
704 'tgt_path': tgt_path})
704 'tgt_path': tgt_path})
705
705
706 if change == 'add':
706 if change == 'add':
707 # TODO: johbo: SVN is missing a zero here compared to git
707 # TODO: johbo: SVN is missing a zero here compared to git
708 buf.write("new file mode 10644\n")
708 buf.write("new file mode 10644\n")
709
709
710 #TODO(marcink): intro to binary detection of svn patches
710 #TODO(marcink): intro to binary detection of svn patches
711 # if self.binary_content:
711 # if self.binary_content:
712 # buf.write('GIT binary patch\n')
712 # buf.write('GIT binary patch\n')
713
713
714 buf.write("--- /dev/null\t(revision 0)\n")
714 buf.write("--- /dev/null\t(revision 0)\n")
715 src_lines = []
715 src_lines = []
716 else:
716 else:
717 if change == 'delete':
717 if change == 'delete':
718 buf.write("deleted file mode 10644\n")
718 buf.write("deleted file mode 10644\n")
719
719
720 #TODO(marcink): intro to binary detection of svn patches
720 #TODO(marcink): intro to binary detection of svn patches
721 # if self.binary_content:
721 # if self.binary_content:
722 # buf.write('GIT binary patch\n')
722 # buf.write('GIT binary patch\n')
723
723
724 buf.write("--- a/%s\t(revision %s)\n" % (
724 buf.write("--- a/%s\t(revision %s)\n" % (
725 src_path, self.src_rev))
725 src_path, self.src_rev))
726 src_lines = self._svn_readlines(self.src_root, src_full_path)
726 src_lines = self._svn_readlines(self.src_root, src_full_path)
727
727
728 if change == 'delete':
728 if change == 'delete':
729 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
729 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
730 tgt_lines = []
730 tgt_lines = []
731 else:
731 else:
732 buf.write("+++ b/%s\t(revision %s)\n" % (
732 buf.write("+++ b/%s\t(revision %s)\n" % (
733 tgt_path, self.tgt_rev))
733 tgt_path, self.tgt_rev))
734 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
734 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
735
735
736 if not self.binary_content:
736 if not self.binary_content:
737 udiff = svn_diff.unified_diff(
737 udiff = svn_diff.unified_diff(
738 src_lines, tgt_lines, context=self.context,
738 src_lines, tgt_lines, context=self.context,
739 ignore_blank_lines=self.ignore_whitespace,
739 ignore_blank_lines=self.ignore_whitespace,
740 ignore_case=False,
740 ignore_case=False,
741 ignore_space_changes=self.ignore_whitespace)
741 ignore_space_changes=self.ignore_whitespace)
742 buf.writelines(udiff)
742 buf.writelines(udiff)
743
743
744 def _get_mime_type(self, path):
744 def _get_mime_type(self, path):
745 try:
745 try:
746 mime_type = svn.fs.node_prop(
746 mime_type = svn.fs.node_prop(
747 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
747 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
748 except svn.core.SubversionException:
748 except svn.core.SubversionException:
749 mime_type = svn.fs.node_prop(
749 mime_type = svn.fs.node_prop(
750 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
750 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
751 return mime_type
751 return mime_type
752
752
753 def _svn_readlines(self, fs_root, node_path):
753 def _svn_readlines(self, fs_root, node_path):
754 if self.binary_content:
754 if self.binary_content:
755 return []
755 return []
756 node_kind = svn.fs.check_path(fs_root, node_path)
756 node_kind = svn.fs.check_path(fs_root, node_path)
757 if node_kind not in (
757 if node_kind not in (
758 svn.core.svn_node_file, svn.core.svn_node_symlink):
758 svn.core.svn_node_file, svn.core.svn_node_symlink):
759 return []
759 return []
760 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
760 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
761 return content.splitlines(True)
761 return content.splitlines(True)
762
762
763
763
764 class DiffChangeEditor(svn.delta.Editor):
764 class DiffChangeEditor(svn.delta.Editor):
765 """
765 """
766 Records changes between two given revisions
766 Records changes between two given revisions
767 """
767 """
768
768
769 def __init__(self):
769 def __init__(self):
770 self.changes = []
770 self.changes = []
771
771
772 def delete_entry(self, path, revision, parent_baton, pool=None):
772 def delete_entry(self, path, revision, parent_baton, pool=None):
773 self.changes.append((path, None, 'delete'))
773 self.changes.append((path, None, 'delete'))
774
774
775 def add_file(
775 def add_file(
776 self, path, parent_baton, copyfrom_path, copyfrom_revision,
776 self, path, parent_baton, copyfrom_path, copyfrom_revision,
777 file_pool=None):
777 file_pool=None):
778 self.changes.append((path, 'file', 'add'))
778 self.changes.append((path, 'file', 'add'))
779
779
780 def open_file(self, path, parent_baton, base_revision, file_pool=None):
780 def open_file(self, path, parent_baton, base_revision, file_pool=None):
781 self.changes.append((path, 'file', 'change'))
781 self.changes.append((path, 'file', 'change'))
782
782
783
783
784 def authorization_callback_allow_all(root, path, pool):
784 def authorization_callback_allow_all(root, path, pool):
785 return True
785 return True
786
786
787
787
788 class TxnNodeProcessor(object):
788 class TxnNodeProcessor(object):
789 """
789 """
790 Utility to process the change of one node within a transaction root.
790 Utility to process the change of one node within a transaction root.
791
791
792 It encapsulates the knowledge of how to add, update or remove
792 It encapsulates the knowledge of how to add, update or remove
793 a node for a given transaction root. The purpose is to support the method
793 a node for a given transaction root. The purpose is to support the method
794 `SvnRemote.commit`.
794 `SvnRemote.commit`.
795 """
795 """
796
796
797 def __init__(self, node, txn_root):
797 def __init__(self, node, txn_root):
798 assert isinstance(node['path'], str)
798 assert isinstance(node['path'], str)
799
799
800 self.node = node
800 self.node = node
801 self.txn_root = txn_root
801 self.txn_root = txn_root
802
802
803 def update(self):
803 def update(self):
804 self._ensure_parent_dirs()
804 self._ensure_parent_dirs()
805 self._add_file_if_node_does_not_exist()
805 self._add_file_if_node_does_not_exist()
806 self._update_file_content()
806 self._update_file_content()
807 self._update_file_properties()
807 self._update_file_properties()
808
808
809 def remove(self):
809 def remove(self):
810 svn.fs.delete(self.txn_root, self.node['path'])
810 svn.fs.delete(self.txn_root, self.node['path'])
811 # TODO: Clean up directory if empty
811 # TODO: Clean up directory if empty
812
812
813 def _ensure_parent_dirs(self):
813 def _ensure_parent_dirs(self):
814 curdir = vcspath.dirname(self.node['path'])
814 curdir = vcspath.dirname(self.node['path'])
815 dirs_to_create = []
815 dirs_to_create = []
816 while not self._svn_path_exists(curdir):
816 while not self._svn_path_exists(curdir):
817 dirs_to_create.append(curdir)
817 dirs_to_create.append(curdir)
818 curdir = vcspath.dirname(curdir)
818 curdir = vcspath.dirname(curdir)
819
819
820 for curdir in reversed(dirs_to_create):
820 for curdir in reversed(dirs_to_create):
821 log.debug('Creating missing directory "%s"', curdir)
821 log.debug('Creating missing directory "%s"', curdir)
822 svn.fs.make_dir(self.txn_root, curdir)
822 svn.fs.make_dir(self.txn_root, curdir)
823
823
824 def _svn_path_exists(self, path):
824 def _svn_path_exists(self, path):
825 path_status = svn.fs.check_path(self.txn_root, path)
825 path_status = svn.fs.check_path(self.txn_root, path)
826 return path_status != svn.core.svn_node_none
826 return path_status != svn.core.svn_node_none
827
827
828 def _add_file_if_node_does_not_exist(self):
828 def _add_file_if_node_does_not_exist(self):
829 kind = svn.fs.check_path(self.txn_root, self.node['path'])
829 kind = svn.fs.check_path(self.txn_root, self.node['path'])
830 if kind == svn.core.svn_node_none:
830 if kind == svn.core.svn_node_none:
831 svn.fs.make_file(self.txn_root, self.node['path'])
831 svn.fs.make_file(self.txn_root, self.node['path'])
832
832
833 def _update_file_content(self):
833 def _update_file_content(self):
834 assert isinstance(self.node['content'], str)
834 assert isinstance(self.node['content'], str)
835 handler, baton = svn.fs.apply_textdelta(
835 handler, baton = svn.fs.apply_textdelta(
836 self.txn_root, self.node['path'], None, None)
836 self.txn_root, self.node['path'], None, None)
837 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
837 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
838
838
839 def _update_file_properties(self):
839 def _update_file_properties(self):
840 properties = self.node.get('properties', {})
840 properties = self.node.get('properties', {})
841 for key, value in properties.iteritems():
841 for key, value in properties.iteritems():
842 svn.fs.change_node_prop(
842 svn.fs.change_node_prop(
843 self.txn_root, self.node['path'], key, value)
843 self.txn_root, self.node['path'], key, value)
844
844
845
845
846 def apr_time_t(timestamp):
846 def apr_time_t(timestamp):
847 """
847 """
848 Convert a Python timestamp into APR timestamp type apr_time_t
848 Convert a Python timestamp into APR timestamp type apr_time_t
849 """
849 """
850 return timestamp * 1E6
850 return timestamp * 1E6
851
851
852
852
853 def svn_opt_revision_value_t(num):
853 def svn_opt_revision_value_t(num):
854 """
854 """
855 Put `num` into a `svn_opt_revision_value_t` structure.
855 Put `num` into a `svn_opt_revision_value_t` structure.
856 """
856 """
857 value = svn.core.svn_opt_revision_value_t()
857 value = svn.core.svn_opt_revision_value_t()
858 value.number = num
858 value.number = num
859 revision = svn.core.svn_opt_revision_t()
859 revision = svn.core.svn_opt_revision_t()
860 revision.kind = svn.core.svn_opt_revision_number
860 revision.kind = svn.core.svn_opt_revision_number
861 revision.value = value
861 revision.value = value
862 return revision
862 return revision
@@ -1,160 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.pull(
81 self.remote_git.pull(
82 wire={}, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102
102
103 class TestReraiseSafeExceptions(object):
103 class TestReraiseSafeExceptions(object):
104
104
105 def test_method_decorated_with_reraise_safe_exceptions(self):
105 def test_method_decorated_with_reraise_safe_exceptions(self):
106 factory = Mock()
106 factory = Mock()
107 git_remote = git.GitRemote(factory)
107 git_remote = git.GitRemote(factory)
108
108
109 def fake_function():
109 def fake_function():
110 return None
110 return None
111
111
112 decorator = git.reraise_safe_exceptions(fake_function)
112 decorator = git.reraise_safe_exceptions(fake_function)
113
113
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 for method_name, method in methods:
115 for method_name, method in methods:
116 if not method_name.startswith('_'):
116 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
117 assert method.im_func.__code__ == decorator.__code__
117 assert method.im_func.__code__ == decorator.__code__
118
118
119 @pytest.mark.parametrize('side_effect, expected_type', [
119 @pytest.mark.parametrize('side_effect, expected_type', [
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
124 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.HangupException(), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
126 ])
126 ])
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
128 @git.reraise_safe_exceptions
128 @git.reraise_safe_exceptions
129 def fake_method():
129 def fake_method():
130 raise side_effect
130 raise side_effect
131
131
132 with pytest.raises(Exception) as exc_info:
132 with pytest.raises(Exception) as exc_info:
133 fake_method()
133 fake_method()
134 assert type(exc_info.value) == Exception
134 assert type(exc_info.value) == Exception
135 assert exc_info.value._vcs_kind == expected_type
135 assert exc_info.value._vcs_kind == expected_type
136
136
137
137
138 class TestDulwichRepoWrapper(object):
138 class TestDulwichRepoWrapper(object):
139 def test_calls_close_on_delete(self):
139 def test_calls_close_on_delete(self):
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
144 del repo
144 del repo
145 close_mock.assert_called_once_with()
145 close_mock.assert_called_once_with()
146
146
147
147
148 class TestGitFactory(object):
148 class TestGitFactory(object):
149 def test_create_repo_returns_dulwich_wrapper(self):
149 def test_create_repo_returns_dulwich_wrapper(self):
150
150
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
152 mock.side_effect = {'repo_objects': ''}
152 mock.side_effect = {'repo_objects': ''}
153 factory = git.GitFactory()
153 factory = git.GitFactory()
154 wire = {
154 wire = {
155 'path': '/tmp/abcde'
155 'path': '/tmp/abcde'
156 }
156 }
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
158 with isdir_patcher:
158 with isdir_patcher:
159 result = factory._create_repo(wire, True)
159 result = factory._create_repo(wire, True)
160 assert isinstance(result, git.Repo)
160 assert isinstance(result, git.Repo)
@@ -1,108 +1,108 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
29 class TestDiff(object):
29 class TestDiff(object):
30 def test_raising_safe_exception_when_lookup_failed(self):
30 def test_raising_safe_exception_when_lookup_failed(self):
31
31
32 factory = Mock()
32 factory = Mock()
33 hg_remote = hg.HgRemote(factory)
33 hg_remote = hg.HgRemote(factory)
34 with patch('mercurial.patch.diff') as diff_mock:
34 with patch('mercurial.patch.diff') as diff_mock:
35 diff_mock.side_effect = LookupError(
35 diff_mock.side_effect = LookupError(
36 'deadbeef', 'index', 'message')
36 'deadbeef', 'index', 'message')
37 with pytest.raises(Exception) as exc_info:
37 with pytest.raises(Exception) as exc_info:
38 hg_remote.diff(
38 hg_remote.diff(
39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 file_filter=None, opt_git=True, opt_ignorews=True,
40 file_filter=None, opt_git=True, opt_ignorews=True,
41 context=3)
41 context=3)
42 assert type(exc_info.value) == Exception
42 assert type(exc_info.value) == Exception
43 assert exc_info.value._vcs_kind == 'lookup'
43 assert exc_info.value._vcs_kind == 'lookup'
44
44
45
45
46 class TestReraiseSafeExceptions(object):
46 class TestReraiseSafeExceptions(object):
47 def test_method_decorated_with_reraise_safe_exceptions(self):
47 def test_method_decorated_with_reraise_safe_exceptions(self):
48 factory = Mock()
48 factory = Mock()
49 hg_remote = hg.HgRemote(factory)
49 hg_remote = hg.HgRemote(factory)
50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
51 decorator = hg.reraise_safe_exceptions(None)
51 decorator = hg.reraise_safe_exceptions(None)
52 for method_name, method in methods:
52 for method_name, method in methods:
53 if not method_name.startswith('_'):
53 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
54 assert method.im_func.__code__ == decorator.__code__
54 assert method.im_func.__code__ == decorator.__code__
55
55
56 @pytest.mark.parametrize('side_effect, expected_type', [
56 @pytest.mark.parametrize('side_effect, expected_type', [
57 (hgcompat.Abort(), 'abort'),
57 (hgcompat.Abort(), 'abort'),
58 (hgcompat.InterventionRequired(), 'abort'),
58 (hgcompat.InterventionRequired(), 'abort'),
59 (hgcompat.RepoLookupError(), 'lookup'),
59 (hgcompat.RepoLookupError(), 'lookup'),
60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61 (hgcompat.RepoError(), 'error'),
61 (hgcompat.RepoError(), 'error'),
62 (hgcompat.RequirementError(), 'requirement'),
62 (hgcompat.RequirementError(), 'requirement'),
63 ])
63 ])
64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
65 @hg.reraise_safe_exceptions
65 @hg.reraise_safe_exceptions
66 def fake_method():
66 def fake_method():
67 raise side_effect
67 raise side_effect
68
68
69 with pytest.raises(Exception) as exc_info:
69 with pytest.raises(Exception) as exc_info:
70 fake_method()
70 fake_method()
71 assert type(exc_info.value) == Exception
71 assert type(exc_info.value) == Exception
72 assert exc_info.value._vcs_kind == expected_type
72 assert exc_info.value._vcs_kind == expected_type
73
73
74 def test_keeps_original_traceback(self):
74 def test_keeps_original_traceback(self):
75 @hg.reraise_safe_exceptions
75 @hg.reraise_safe_exceptions
76 def fake_method():
76 def fake_method():
77 try:
77 try:
78 raise hgcompat.Abort()
78 raise hgcompat.Abort()
79 except:
79 except:
80 self.original_traceback = traceback.format_tb(
80 self.original_traceback = traceback.format_tb(
81 sys.exc_info()[2])
81 sys.exc_info()[2])
82 raise
82 raise
83
83
84 try:
84 try:
85 fake_method()
85 fake_method()
86 except Exception:
86 except Exception:
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
87 new_traceback = traceback.format_tb(sys.exc_info()[2])
88
88
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 assert new_traceback_tail == self.original_traceback
90 assert new_traceback_tail == self.original_traceback
91
91
92 def test_maps_unknow_exceptions_to_unhandled(self):
92 def test_maps_unknow_exceptions_to_unhandled(self):
93 @hg.reraise_safe_exceptions
93 @hg.reraise_safe_exceptions
94 def stub_method():
94 def stub_method():
95 raise ValueError('stub')
95 raise ValueError('stub')
96
96
97 with pytest.raises(Exception) as exc_info:
97 with pytest.raises(Exception) as exc_info:
98 stub_method()
98 stub_method()
99 assert exc_info.value._vcs_kind == 'unhandled'
99 assert exc_info.value._vcs_kind == 'unhandled'
100
100
101 def test_does_not_map_known_exceptions(self):
101 def test_does_not_map_known_exceptions(self):
102 @hg.reraise_safe_exceptions
102 @hg.reraise_safe_exceptions
103 def stub_method():
103 def stub_method():
104 raise exceptions.LookupException()('stub')
104 raise exceptions.LookupException()('stub')
105
105
106 with pytest.raises(Exception) as exc_info:
106 with pytest.raises(Exception) as exc_info:
107 stub_method()
107 stub_method()
108 assert exc_info.value._vcs_kind == 'lookup'
108 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,45 +1,45 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib import rc_cache
18 from vcsserver.lib import rc_cache
19
19
20 class RemoteBase(object):
20 class RemoteBase(object):
21 EMPTY_COMMIT = '0' * 40
21 EMPTY_COMMIT = '0' * 40
22
22
23 def region(self, wire):
23 def _region(self, wire):
24 repo_id = wire.get('repo_id', '')
24 repo_id = wire.get('repo_id', '')
25 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
25 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
26 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
26 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
27
27
28 def _cache_on(self, wire):
28 def _cache_on(self, wire):
29 context = wire.get('context', '')
29 context = wire.get('context', '')
30 context_uid = '{}'.format(context)
30 context_uid = '{}'.format(context)
31 repo_id = wire.get('repo_id', '')
31 repo_id = wire.get('repo_id', '')
32 cache = wire.get('cache', True)
32 cache = wire.get('cache', True)
33 cache_on = context and cache
33 cache_on = context and cache
34 return cache_on, context_uid, repo_id
34 return cache_on, context_uid, repo_id
35
35
36 def vcsserver_invalidate_cache(self, wire, delete):
36 def vcsserver_invalidate_cache(self, wire, delete):
37 from vcsserver.lib import rc_cache
37 from vcsserver.lib import rc_cache
38 repo_id = wire.get('repo_id', '')
38 repo_id = wire.get('repo_id', '')
39
39
40 if delete:
40 if delete:
41 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
41 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
42 rc_cache.clear_cache_namespace(
42 rc_cache.clear_cache_namespace(
43 'repo_object', cache_namespace_uid, invalidate=True)
43 'repo_object', cache_namespace_uid, invalidate=True)
44
44
45 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
45 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
General Comments 0
You need to be logged in to leave comments. Login now